mariadb-corporation / mariadb-connector-nodejs

MariaDB Connector/Node.js is used to connect applications developed on Node.js to MariaDB and MySQL databases. MariaDB Connector/Node.js is LGPL licensed.
GNU Lesser General Public License v2.1
363 stars 93 forks source link

batch insert operation hangs with 3.3.0 #281

Open markddrake opened 2 months ago

markddrake commented 2 months ago

Given the following environment (Windows 11)

C:\Development\YADAMU\src\scratch\mariadb>node -v
v20.7.0

C:\Development\YADAMU\src\scratch\mariadb>npm ls
yadamu@1.0.0 C:\Development\YADAMU\src
+-- @aws-sdk/client-s3@3.507.0
+-- @aws-sdk/lib-storage@3.507.0
+-- @azure/storage-blob@12.17.0
+-- @electron/remote@2.1.2
+-- bootstrap-icons@1.11.3
+-- bootstrap@5.3.2
+-- cookie-parser@1.4.6
+-- csv-parser@3.0.0
+-- electron-packager@17.1.2
+-- electron@27.3.1
+-- express-session@1.18.0
+-- express@4.18.2
+-- font-awesome@4.7.0
+-- ibm_db_electron@npm:ibm_db@3.2.3
+-- ibm_db@3.2.3
+-- install@0.13.0
+-- jquery@3.7.1
+-- mariadb@3.3.0
+-- mime-types@2.1.35
+-- mongodb@6.3.0
+-- mssql@10.0.2
+-- mysql@2.18.1
+-- npm@10.4.0
+-- oracledb@6.3.0
+-- pg-copy-streams@6.0.6
+-- pg-query-stream@4.5.3
+-- pg@8.11.3
+-- readable-stream@4.5.2
+-- snowflake-sdk@1.9.3
+-- uuid@9.0.1
`-- wkx@0.5.0

And the following code


C:\Development\YADAMU\src\scratch\mariadb>type batchInsert.js
"use strict"
import mariadb from 'mariadb';
import fs from 'fs';

async function main() {

  // Connection options for the reproduction case.
  // NOTE(review): typeCast normally takes a function in mariadb-connector-nodejs;
  // `true` looks like a no-op here — confirm intent.
  const connectionDetails = {
    host: "yadamu-db1",
    user: "root",
    password: "oracle",
    port: 3307,
    database: "mysql",
    multipleStatements: true,
    typeCast: true,
    bigNumberStrings: true,
    dateStrings: true,
    rowsAsArray: true
  };

  let results;
  let pool;
  let conn;

  try {
    pool = mariadb.createPool(connectionDetails);
    conn = await pool.getConnection();

    // Session setup: disable autocommit, normalize time zone, timeouts and SQL mode.
    results = await conn.query(`SET AUTOCOMMIT = 0, TIME_ZONE = '+00:00',SESSION INTERACTIVE_TIMEOUT = 600000, WAIT_TIMEOUT = 600000, SQL_MODE='ANSI_QUOTES,PAD_CHAR_TO_FULL_LENGTH', GROUP_CONCAT_MAX_LEN = 1024000, GLOBAL LOCAL_INFILE = 'ON';`);

    // Recreate the test schema and target table from scratch.
    results = await conn.query(`drop schema if exists "YADAMU_TEST";`);
    results = await conn.query(`create schema "YADAMU_TEST";`);
    results = await conn.query(`USE "YADAMU_TEST"`);
    results = await conn.query(`create table  if not exists "YADAMU_TEST"."character_types"(
  "character_col" char(1) ,"character_varying_col" longtext ,"character_max_col" mediumtext ,"character_varying_max_col" mediumtext ,"character_4000_col" varchar(4000) ,"character_varying_4000_col" varchar(4000) ,"character_64k_col" text ,"character_varying_64k_col" text ,"text_col" longtext)`);

    // Load the rows to insert from the JSON fixture.
    const testData = fs.readFileSync('TEST_DATA.json');
    const data = JSON.parse(testData);
    console.log('ROWS', data.length);

    const sql = `insert into "YADAMU_TEST"."character_types" ("character_col","character_varying_col","character_max_col","character_varying_max_col","character_4000_col","character_varying_4000_col","character_64k_col","character_varying_64k_col","text_col") values (?,?,?,?,?,?,?,?,?)`;

    try {
      results = await conn.beginTransaction();
      results = await conn.query(`SAVEPOINT YADAMU_INSERT; `);

      console.log('Insert');
      results = await conn.batch(sql, data);

      console.log('Commit');
      results = await conn.commit();
      console.log('Success');
    } catch (e) {
      console.log('Failed');
      console.log(e);
      // Roll back so the pooled connection is not left inside an open
      // transaction (best-effort: the connection may already be dead).
      await conn.rollback().catch(() => {});
    }
  } catch (e) {
    console.log(e);
  } finally {
    // Always release resources, and await them so the process can exit
    // cleanly — the original fire-and-forget end() calls could leave the
    // event loop busy, and on an early error the pool was never closed.
    if (conn) await conn.end().catch(() => {});
    if (pool) await pool.end().catch(() => {});
  }
}

main().then(() => {console.log("Complete")}).catch((e) => {console.log(e)})

The batch insert operation hangs

C:\Development\YADAMU\src\scratch\mariadb>node batchInsert.js
ROWS 4
Insert
^C
C:\Development\YADAMU\src\scratch\mariadb>

TEST_DATA.zip

rusher commented 2 months ago

Thanks for your detailed example. This will be corrected in the next version. As a workaround, you can set the option maxAllowedPacket to 16777216.

markddrake commented 2 months ago

I had set it already (actually to 1G)..

Modified test case checks, and if necessary sets MAX_ALLOWED_PACKET

C:\Development\YADAMU\src\scratch\mariadb>node -v
v20.7.0

C:\Development\YADAMU\src\scratch\mariadb>npm ls
yadamu@1.0.0 C:\Development\YADAMU\src
+-- @aws-sdk/client-s3@3.507.0
+-- @aws-sdk/lib-storage@3.507.0
+-- @azure/storage-blob@12.17.0
+-- @electron/remote@2.1.2
+-- bootstrap-icons@1.11.3
+-- bootstrap@5.3.2
+-- cookie-parser@1.4.6
+-- csv-parser@3.0.0
+-- electron-packager@17.1.2
+-- electron@27.3.1
+-- express-session@1.18.0
+-- express@4.18.2
+-- font-awesome@4.7.0
+-- ibm_db_electron@npm:ibm_db@3.2.3
+-- ibm_db@3.2.3
+-- install@0.13.0
+-- jquery@3.7.1
+-- mariadb@3.3.0
+-- mime-types@2.1.35
+-- mongodb@6.3.0
+-- mssql@10.0.2
+-- mysql@2.18.1
+-- npm@10.4.0
+-- oracledb@6.3.0
+-- pg-copy-streams@6.0.6
+-- pg-query-stream@4.5.3
+-- pg@8.11.3
+-- readable-stream@4.5.2
+-- snowflake-sdk@1.9.3
+-- uuid@9.0.1
`-- wkx@0.5.0

C:\Development\YADAMU\src\scratch\mariadb>type batchInsert.js
"use strict"
import mariadb from 'mariadb';
import fs from 'fs';

// Target value (1 GiB) for the server's MAX_ALLOWED_PACKET system variable.
const MAX_ALLOWED_PACKET             = 1 * 1024 * 1024 * 1024;

// Query the server's current max_allowed_packet setting.
const SQL_GET_MAX_ALLOWED_PACKET = `SELECT @@MAX_ALLOWED_PACKET`

// Raise the server-wide setting to the target value (requires a privileged account).
const SQL_SET_MAX_ALLOWED_PACKET = `SET GLOBAL MAX_ALLOWED_PACKET=${MAX_ALLOWED_PACKET}`

async function main() {

  // Connection options for the reproduction case.
  // NOTE(review): typeCast normally takes a function in mariadb-connector-nodejs;
  // `true` looks like a no-op here — confirm intent.
  const connectionDetails = {
    host: "yadamu-db1",
    user: "root",
    password: "oracle",
    port: 3307,
    database: "mysql",
    multipleStatements: true,
    typeCast: true,
    bigNumberStrings: true,
    dateStrings: true,
    rowsAsArray: true
  };

  let results;
  let pool;
  let conn;

  try {
    pool = mariadb.createPool(connectionDetails);

    // Ensure the server-side MAX_ALLOWED_PACKET is at least the target value,
    // using a short-lived connection that is always released.
    try {
      const setupConn = await pool.getConnection();
      try {
        results = await setupConn.query(SQL_GET_MAX_ALLOWED_PACKET);
        const maxAllowedPacket = parseInt(results[0][0], 10);
        if (maxAllowedPacket < MAX_ALLOWED_PACKET) {
          console.log(`Increasing MAX_ALLOWED_PACKET to ${MAX_ALLOWED_PACKET}.`);
          results = await setupConn.query(SQL_SET_MAX_ALLOWED_PACKET);
        }
      } finally {
        await setupConn.end().catch(() => {});
      }
    } catch (e) {
      console.log('MAX_ALLOWED_PACKET', 'Failed');
      console.log(e);
      // The finally below still closes the pool, so the early return no
      // longer leaves the process hanging on an open pool.
      return;
    }

    conn = await pool.getConnection();
    results = await conn.query(SQL_GET_MAX_ALLOWED_PACKET);
    const maxAllowedPacket = parseInt(results[0][0], 10);
    console.log(`MAX_ALLOWED_PACKET`, maxAllowedPacket);

    // Session setup: disable autocommit, normalize time zone, timeouts and SQL mode.
    results = await conn.query(`SET AUTOCOMMIT = 0, TIME_ZONE = '+00:00',SESSION INTERACTIVE_TIMEOUT = 600000, WAIT_TIMEOUT = 600000, SQL_MODE='ANSI_QUOTES,PAD_CHAR_TO_FULL_LENGTH', GROUP_CONCAT_MAX_LEN = 1024000, GLOBAL LOCAL_INFILE = 'ON';`);

    // Recreate the test schema and target table from scratch.
    results = await conn.query(`drop schema if exists "YADAMU_TEST";`);
    results = await conn.query(`create schema "YADAMU_TEST";`);
    results = await conn.query(`USE "YADAMU_TEST"`);
    results = await conn.query(`create table  if not exists "YADAMU_TEST"."character_types"(
  "character_col" char(1) ,"character_varying_col" longtext ,"character_max_col" mediumtext ,"character_varying_max_col" mediumtext ,"character_4000_col" varchar(4000) ,"character_varying_4000_col" varchar(4000) ,"character_64k_col" text ,"character_varying_64k_col" text ,"text_col" longtext)`);

    // Load the rows to insert from the JSON fixture.
    const testData = fs.readFileSync('TEST_DATA.json');
    const data = JSON.parse(testData);
    console.log('ROWS', data.length);

    const sql = `insert into "YADAMU_TEST"."character_types" ("character_col","character_varying_col","character_max_col","character_varying_max_col","character_4000_col","character_varying_4000_col","character_64k_col","character_varying_64k_col","text_col") values (?,?,?,?,?,?,?,?,?)`;

    try {
      results = await conn.beginTransaction();
      results = await conn.query(`SAVEPOINT YADAMU_INSERT; `);

      console.log('Insert');
      results = await conn.batch(sql, data);

      console.log('Commit');
      results = await conn.commit();
      console.log('Success');
    } catch (e) {
      console.log('Failed');
      console.log(e);
      // Roll back so the pooled connection is not left inside an open
      // transaction (best-effort: the connection may already be dead).
      await conn.rollback().catch(() => {});
    }
  } catch (e) {
    console.log(e);
  } finally {
    // Always release resources, and await them so the process can exit
    // cleanly — the original fire-and-forget end() calls could leave the
    // event loop busy, and on an early error the pool was never closed.
    if (conn) await conn.end().catch(() => {});
    if (pool) await pool.end().catch(() => {});
  }
}

main().then(() => {console.log("Complete")}).catch((e) => {console.log(e)})

C:\Development\YADAMU\src\scratch\mariadb>node batchInsert.js
MAX_ALLOWED_PACKET 1073741824
ROWS 4
Insert
^C
C:\Development\YADAMU\src\scratch\mariadb>

Hang is still observed

rusher commented 2 months ago

i mean something like :

    // Same connection options as the reported test case, with the addition of
    // the client-side maxAllowedPacket option suggested as the workaround.
    const connectionDetails = {
            host      : "yadamu-db1"
           ,user      : "root"
           ,password  : "oracle"
           ,port      : 3307
           ,database  : "mysql"
           ,multipleStatements: true
           ,typeCast:true
                   ,bigNumberStrings:true
                   ,dateStrings:true
                   ,rowsAsArray:true
, maxAllowedPacket: 16777216}

(set the right amount — 16777216 is just the default value used as an example — but it must be at least that amount to work around the issue until the next release; this has no other impact and will resolve the issue)

markddrake commented 2 months ago

Set to 1G and it appears to work.. However with a slightly different data set I got a broken connection...

C:\Development\YADAMU\src\scratch\mariadb>node -v
v20.7.0

C:\Development\YADAMU\src\scratch\mariadb>npm ls
yadamu@1.0.0 C:\Development\YADAMU\src
+-- @aws-sdk/client-s3@3.507.0
+-- @aws-sdk/lib-storage@3.507.0
+-- @azure/storage-blob@12.17.0
+-- @electron/remote@2.1.2
+-- bootstrap-icons@1.11.3
+-- bootstrap@5.3.2
+-- cookie-parser@1.4.6
+-- csv-parser@3.0.0
+-- electron-packager@17.1.2
+-- electron@27.3.1
+-- express-session@1.18.0
+-- express@4.18.2
+-- font-awesome@4.7.0
+-- ibm_db_electron@npm:ibm_db@3.2.3
+-- ibm_db@3.2.3
+-- install@0.13.0
+-- jquery@3.7.1
+-- mariadb@3.3.0
+-- mime-types@2.1.35
+-- mongodb@6.3.0
+-- mssql@10.0.2
+-- mysql@2.18.1
+-- npm@10.4.0
+-- oracledb@6.3.0
+-- pg-copy-streams@6.0.6
+-- pg-query-stream@4.5.3
+-- pg@8.11.3
+-- readable-stream@4.5.2
+-- snowflake-sdk@1.9.3
+-- uuid@9.0.1
`-- wkx@0.5.0

C:\Development\YADAMU\src\scratch\mariadb>type batchInsert.js
"use strict"
import mariadb from 'mariadb';
import fs from 'fs';

// Target value (1 GiB) for the server's MAX_ALLOWED_PACKET system variable.
const MAX_ALLOWED_PACKET             = 1 * 1024 * 1024 * 1024;

// Query the server's current max_allowed_packet setting.
const SQL_GET_MAX_ALLOWED_PACKET = `SELECT @@MAX_ALLOWED_PACKET`

// Raise the server-wide setting to the target value (requires a privileged account).
const SQL_SET_MAX_ALLOWED_PACKET = `SET GLOBAL MAX_ALLOWED_PACKET=${MAX_ALLOWED_PACKET}`

async function main() {

  // Connection options; maxAllowedPacket is the client-side workaround
  // suggested for the 3.3.0 batch hang.
  // NOTE(review): typeCast normally takes a function in mariadb-connector-nodejs;
  // `true` looks like a no-op here — confirm intent.
  const connectionDetails = {
    host: "yadamu-db1",
    user: "root",
    password: "oracle",
    port: 3307,
    database: "mysql",
    multipleStatements: true,
    typeCast: true,
    bigNumberStrings: true,
    dateStrings: true,
    rowsAsArray: true,
    maxAllowedPacket: MAX_ALLOWED_PACKET
  };

  let results;
  let pool;
  let conn;

  try {
    pool = mariadb.createPool(connectionDetails);

    // Ensure the server-side MAX_ALLOWED_PACKET is at least the target value,
    // using a short-lived connection that is always released.
    try {
      const setupConn = await pool.getConnection();
      try {
        results = await setupConn.query(SQL_GET_MAX_ALLOWED_PACKET);
        const maxAllowedPacket = parseInt(results[0][0], 10);
        if (maxAllowedPacket < MAX_ALLOWED_PACKET) {
          console.log(`Increasing MAX_ALLOWED_PACKET to ${MAX_ALLOWED_PACKET}.`);
          results = await setupConn.query(SQL_SET_MAX_ALLOWED_PACKET);
        }
      } finally {
        await setupConn.end().catch(() => {});
      }
    } catch (e) {
      console.log('MAX_ALLOWED_PACKET', 'Failed');
      console.log(e);
      // The finally below still closes the pool, so the early return no
      // longer leaves the process hanging on an open pool.
      return;
    }

    conn = await pool.getConnection();
    results = await conn.query(SQL_GET_MAX_ALLOWED_PACKET);
    const maxAllowedPacket = parseInt(results[0][0], 10);
    console.log(`MAX_ALLOWED_PACKET`, maxAllowedPacket);

    // Session setup: disable autocommit, normalize time zone, timeouts and SQL mode.
    results = await conn.query(`SET AUTOCOMMIT = 0, TIME_ZONE = '+00:00',SESSION INTERACTIVE_TIMEOUT = 600000, WAIT_TIMEOUT = 600000, SQL_MODE='ANSI_QUOTES,PAD_CHAR_TO_FULL_LENGTH', GROUP_CONCAT_MAX_LEN = 1024000, GLOBAL LOCAL_INFILE = 'ON';`);

    // Recreate the test schema and target table from scratch.
    results = await conn.query(`drop schema if exists "YADAMU_TEST";`);
    results = await conn.query(`create schema "YADAMU_TEST";`);
    results = await conn.query(`USE "YADAMU_TEST"`);
    results = await conn.query(`create table  if not exists "YADAMU_TEST"."character_types"(
  "character_col" char(1) ,"character_varying_col" longtext ,"character_max_col" mediumtext ,"character_varying_max_col" mediumtext ,"character_4000_col" varchar(4000) ,"character_varying_4000_col" varchar(4000) ,"character_64k_col" text ,"character_varying_64k_col" text ,"text_col" longtext)`);

    // Load the rows to insert from the JSON fixture.
    const testData = fs.readFileSync('data.json');
    const data = JSON.parse(testData);
    console.log('ROWS', data.length);

    const sql = `insert into "YADAMU_TEST"."character_types" ("character_col","character_varying_col","character_max_col","character_varying_max_col","character_4000_col","character_varying_4000_col","character_64k_col","character_varying_64k_col","text_col") values (?,?,?,?,?,?,?,?,?)`;

    try {
      results = await conn.beginTransaction();
      results = await conn.query(`SAVEPOINT YADAMU_INSERT; `);

      console.log('Insert');
      results = await conn.batch(sql, data);

      console.log('Commit');
      results = await conn.commit();
      console.log('Success');
    } catch (e) {
      console.log('Failed');
      console.log(e);
      // Roll back so the pooled connection is not left inside an open
      // transaction (best-effort: the connection may already be dead).
      await conn.rollback().catch(() => {});
    }
  } catch (e) {
    console.log(e);
  } finally {
    // Always release resources, and await them so the process can exit
    // cleanly — the original fire-and-forget end() calls could leave the
    // event loop busy, and on an early error the pool was never closed.
    if (conn) await conn.end().catch(() => {});
    if (pool) await pool.end().catch(() => {});
  }
}

main().then(() => {console.log("Complete")}).catch((e) => {console.log(e)})

C:\Development\YADAMU\src\scratch\mariadb>node batchInsert.js
MAX_ALLOWED_PACKET 1073741824
ROWS 4
Insert
Failed
Error: write ECONNRESET
    at afterWriteDispatched (node:internal/stream_base_commons:160:15)
    at writeGeneric (node:internal/stream_base_commons:151:3)
    at Socket._writeGeneric (node:net:952:11)
    at Socket._write (node:net:964:8)
    at writeOrBuffer (node:internal/streams/writable:399:12)
    at _write (node:internal/streams/writable:340:10)
    at Writable.write (node:internal/streams/writable:344:10)
    at socket.writeBuf (C:\Development\YADAMU\src\node_modules\mariadb\lib\connection.js:884:49)
    at PacketOutputStream.flushBufferBasic (C:\Development\YADAMU\src\node_modules\mariadb\lib\io\packet-output-stream.js:698:17)
    at PacketOutputStream.flushBufferStopAtMark (C:\Development\YADAMU\src\node_modules\mariadb\lib\io\packet-output-stream.js:685:10) {
  errno: -4077,
  code: 'ECONNRESET',
  syscall: 'write',
  fatal: true,
  sqlState: 'HY000'
}
Complete

data.zip

markddrake commented 2 months ago

The ECONNRESET appears to be Windows-specific....

rusher commented 2 months ago

Thank you already for the clear test case. I was able to reproduce it. The exchange flows are nevertheless correct, that's interesting, need to study that in detail.

Orel-A commented 3 weeks ago

@rusher Hi, after this commit V3.3.1 I have a regression in my code resulting in Error: write ECONNRESET. My code is inserting a batch of data. How can I resolve this?