oracle / node-oracledb

Oracle Database driver for Node.js maintained by Oracle Corp.
http://oracle.github.io/node-oracledb/
Other
2.26k stars 1.08k forks source link

Long running Query does not resolve #1242

Closed Enado95 closed 4 years ago

Enado95 commented 4 years ago

See i have a query that returns historical information over a 12 month period for a specific customer. When I run in sql developer it takes between 10 - 12 seconds. However, when i run it from my code it just hangs and does not return any result. I have UV_THREADPOOL_SIZE=1024 which should be enough. See code below


/**Function that use to execute my queries*/
exports.executeTableFunction = async (sql, binds, options, returnKey, pool) => {
  return new Promise(async (resolve, reject) => {
    let connection
    try {
      connection = await oracledb.getConnection({ poolAlias: pool })
      const result = await connection.execute(sql, binds, options)
      if (result.rows.length !== 0) {
        resolve({ [returnKey]: result.rows })
      } else {
        resolve({ [returnKey]: [] })
      }
    } catch (error) {
      reject(error)
    } finally {
      if (connection) {
        try {
          await connection.close()
        } catch (error) {
          console.log(error)
        }
      }
    }
  })
}

I create a function to execute the query

/**how execute the query*/
exports.getDelinquency = (req, res) => {
  return new Promise(async (resolve, reject) => {
    const ssn= req.body.ssn
    let returnKey = 'eodDelinquency'
    const months = parseInt(req.body.months) || 12
    const pool = process.env.PRIME_POOL_ALIAS

    try {
      const sql = `select accountNumber,accountStatus, 
        TO_CHAR(ASATDATE, 'YYYYMM') as "asAtDate", accountType, ssn
        from table(history.deliquencyHistory(:id, :months))`
      const binds = [ssn, months]
      const options = { outFormat: oracledb.OUT_FORMAT_OBJECT }

      const result = await executeTableFunction(
        sql,
        binds,
        options,
        returnKey,
        pool
      ).catch(err => {
        throw err
      })
      resolve(result)
    } catch (error) {
      reject(error)
    }
  })
}

I then call the getDelinquency function in a controller where I execute other queries. N.B. I've removed the others and left only getDelinquency — that's how I found it was the issue.

Here's how I call the function:

const { getDelinquency } = require('./historyController')

exports.generateScore = async (req, res, next) => {
  try {
    const Delinquency = getDelinquency(req).catch(err => {
      throw err
    })

   const data1 = await EodDelinquency
   return res.send(data1)
  } catch (err) {
    console.error(err)
    return res.status(500).send(err.message)
  }
}

Please assist with this issue.

cjbj commented 4 years ago
Enado95 commented 4 years ago

@cjbj I created a standalone example below and the result remains the same. Tested with faster queries and got back results, but no results from the historical query that executes in 10-12 seconds max.

const oracledb = require("oracledb");

// Return rows as objects keyed by column name rather than positional arrays.
oracledb.outFormat = oracledb.OUT_FORMAT_OBJECT;

/**
 * Standalone reproduction: opens a single dedicated connection and runs the
 * pipelined-table-function query, logging whatever rows come back.
 */
async function run() {
  let connection;

  try {
    connection = await oracledb.getConnection({
      user: "example",
      password: "examplePassward4321*",
      connectString:
        "(DESCRIPTION=(ADDRESS=(PROTOCOL=TCP)(HOST=11.123.45.678)(PORT=1234))(CONNECT_DATA=(SERVER=DEDICATED)(SID=mysid)))"
    });

    // Positional binds: first value maps to :id, second to :months.
    const binds = ['123456789', 12];
    const result = await connection.execute(
      `select accountNumber,accountStatus, 
        TO_CHAR(ASATDATE, 'YYYYMM') as "asAtDate", accountType, ssn
        from table(history.deliquencyHistory(:id, :months))`,
      binds
    );

    console.log(result.rows);
  } catch (err) {
    console.error(err);
  } finally {
    // Release the standalone connection whether or not the query succeeded.
    if (connection) {
      try {
        await connection.close();
      } catch (err) {
        console.error(err);
      }
    }
  }
}

run();
cjbj commented 4 years ago

Can you give the SQL to create history.deliquencyHistory?

Have you tried other variants of the query? Try creating a dummy package and query that.

Enado95 commented 4 years ago

@cjbj yes, I have tried querying other packages — even another function within that same package. Unfortunately I can't give you the SQL as it is confidential.

cjbj commented 4 years ago

My best guess is its something in the history.deliquencyHistory. That's all I can say. If its a pipelined table returning a single row, then you may have hit https://github.com/oracle/odpi/issues/73 Let us know what you find, since we won't be able to reproduce your problem without a runnable testcase.

Enado95 commented 4 years ago

@cjbj i will edit the query and so you can access it tomorrow. But it’s not returning a single row(unless I don’t understand that odpi issue ).

Enado95 commented 4 years ago

@cjbj please see query below, logic is the same. N:B: All my other queries are structured like this, they are doing less work and as such, they are faster.

-- Pipelined table function: streams one EOD_DQ row per account per billing
-- period for the customer identified by SSN, over the most recent MONTHS
-- months ending at the customer's latest statement billing date.
-- NOTE(review): as pasted, several table aliases look inconsistent and this
-- text likely does not compile verbatim — e.g. the inline view is aliased
-- `ac` but the join condition references `acc` and `stat`, and `stat` is
-- never declared in that subquery. Presumably this was hand-edited for
-- confidentiality; verify against the real source before reuse.
-- NOTE(review): the parameter SSN shares its name with the column ppl.SSN,
-- so `where ppl.SSN = SSN` may compare the column to itself (PL/SQL name
-- resolution prefers the column) — TODO confirm against the deployed version.
FUNCTION deliquencyHistory (SSN IN VARCHAR, MONTHS IN INT)RETURN EOD_DQ_TABLE PIPELINED
AS
  MAX_END_DATE DATE;        -- latest billing date found for this customer
  START_DATE DATE;          -- MAX_END_DATE minus MONTHS months
  test_table EOD_DQ_ROWS;   -- scratch row piped out once per loop iteration
BEGIN

    -- Find the most recent statement billing date across the customer's accounts.
    select max(billingdate) into max_end_date  from statements stat join accounts acc on acc.id = stat.accountId
  where exists 
  (select 1 from  cards crd  inner join customers ppl on ppl.id = crd.customeId
    where ppl.SSN = SSN and crd.accountId = acc.id ) ;
  -- Window start = MONTHS months before the latest billing date.
  SELECT ADD_MONTHS(max_end_date, MONTHS*-1) into start_date from dual;

-- One row per account/billing period inside the window; row_number() appears
-- intended to dedupe by account, though row_num is never filtered below.
FOR x IN (
select
acc.accountId,
acc.ssn,
acc.accountType,
acc.status,
acc.billingdate
from
(

  SELECT accountId, billingdate, status, acc.accountType,ssn,row_number() over (partition by accountId order by sequence desc, billingdate desc) row_num
  FROM tctdbs.cstatements cs
  inner join (select account, accountType from accounts) ac on acc.accountId = stat.accountId
  where billingdate >=start_date and billingdate <= max_end_date
  and exists (select 1 from  cards crd  inner join customers ppl on ppl.id = crd.customerId 
    where ppl.SSN = SSN and crd.accountId = ac.accountId ) 
    and SEQUENCE =(SELECT MAX(SEQUENCE) FROM statements cst where cst.id=stat.id and billingdate <=max_end_date)
) ca
)

-- Copy each fetched row into the pipelined row type and emit it.
LOOP
test_table.accountNumber:=x.accountId;
test_table.accountStatus := x.status;
test_table.asAtDate:= x.billingdate;
test_table.accountType:= x.accountType;
test_table.ssn :=x.ssn;

PIPE ROW (test_table); 
END LOOP;
RETURN;

END  deliquencyHistory;
cjbj commented 4 years ago

Two things come to mind

Enado95 commented 4 years ago

I’ll write some create table statements and some insert statements in the morning.

On Mon, Apr 20, 2020 at 11:22 PM Christopher Jones notifications@github.com wrote:

Two things come to mind

-

your query really is slow, but your SQL Developer test is somehow invalid (perhaps you didn't connect to the same DB - we see this all the time!). You could tune the SQL, return fewer rows (with a WHERE clause), tune fetchArraySize etc.

You are using a PIPE ROW, so it may be the ODPI-C issue I mentioned. Since I don't have your tables, I can't test your code. You should create some dummy tables and create a package like the one you posted that uses those tables. See if it reproduces.

— You are receiving this because you authored the thread. Reply to this email directly, view it on GitHub https://github.com/oracle/node-oracledb/issues/1242#issuecomment-616943047, or unsubscribe https://github.com/notifications/unsubscribe-auth/AGE5KHUM2VFEJO6PHZWIR7LRNUNRFANCNFSM4MK7ZMQA .

Enado95 commented 4 years ago

@cjbj I've been a bit occupied but I've tried tuning oracledb.fetchArraySize = 200; as well as prefetchRows and maxRows and it hasn't helped thus far. I'll attempt to send you dummy create table statements while maintaining the integrity of the actual table structure asap. It's a bit challenging since its large tables.

cjbj commented 4 years ago

Another thing to try is modifying the node-oracledb source code and rebuilding. Follow the installation instructions. In odpi/src/dpiImpl.h, change the default prefetch to 0:

// define number of rows to prefetch
#define DPI_PREFETCH_ROWS_DEFAULT                   0
Enado95 commented 4 years ago

Will try. Not sure how this will work going forward in our current CI/CD flow though.

cjbj commented 4 years ago

@Enado95 instead of rebuilding code, you can probably create an oraaccess.xml file and set the prefetch to 0. See Optional Oracle Client Configuration.

Enado95 commented 4 years ago

@cjbj Will explore. However, looking at the docs the file would have to be placed in the database configuration directory and i don't have access to such location. Its an enterprise wide database so this might be a challenge. But if the function returns JSON vs a table does it improves performance?

cjbj commented 4 years ago

It is 'client side' and is placed on the Node.js computer. Various locations are shown in the doc; or you can set TNS_ADMIN. (Which bit of the doc do I need to update to make this clearer?)

Enado95 commented 4 years ago

@cjbj Sorry i had just glanced before posting. Will attempt to put in the source directory of the project and set the TNS-ADMIN variable to the location (I don’t have access to the server where docker is ran so I can’t copy the files when building the images to the default locations on the mentioned in the docs. A custom image with the dependencies was created for me to use.). One more thing, should I keep oracledb.fetchArraySize = 200; as mentioned earlier or just do the prefetch configurations as recommended?

cjbj commented 4 years ago

@Enado95 start by keeping your current fetchArraySize value.

The suggestion about using an oraaccess.xml file is just to test the effect of changing the prefetch size to try and narrow down the root cause of your problem. You may or may not find that a prefetch size of 0 has other performance impacts, particularly by increasing 'round-trips' between node-oracledb and the DB.

Enado95 commented 4 years ago

@cjbj The fetchArraySize =200 is working. Will monitor.

Enado95 commented 4 years ago

@Enado95 instead of rebuilding code, you can probably create an oraaccess.xml file and set the prefetch to 0. See Optional Oracle Client Configuration.

How can I check if this was set? The issue is happening again (works in SQL Developer but not in code). I checked oracledb.prefetchRows and it returns undefined. I created this folder etc\my-oracle-config\ and added oraaccess.xml in the project directory.

cjbj commented 4 years ago

Did you set TNS_ADMIN to the directory containing the file?

Try adding a deliberate mistake with tag names in the file and you should get an error when you run Node.js, e.g.:

Error: ORA-24296: error in processing the XML configuration file oraaccess.xml
LPX-00225: end-element tag "rowsx" does not match start-element tag "rows"

There is no oracledb.prefetchRows in node-oracledb 4.2.

Enado95 commented 4 years ago

Didn't throw the error see folder structure below. The env variable is TNS_ADMIN=/etc/my-oracle-config image

xml file content

 <oraaccess xmlns="http://xmlns.oracle.com/oci/oraaccess"
  xmlns:oci="http://xmlns.oracle.com/oci/oraaccess"
  schemaLocation="http://xmlns.oracle.com/oci/oraaccess
  http://xmlns.oracle.com/oci/oraaccess.xsd">
  <default_parameters>
    <prefetch>
      <!-- Deliberately mismatched tags (suggested above to verify the file is
           actually parsed — a read triggers ORA-24296/LPX-00225). The correct
           element is <rows>0</rows>. -->
      <rowasas>0</rodeews>
    </prefetch>
  </default_parameters>
</oraaccess>
cjbj commented 4 years ago

Thanks for your perseverance. Keep trying.

This file:

// Point the Oracle client at the directory holding oraaccess.xml.
process.env.TNS_ADMIN='/Users/cjones/n/testoraaccess';  // Your file is in this directory

const oracledb = require('oracledb');
const config = require('./dbconfig.js');

/**
 * Minimal check that oraaccess.xml is being read: if the XML is invalid,
 * getConnection() fails with ORA-24296 instead of returning a connection.
 */
async function run() {
  try {
    const conn = await oracledb.getConnection(config);
    const result = await conn.execute(`SELECT * FROM DUAL`);
    console.log(result);
  } catch (err) {
    console.error(err);
  }
}

run();

Gives:

Error: ORA-24296: error in processing the XML configuration file oraaccess.xml
LPX-00225: end-element tag "rodeews" does not match start-element tag "rowasas"
Enado95 commented 4 years ago

Works. The path was missing a dot. So instead of TNS_ADMIN=/etc/my-oracle-config it was TNS_ADMIN=./etc/my-oracle-config. Will continue to monitor again

Enado95 commented 4 years ago

@cjbj The fetchArraySize =200 is working. Will monitor.

@cjbj This is working fine in production. We can close this. Thanks.

cjbj commented 4 years ago

I wish we had found the cause but I'll close this as requested. Let us know if it reappears.

Note that the 5.0.0-dev code in the master branch now has an attribute to change the prefetch value, so playing with oraaccess.xml is/will be obsolete.