grantila / fetch-h2

HTTP/1+2 Fetch API client for Node.js
MIT License
336 stars 16 forks source link

RangeError: Maximum call stack size exceeded #93

Open stefan-guggisberg opened 4 years ago

stefan-guggisberg commented 4 years ago

Running many (e.g. 1000) parallel requests to the same http2 origin causes:

RangeError: Maximum call stack size exceeded
(node:96007) UnhandledPromiseRejectionWarning: RangeError: Maximum call stack size exceeded
    at countWorking (/tmp/test/node_modules/already/dist/index.js:468:26)
    at freeSlots (/tmp/test/node_modules/already/dist/index.js:474:42)
    at shouldRetry (/tmp/test/node_modules/already/dist/index.js:497:26)
    at /tmp/test/node_modules/fetch-h2/dist/lib/context.js:196:72
    at /tmp/test/node_modules/already/dist/index.js:530:30
    at Try (/tmp/test/node_modules/already/dist/index.js:336:12)
    at runner (/tmp/test/node_modules/already/dist/index.js:530:20)
    at Object.resume (/tmp/test/node_modules/already/dist/index.js:514:34)
    at /tmp/test/node_modules/already/dist/index.js:482:18
    at Array.forEach (<anonymous>)

To reproduce:

Run node test.js 1000

test.js

// Reproduction script: fire N parallel fetch() calls against the same
// HTTP/2 origin, read every body, and report failures and elapsed time.
const { fetch } = require('fetch-h2');

const args = process.argv.slice(2);
// Number of parallel requests; defaults to 100 when no valid numeric
// argument is supplied. Radix 10 is explicit; Number.isNaN avoids the
// coercing global isNaN.
const N = args.length && !Number.isNaN(Number.parseInt(args[0], 10))
  ? Number.parseInt(args[0], 10)
  : 100;

console.log(`running ${N} parallel requests...`);

const TEST_URL = 'https://httpbin.org/bytes/'; // HTTP2

(async function () {
  // generate an array of 'randomized' urls so responses vary in size
  const urls = Array.from({ length: N }, () => Math.floor(Math.random() * N))
    .map((num) => `${TEST_URL}${num}`);

  const ts0 = Date.now();

  // send all requests in parallel
  const responses = await Promise.all(urls.map((url) => fetch(url)));

  // read bodies so the underlying streams are drained
  await Promise.all(responses.map((resp) => resp.arrayBuffer()));

  const ok = responses.filter((res) => res.ok);
  if (ok.length !== N) {
    console.log(`failed requests: ${N - ok.length}`);
  }

  const ts1 = Date.now();

  console.log(`Elapsed time: ${ts1 - ts0} ms`);
}()).catch(console.error); // surface rejections instead of an UnhandledPromiseRejectionWarning
duongvanba commented 4 years ago

Is there any solution to fix this?

0xmaayan commented 1 year ago

Same issue here! Any known solution?

stefan-guggisberg commented 1 year ago

@0xmaayan Have you tried https://github.com/adobe/fetch ?

// Suggested alternative using @adobe/fetch: the same N-parallel-request
// benchmark, with an explicit rejection handler and session cleanup.
import { fetch, reset } from '@adobe/fetch';

const main = async () => {
  const args = process.argv.slice(2);
  // Number of parallel requests; defaults to 100 when no valid numeric
  // argument is supplied. Radix 10 is explicit; Number.isNaN avoids the
  // coercing global isNaN.
  const N = args.length && !Number.isNaN(Number.parseInt(args[0], 10))
    ? Number.parseInt(args[0], 10)
    : 100;

  console.log(`running ${N} parallel requests...`);

  const TEST_URL = 'https://httpbin.org/bytes/'; // HTTP2
  // generate an array of 'randomized' urls so responses vary in size
  const urls = Array.from({ length: N }, () => Math.floor(Math.random() * N))
    .map((num) => `${TEST_URL}${num}`);

  const ts0 = Date.now();

  // send all requests in parallel
  const responses = await Promise.all(urls.map((url) => fetch(url)));

  // read bodies so the underlying streams are drained
  await Promise.all(responses.map((resp) => resp.arrayBuffer()));

  const ok = responses.filter((res) => res.ok);
  if (ok.length !== N) {
    console.log(`failed requests: ${N - ok.length}`);
  }

  const ts1 = Date.now();

  console.log(`Elapsed time: ${ts1 - ts0} ms`);
};

main()
  .catch(console.error) // report any failure instead of leaving the promise floating
  .finally(reset);      // tear down pooled HTTP sessions so the process can exit