dherault / serverless-offline

Emulate AWS λ and API Gateway locally when developing your Serverless project
MIT License
5.16k stars 794 forks source link

MaxListenersExceededWarning with esbuild #1753

Open tgdn opened 5 months ago

tgdn commented 5 months ago

Bug Report

Current Behavior

Running serverless-offline with serverless-esbuild via `NODE_OPTIONS="--trace-warnings" serverless offline start` leads to the following behavior during development:

Whenever a file is updated, my machine freezes and I get the following warning:

(node:38275) MaxListenersExceededWarning: Possible EventEmitter memory leak detected. 11 exit listeners added to [Worker]. Use emitter.setMaxListeners() to increase limit
    at _addListener (node:events:591:17)
    at Worker.addListener (node:events:609:10)
    at Worker.once (node:events:653:8)
    at node:internal/worker:377:12
    at new Promise (<anonymous>)
    at Worker.terminate (node:internal/worker:376:12)
    at WorkerThreadRunner.cleanup (file:///repository/node_modules/serverless-offline/src/lambda/handler-runner/worker-thread-runner/WorkerThreadRunner.js:30:31)
    at HandlerRunner.cleanup (file:///repository/node_modules/serverless-offline/src/lambda/handler-runner/HandlerRunner.js:105:25)
    at LambdaFunction.cleanup (file:///repository/node_modules/serverless-offline/src/lambda/LambdaFunction.js:220:31)
    at file:///repository/node_modules/serverless-offline/src/lambda/LambdaFunctionPool.js:63:41
    at Set.forEach (<anonymous>)
    at file:///repository/node_modules/serverless-offline/src/lambda/LambdaFunctionPool.js:62:23
    at Map.forEach (<anonymous>)
    at #cleanupPool (file:///repository/node_modules/serverless-offline/src/lambda/LambdaFunctionPool.js:61:16)
    at LambdaFunctionPool.cleanup (file:///repository/node_modules/serverless-offline/src/lambda/LambdaFunctionPool.js:76:28)
    at Lambda.cleanup (file:///repository/node_modules/serverless-offline/src/lambda/Lambda.js:66:37)
    at #cleanupFunctions (file:///repository/node_modules/serverless-offline/src/ServerlessOffline.js:142:26)
    at PluginManager.runHooks (/repository/node_modules/serverless/lib/classes/plugin-manager.js:530:15)
    at PluginManager.invoke (/repository/node_modules/serverless/lib/classes/plugin-manager.js:564:20)
    at process.processTicksAndRejections (node:internal/process/task_queues:95:5)
    at async PluginManager.spawn (/repository/node_modules/serverless/lib/classes/plugin-manager.js:585:5)

Sample Code

# serverless.yml — minimal reproduction config for the MaxListenersExceededWarning.
service: api
frameworkVersion: "3"

provider:
  name: aws
  deploymentMethod: direct
  region: eu-west-1
  architecture: arm64
  memorySize: 256
  stage: dev
  timeout: 30
  runtime: nodejs18.x

# NOTE(review): serverless-offline is listed last; confirm this matches the
# plugin-ordering requirement of the installed serverless-offline version.
plugins:
  - serverless-esbuild
  - serverless-lift
  - serverless-offline

custom:
  serverless-offline:
    httpPort: 4000
    # NOTE(review): useChildProcesses is enabled, yet the stack trace above
    # goes through WorkerThreadRunner — verify the installed serverless-offline
    # version still honors this option.
    useChildProcesses: true
  esbuild:
    # esbuild options live in an external config file (reproduced below).
    config: "./esbuild.config.cjs"

# serverless-lift construct (queue type) with its worker handler.
constructs:
  sendEmailWorkerQueue:
    type: queue
    worker:
      handler: src/sendEmail.handler

functions:
  trpc:
    handler: src/trpc.handler
    events:
      # Two HTTP events so both /trpc and /trpc/<nested path> reach the same lambda.
      - http:
          path: /trpc
          method: ANY
          cors:
            origin: "*"
            headers: "*"
            allowCredentials: true
      - http:
          path: /trpc/{path+}
          method: ANY
          cors:
            origin: "*"
            headers: "*"
            allowCredentials: true
module.exports = () => ({
  format: "esm",
  target: "esnext",
  platform: "node",
  packager: "pnpm",
  loader: { ".node": "copy", ".js": "jsx" },
  bundle: true,
  minify: process.env.NODE_ENV === "production",
  minifyWhitespace: process.env.NODE_ENV === "production",
  minifySyntax: process.env.NODE_ENV === "production",
  minifyIdentifiers: false,
  sourcemap: process.env.NODE_ENV === "production",
  keepNames: true,
  treeShaking: process.env.NODE_ENV === "production",
  outputFileExtension: ".mjs",
  banner: {
    js: `
// BANNER START
const require = (await import("node:module")).createRequire(import.meta.url);
const __filename = (await import("node:url")).fileURLToPath(import.meta.url);
const __dirname = (await import("node:path")).dirname(__filename);
// BANNER END
`,
  },
  watch: {
    pattern: [
      "./**/*.(js|ts)",
      "../../packages/api/**/*.(js|ts)",
      "../../packages/db/**/*.(ts|js)",
      "../../packages/emails/**/*.(ts|js|jsx|tsx)",
      "!node_modules/@aws-sdk/**",
    ],
    ignore: [
      "node_modules",
      ".turbo",
      ".esbuild",
      ".build",
      "dist",
      ".serverless",
      "**/apps/webapp/**",
      "**/packages/ui/**",
    ],
  },
});
import { awsLambdaRequestHandler } from "@trpc/server/adapters/aws-lambda";
import type { APIGatewayProxyEvent, APIGatewayProxyResult } from "aws-lambda";

import { appRouter, createAwsTrpcContext } from "@acme/api";

/**
 * Temporary debug handler: logs the incoming API Gateway event and
 * short-circuits with a fixed 200/"ok" response. The real tRPC dispatch
 * is intentionally commented out while reproducing the issue.
 */
export const handler = async (
  event: APIGatewayProxyEvent,
): Promise<APIGatewayProxyResult> => {
  console.log(event);
  const stubResponse: APIGatewayProxyResult = {
    statusCode: 200,
    body: "ok",
  };
  return stubResponse;
  // return trpcHandler(event, context);
};

// Real request handler wired to the tRPC app router; currently unused
// because the exported handler above returns a stub response.
const trpcHandlerOptions = {
  router: appRouter,
  createContext: createAwsTrpcContext,
};

const trpcHandler = awsLambdaRequestHandler(trpcHandlerOptions);

Expected behavior/code

I wouldn't expect to see this warning. My machine shouldn't freeze every time I hit save.

Environment

I'm using PNPM with workspaces and Turborepo.