infinitered / nsfwjs

NSFW detection on the client-side via TensorFlow.js
https://nsfwjs.com/
MIT License

[Node.JS] Memory leak - Server reaches 100% memory after 5 or 6 days #741

Open Madriix opened 1 year ago

Madriix commented 1 year ago

Hi

With the HTTP server below, my server with 32 GB of memory reaches almost 100% memory usage after 4 to 6 days. Do you know where the problem could come from?

In the browser just type this: http://x.x.x.x:3000/?url=https://test.domain.com/images.jpg

const http = require('http');
const https = require('https');
const tf = require('@tensorflow/tfjs-node');
const nsfw = require('nsfwjs');
const url1 = require('url');

const server = http.createServer(async (req, res) => {

  const parsedUrl = url1.parse(req.url, true);
  const urlValue = parsedUrl.query.url;

  console.log(urlValue);

  if (typeof urlValue !== "undefined") {
    try {
      const url = new URL(urlValue);
      const client = (url.protocol === "https:") ? https : http;

      const requestTimeout = setTimeout(() => {
        res.writeHead(408, {'Content-Type': 'text/plain'});
        res.end('Request Timeout');
      }, 5000); // Set a timeout of 5 seconds for the request
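      // Note: this timeout only ends the HTTP response; it does not abort the
      // client.get request below, and the handler keeps running, so a late
      // writeHead() after the timeout fires will throw ERR_HTTP_HEADERS_SENT.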

      const response = await new Promise((resolve, reject) => {
        client.get(url, resolve).on('error', reject);
      });

      const chunks = [];
      response.on('data', (chunk) => chunks.push(chunk));

      await new Promise((resolve, reject) => {
        response.on('end', resolve).on('error', reject);
      });

      clearTimeout(requestTimeout); // Clear the timeout if the request completes before the timeout is reached
      const buffer = Buffer.concat(chunks);
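      // Note: nsfw.load() on the next line runs for every request, re-loading
      // the full model each time.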
      const model = await nsfw.load(); // To load a local model, nsfw.load('file://./path/to/model/')
      // Image must be in tf.tensor3d format
      // you can convert image to tf.tensor3d with tf.node.decodeImage(Uint8Array,channels)
      const image = await tf.node.decodeImage(buffer, 3);
      const predictions = await model.classify(image);
      image.dispose(); // Tensor memory must be managed explicitly (it is not sufficient to let a tf.Tensor go out of scope for its memory to be released).
      console.log(predictions);
      res.writeHead(200, {
        'Content-Type': 'application/json'
      });
      res.end(JSON.stringify(predictions));
    } catch(e) {
      console.error(e);
      res.writeHead(400, {'Content-Type': 'text/plain'});
      res.end('Bad Request : '+e);
    }
  } else {
    res.writeHead(400, {'Content-Type': 'text/plain'});
    res.end('Bad Request (2)');
  }

});

server.listen(3000, () => {
  console.log('The server is listening on port 3000...');
});

It's a pity that there is a memory leak, because the detection itself works well.

Madriix commented 1 year ago

Additional info: each time an image is scanned, memory increases by approximately 150 MB and never decreases. It's the same with Express.
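
For anyone measuring this: tf.memory() reports what tfjs is currently tracking, so logging it per request shows whether tensors accumulate. A minimal diagnostic sketch (where exactly to log is up to you):

// Log after each classification; if numTensors climbs steadily,
// tensors (model weights, decoded images) are being created but not disposed.
const { numTensors, numBytes } = tf.memory();
console.log(`tfjs tensors: ${numTensors}, tfjs bytes: ${numBytes}`);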

GantMan commented 1 year ago

Are you loading the model over and over with no disposal?

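If that is the case, loading the model once at startup and sharing it across requests avoids re-creating its weight tensors per request. A minimal sketch against the server above (loadModel is just an illustrative name):

const nsfw = require('nsfwjs');

let model; // shared by all requests

const loadModel = async () => {
  model = await nsfw.load(); // load once at startup, not per request
};

// In the handler, drop `const model = await nsfw.load();` and use the
// shared instance instead:
//   const predictions = await model.classify(image);

loadModel().then(() => server.listen(3000));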

Madriix commented 1 year ago

@GantMan do you mean that I have to delete "const url = new URL(urlValue);", for example like this:

let url = new URL(urlValue);
....
url = null;

Will it work?

Madriix commented 1 year ago

It seems to work with this code: https://github.com/infinitered/nsfwjs/blob/master/example/node_demo/server.js

I modified it a little because the demo version did not work for me:

const express = require('express')
const multer = require('multer')
const jpeg = require('jpeg-js')

const tf = require('@tensorflow/tfjs-node');
const nsfw = require('nsfwjs');

const app = express()
const upload = multer()

let _model
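
// Note: convert() below is kept from the original demo but is no longer
// called; the handler uses tf.node.decodeImage instead (see the
// commented-out lines further down).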

const convert = async (img) => {
  // Decoded image in UInt8 Byte array
  const image = await jpeg.decode(img, true)

  const numChannels = 3
  const numPixels = image.width * image.height
  const values = new Int32Array(numPixels * numChannels)

  for (let i = 0; i < numPixels; i++)
    for (let c = 0; c < numChannels; ++c)
      values[i * numChannels + c] = image.data[i * 4 + c]

  return tf.tensor3d(values, [image.height, image.width, numChannels], 'int32')
}

app.get('/', async (req, res) => {
  if (!req.query.url)
    res.status(400).send("Missing url query parameter")
  else {
    try {
      const url = new URL(req.query.url);
      const client = (url.protocol == "https:") ? require('https') : require('http');

      console.log(req.query.url);

      const response = await new Promise((resolve, reject) => {
        client.get(url, resolve).on('error', reject);
      });

      const chunks = [];
      response.on('data', (chunk) => chunks.push(chunk));

      await new Promise((resolve, reject) => {
        response.on('end', resolve).on('error', reject);
      });

      const buffer = Buffer.concat(chunks)
      //const image = await convert(req.query.url)
      //const predictions = await _model.classify(image)
      const image = await tf.node.decodeImage(buffer, 3)
      const predictions = await _model.classify(image)
      image.dispose()
      res.json(predictions)
      console.log(predictions)
    }
    catch (e) {
      console.error(e);
      res.writeHead(400, {
        'Content-Type': 'text/plain'
      });
      res.end('Bad Request : ' + e);
    }

  }
})

const load_model = async () => {
  _model = await nsfw.load()
}

// Keep the model in memory, make sure it's loaded only once
load_model().then(() => app.listen(3000))

After a few days it uses 800 MB of memory, versus 150 MB at startup. It seems there is still a memory leak, but it grows very slowly; at least it does not climb to 30 GB in 4 days.
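
One possible source of the remaining slow growth: in the handler above, image.dispose() only runs if _model.classify(image) succeeds, so any request where classify throws leaks the decoded tensor. A sketch that guarantees disposal (same handler, with only a try/finally added):

const image = tf.node.decodeImage(buffer, 3)
try {
  const predictions = await _model.classify(image)
  res.json(predictions)
  console.log(predictions)
} finally {
  image.dispose() // runs whether classify succeeds or throws
}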

johnhom1024 commented 1 year ago

I had the same problem with memory: every time I upload an image, the program consumes more and more memory.

abdullahIsa commented 5 months ago

Hello, I am having this too. Any solution?