farhadi / node-smpp

SMPP client and server implementation in node.js
MIT License
417 stars 177 forks source link

Implement TPS as Client #219

Closed guicuton closed 1 year ago

guicuton commented 2 years ago

Hi guys,

I'm using a provider that set a TPS limit of 50 sms/s for me. Does the lib have a way to handle this, or will I need to create a custom method?

Roughly, I've been thinking of something like:

let smsCount = 0;

submit.sm(..., (pdu) => {
   smsCount++;
});

if(smsCount === 50) {
    smsCount = 0;
    setTimeout(() => { /* resume sending */ }, 1000);
}

After 50 messages are sent, apply the timeout and restart the counter. The idea behind this is that I have multiple messages being rejected by the provider due to the TPS limitation.

Would this work, or am I completely wrong?

Note: The code is just an idea of what I'm trying to achieve.

tapan-thapa commented 2 years ago

May be you can try async.eachLimit.

https://caolan.github.io/async/v3/docs.html#eachLimit

guicuton commented 2 years ago

May be you can try async.eachLimit.

https://caolan.github.io/async/v3/docs.html#eachLimit

Hi @tapan-thapa, thank you for your suggestion, but could you provide a sample of how I would use it here?

tapan-thapa commented 2 years ago

Here is a sample. It's not for node-smpp, but you can take the idea from it.

` require('dotenv').config(); const csv = require('csv-parser'); const fs = require('fs'); const axios = require('axios'); const results = []; const createCsvWriter = require('csv-writer').createObjectCsvWriter; const csvWriter = createCsvWriter({ path: process.env.DESTINATION_PATH, header: [ { id: 'url', title: 'URL' }, { id: 'statuscode', title: 'STATUSCODE' }, { id: 'device', title: 'DEVICE' }, ], }); var asyncModule = require('async');

// Read every row of the source CSV into `results`, then process the rows
// with a concurrency limit of 3 — this is the rate-limiting idea: eachOfLimit
// never has more than 3 callAPI calls in flight at once.
fs.createReadStream(process.env.SOURCE_PATH) .pipe(csv()) .on('data', (data) => results.push(data)) .on('end', () => { asyncModule.eachOfLimit(results, 3, callAPI, function (err) { if (err) { console.log(err); } else { console.log('All records processed successfully'); } });

/**
 * eachOfLimit iteratee: HEAD-checks one URL with a mobile and then a desktop
 * user agent, logging the HTTP status (or the error message on failure) and
 * appending one CSV row per device via `csvWriter`.
 *
 * Failures are recorded in the `statuscode` column rather than propagated,
 * so one bad URL never aborts the batch; `cb()` is invoked exactly once,
 * with no arguments, after both probes finish.
 *
 * Fix vs. the original: the desktop request now uses the encoded URL too
 * (previously only the mobile request went through `encodeURI`), and the
 * duplicated mobile/desktop promise chains are collapsed into one helper.
 *
 * @param {{URL: string}} result - one parsed CSV row (assumes a `URL` column — TODO confirm against the source file)
 * @param {number|string} key - the row's index, used only for logging
 * @param {Function} cb - async.eachOfLimit completion callback
 */
function callAPI(result, key, cb) {
  const encodedUrl = encodeURI(result.URL);

  // One HEAD request + one CSV record for a single device profile.
  async function probe(userAgent, device) {
    let statuscode;
    try {
      const response = await axios.head(encodedUrl, {
        headers: { 'User-Agent': userAgent },
      });
      statuscode = response.status;
    } catch (error) {
      // Record the failure instead of throwing so the batch keeps going.
      statuscode = error.message;
    }
    console.log(key, result.URL, statuscode, device);
    await csvWriter.writeRecords([{ url: result.URL, statuscode, device }]);
  }

  // Mobile first, then desktop — same order as before. callAPI deliberately
  // stays non-async (returns undefined) so eachOfLimit relies solely on cb().
  probe(process.env.USER_AGENT_M, 'Mobile')
    .then(() => probe(process.env.USER_AGENT_D, 'Desktop'))
    .then(() => cb());
}

});

`

tapan-thapa commented 2 years ago

One more idea.

You can put your messages in bullmq at any speed then pick at your desired speed at worker code.

https://docs.bullmq.io/guide/rate-limiting

guicuton commented 1 year ago

I'm using RabbitMQ and added a timeout before acking each message. It's working great!

Thank you all