Is there an AWS or Node.js setting that limits how many concurrent long-polling requests can be handled?


I'm setting up a new application that long-polls AWS SQS for messages with a 10-second wait time. I load-tested it: with 50 users waiting on their requests, latency starts growing and reaches 15 seconds, and with 150 users it reaches 30 seconds. Is something wrong with my code, or do AWS/Node.js have some setting that limits this?

const port = process.env.PORT || 3001;
const express = require('express');
const app = express();
const AWS = require('aws-sdk');

AWS.config.update({region: 'eu-west-1'});

const MD5 = function(d){<md5function>}
const sleep = (waitTimeInMs) => new Promise(resolve => setTimeout(resolve, waitTimeInMs));
const SQS = new AWS.SQS({
  region: 'eu-west-1'
});
const LONG_POLL_TIMEOUT = 10;

async function checkQueue(req, res) {

  const {version, token} = req.params;
  const auth = req.query.auth;

  if (!isTokenValid(token, auth)) {
    // Invalid auth: hold the connection for the full long-poll window, then reply with an empty body
    await sleep(LONG_POLL_TIMEOUT * 1000);
    res.send();
  } else {
    getUpdateMessage(version, token, res);
  }
}

function getUpdateMessage(version, token, res) {
  const urlParams = {
    QueueName: `_version-queue-${version}-${token}`
  };

  SQS.getQueueUrl(urlParams, (urlErr, urlData) => {
    if (urlErr) {
      // Queue lookup failed (e.g. the queue does not exist): reply with no content
      res.status(204).send();
    } else {
      const messageParams = {
        QueueUrl: urlData.QueueUrl,
        WaitTimeSeconds: LONG_POLL_TIMEOUT,
      };
      SQS.receiveMessage(messageParams, (err, data) => {
        if (err) {
          res.status(204).send();
        } else {
          if (data.Messages) {
            res.send(data.Messages[0].Body);
            SQS.deleteMessage({
              QueueUrl: urlData.QueueUrl,
              ReceiptHandle: data.Messages[0].ReceiptHandle
            }, (deleteErr) => {
              // Delete failures are deliberately ignored; the message simply becomes
              // visible again after the queue's visibility timeout.
            });
          } else {
            res.send();
          }
        }
      });
    }
  });
}

function isTokenValid(token, auth) {
  // check against tokens for last 14 days
  let dayNumber = Math.ceil(Date.now() / (24 * 3600 * 1000));
  for (let i = 0; i < 14; i++) {
    const stringToHash = `<string>`;
    if (MD5(stringToHash) == auth) {
      return true;
    }
    dayNumber--;
  }
  return false;
}
app.use(function(req, res, next) {
  res.header("Access-Control-Allow-Origin", "*");
  next();
});

app.get('/versions/:version/long_poll_updates/:token', function (req, res) {
  checkQueue(req, res);
});

app.get('/check', function (req, res) {
  res.send('I\'m ok!');
});

app.use((req, res) => {
  res.status(404).send("Sorry, that route doesn't exist. Have a nice day :)");
});

app.listen(port, () => {
  console.log('Server running at http://127.0.0.1:' + port + '/');
});

CPU utilisation was less than 10 percent during the test.

se0ga
Asked 5 years ago · Viewed 802 times
2 Answers

Here's the detailed documentation:

https://docs.aws.amazon.com/sdk-for-javascript/v2/developer-guide/node-configuring-maxsockets.html

Quote:

When using the default of https, the SDK takes the maxSockets value from the globalAgent. If the maxSockets value is not defined or is Infinity, the SDK assumes a maxSockets value of 50.

You are most likely observing this setting kicking in.
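If that is the cause, the fix is to pass a custom https.Agent with a higher maxSockets value to the SDK, which is the pattern the linked page describes. A minimal sketch, assuming the aws-sdk v2 setup from the question; the value 500 is only illustrative, size it to the number of clients you expect to hold long polls open at the same time:

const https = require('https');
const AWS = require('aws-sdk');

// Each in-flight receiveMessage call holds a socket for up to WaitTimeSeconds,
// so the agent needs at least as many sockets as concurrent long-polling users.
const sqsAgent = new https.Agent({
  keepAlive: true,   // reuse TLS connections between SQS calls
  maxSockets: 500    // illustrative; overrides the SDK's implicit default of 50
});

AWS.config.update({
  region: 'eu-west-1',
  httpOptions: { agent: sqsAgent }
});

const SQS = new AWS.SQS();

With keepAlive enabled the SDK also skips the TCP/TLS handshake on repeated calls, which helps latency independently of the socket limit.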

Kuba
Answered 5 years ago
Expert
Reviewed 1 month ago

Thank you, this setting solved the problem!

se0ga
Answered 5 years ago
