Is there an AWS setting for how many concurrent long-polling requests Node.js can handle?


I'm setting up a new application that long-polls for messages from AWS SQS with an interval of 10 seconds. I tried to load-test it: once about 50 users are waiting on their requests, latency starts growing and reaches 15 seconds, and with 150 users it reaches 30 seconds. Is something wrong with my code, or do AWS/Node.js have some setting that limits this?

const port = process.env.PORT || 3001;
const express = require('express');
const app = express();
const AWS = require('aws-sdk');

AWS.config.update({region: 'eu-west-1'});

const MD5 = function(d){<md5function>}
const sleep = (waitTimeInMs) => new Promise(resolve => setTimeout(resolve, waitTimeInMs));
const SQS = new AWS.SQS({
  region: 'eu-west-1'
});
const LONG_POLL_TIMEOUT = 10; // seconds; SQS allows WaitTimeSeconds up to 20

// Validate the caller's token, then long-poll the matching SQS queue
async function checkQueue(req, res) {

  const {version, token} = req.params;
  const auth = req.query.auth;

  if (!isTokenValid(token, auth)) {
    // For invalid tokens, wait out the full long-poll interval before replying empty
    await sleep(LONG_POLL_TIMEOUT * 1000);
    res.send();
  } else {
    getUpdateMessage(version, token, res);
  }
}

// Resolve the per-version/per-token queue URL, then long-poll it for one message
function getUpdateMessage(version, token, res) {
  const urlParams = {
    QueueName: `_version-queue-${version}-${token}`
  };

  SQS.getQueueUrl(urlParams, (urlErr, urlData) => {
    if (urlErr) {
      res.status(204).send();
    } else {
      const messageParams = {
        QueueUrl: urlData.QueueUrl,
        WaitTimeSeconds: LONG_POLL_TIMEOUT,
      };
      SQS.receiveMessage(messageParams, (err, data) => {
        if (err) {
          res.status(204).send();
        } else {
          if (data.Messages) {
            res.send(data.Messages[0].Body);
            // Delete the message after it has been sent to the client
            SQS.deleteMessage({
              QueueUrl: urlData.QueueUrl,
              ReceiptHandle: data.Messages[0].ReceiptHandle
            }, (deleteErr) => {
              if (deleteErr) {
                // deletion failures are intentionally ignored
              }
            });
          } else {
            res.send();
          }
        }
      });
    }
  });
}

function isTokenValid(token, auth) {
  // check against tokens for last 14 days
  let dayNumber = Math.ceil(Date.now() / (24 * 3600 * 1000));
  for (let i = 0; i < 14; i++) {
    const stringToHash = `<string>`;
    if (MD5(stringToHash) === auth) {
      return true;
    }
    dayNumber--;
  }
  return false;
}
app.use(function(req, res, next) {
  res.header("Access-Control-Allow-Origin", "*");
  next();
});

app.get('/versions/:version/long_poll_updates/:token', function (req, res) {
  checkQueue(req, res);
});

app.get('/check', function (req, res) {
  res.send('I\'m ok!');
});

app.use((req, res) => {
  res.status(404).send("Sorry, that route doesn't exist. Have a nice day :)");
});

app.listen(port, () => {
  console.log('Server running at http://127.0.0.1:' + port + '/');
});

CPU utilisation was less than 10 percent.

se0ga
asked 4 years ago · 794 views

2 Answers

Here's the detailed documentation:

https://docs.aws.amazon.com/sdk-for-javascript/v2/developer-guide/node-configuring-maxsockets.html

Quote:

When using the default of https, the SDK takes the maxSockets value from the globalAgent. If the maxSockets value is not defined or is Infinity, the SDK assumes a maxSockets value of 50.

You are most likely observing this setting kicking in.
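For reference, here is a minimal sketch of how to raise that limit with a custom https agent, as described on the page above. The maxSockets value of 500 is just an example; pick a number above your expected count of concurrent long-poll requests:

const https = require('https');
const AWS = require('aws-sdk');

// Supply a custom agent so the SDK is not capped at the default 50 concurrent sockets
const agent = new https.Agent({
  keepAlive: true,  // reuse connections between long-poll requests
  maxSockets: 500   // example value; size it above your expected concurrency
});

AWS.config.update({
  region: 'eu-west-1',
  httpOptions: { agent }
});

With the default 50-socket cap, each additional waiting user beyond 50 has to queue behind an in-flight 10-second long poll, which matches the latency jump you observed at roughly 50 users.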

Kuba
answered 4 years ago

Thank you, this setting solved the problem!

se0ga
answered 4 years ago
