From: Archana Polampalli <archana.polampa...@windriver.com>

A vulnerability in Node.js HTTP servers allows an attacker to send a specially
crafted HTTP request with chunked encoding, leading to resource exhaustion and
denial of service (DoS). The server reads an unbounded number of bytes from a
single connection because no limit is enforced on the number of chunk extension bytes.
The issue can cause CPU and network bandwidth exhaustion, bypassing standard
safeguards such as timeouts and body size limits.

Signed-off-by: Archana Polampalli <archana.polampa...@windriver.com>
---
 .../nodejs/nodejs/CVE-2024-22019.patch        | 241 ++++++++++++++++++
 .../recipes-devtools/nodejs/nodejs_16.20.2.bb |   1 +
 2 files changed, 242 insertions(+)
 create mode 100644 meta-oe/recipes-devtools/nodejs/nodejs/CVE-2024-22019.patch

diff --git a/meta-oe/recipes-devtools/nodejs/nodejs/CVE-2024-22019.patch 
b/meta-oe/recipes-devtools/nodejs/nodejs/CVE-2024-22019.patch
new file mode 100644
index 000000000..26fd2ff87
--- /dev/null
+++ b/meta-oe/recipes-devtools/nodejs/nodejs/CVE-2024-22019.patch
@@ -0,0 +1,241 @@
+From 03a5c34a829742f1c47b68f831b2940af44addf6 Mon Sep 17 00:00:00 2001
+From: Paolo Insogna <pa...@cowtech.it>
+Date: Wed, 3 Jan 2024 07:23:15 +0100
+Subject: [PATCH] http: add maximum chunk extension size
+
+PR-URL: https://github.com/nodejs-private/node-private/pull/518
+Fixes: https://hackerone.com/reports/2233486
+Reviewed-By: Matteo Collina <matteo.coll...@gmail.com>
+Reviewed-By: Marco Ippolito <marcoippolit...@gmail.com>
+Reviewed-By: Rafael Gonzaga <rafael.n...@hotmail.com>
+
+CVE-ID: CVE-2024-22019
+
+Upstream-Status: Backport 
[https://github.com/nodejs/node/commit/03a5c34a829742f]
+
+Signed-off-by: Archana Polampalli <archana.polampa...@windriver.com>
+---
+ lib/_http_server.js                           |   8 ++
+ src/node_http_parser.cc                       |  19 ++-
+ .../test-http-chunk-extensions-limit.js       | 131 ++++++++++++++++++
+ 3 files changed, 157 insertions(+), 1 deletion(-)
+ create mode 100644 test/parallel/test-http-chunk-extensions-limit.js
+
+diff --git a/lib/_http_server.js b/lib/_http_server.js
+index 4e23266..263bb52 100644
+--- a/lib/_http_server.js
++++ b/lib/_http_server.js
+@@ -706,6 +706,11 @@ const requestHeaderFieldsTooLargeResponse = Buffer.from(
+   `HTTP/1.1 431 ${STATUS_CODES[431]}\r\n` +
+   'Connection: close\r\n\r\n', 'ascii'
+ );
++const requestChunkExtensionsTooLargeResponse = Buffer.from(
++  `HTTP/1.1 413 ${STATUS_CODES[413]}\r\n` +
++  'Connection: close\r\n\r\n', 'ascii',
++);
++
+ function socketOnError(e) {
+   // Ignore further errors
+   this.removeListener('error', socketOnError);
+@@ -719,6 +724,9 @@ function socketOnError(e) {
+         case 'HPE_HEADER_OVERFLOW':
+           response = requestHeaderFieldsTooLargeResponse;
+           break;
++      case 'HPE_CHUNK_EXTENSIONS_OVERFLOW':
++          response = requestChunkExtensionsTooLargeResponse;
++          break;
+         case 'ERR_HTTP_REQUEST_TIMEOUT':
+           response = requestTimeoutResponse;
+           break;
+diff --git a/src/node_http_parser.cc b/src/node_http_parser.cc
+index 74f3248..a137fd7 100644
+--- a/src/node_http_parser.cc
++++ b/src/node_http_parser.cc
+@@ -79,6 +79,8 @@ const uint32_t kOnExecute = 5;
+ const uint32_t kOnTimeout = 6;
+ // Any more fields than this will be flushed into JS
+ const size_t kMaxHeaderFieldsCount = 32;
++// Maximum size of chunk extensions
++const size_t kMaxChunkExtensionsSize = 16384;
+
+ const uint32_t kLenientNone = 0;
+ const uint32_t kLenientHeaders = 1 << 0;
+@@ -206,6 +208,7 @@ class Parser : public AsyncWrap, public StreamListener {
+
+   int on_message_begin() {
+     num_fields_ = num_values_ = 0;
++    chunk_extensions_nread_ = 0;
+     url_.Reset();
+     status_message_.Reset();
+     header_parsing_start_time_ = uv_hrtime();
+@@ -443,9 +446,22 @@ class Parser : public AsyncWrap, public StreamListener {
+     return 0;
+   }
+
+-  // Reset nread for the next chunk
++  int on_chunk_extension(const char* at, size_t length) {
++    chunk_extensions_nread_ += length;
++
++    if (chunk_extensions_nread_ > kMaxChunkExtensionsSize) {
++      llhttp_set_error_reason(&parser_,
++          "HPE_CHUNK_EXTENSIONS_OVERFLOW:Chunk extensions overflow");
++      return HPE_USER;
++    }
++
++    return 0;
++  }
++
++  // Reset nread for the next chunk and also reset the extensions counter
+   int on_chunk_header() {
+     header_nread_ = 0;
++    chunk_extensions_nread_ = 0;
+     return 0;
+   }
+
+@@ -887,6 +903,7 @@ class Parser : public AsyncWrap, public StreamListener {
+   const char* current_buffer_data_;
+   bool pending_pause_ = false;
+   uint64_t header_nread_ = 0;
++  uint64_t chunk_extensions_nread_ = 0;
+   uint64_t max_http_header_size_;
+   uint64_t headers_timeout_;
+   uint64_t header_parsing_start_time_ = 0;
+diff --git a/test/parallel/test-http-chunk-extensions-limit.js 
b/test/parallel/test-http-chunk-extensions-limit.js
+new file mode 100644
+index 0000000..6868b3d
+--- /dev/null
++++ b/test/parallel/test-http-chunk-extensions-limit.js
+@@ -0,0 +1,131 @@
++'use strict';
++
++const common = require('../common');
++const http = require('http');
++const net = require('net');
++const assert = require('assert');
++
++// Verify that chunk extensions are limited in size when sent all together.
++{
++  const server = http.createServer((req, res) => {
++    req.on('end', () => {
++      res.writeHead(200, { 'Content-Type': 'text/plain' });
++      res.end('bye');
++    });
++
++    req.resume();
++  });
++
++  server.listen(0, () => {
++    const sock = net.connect(server.address().port);
++    let data = '';
++
++    sock.on('data', (chunk) => data += chunk.toString('utf-8'));
++
++    sock.on('end', common.mustCall(function() {
++      assert.strictEqual(data, 'HTTP/1.1 413 Payload Too Large\r\nConnection: 
close\r\n\r\n');
++      server.close();
++    }));
++
++    sock.end('' +
++      'GET / HTTP/1.1\r\n' +
++      'Host: localhost:8080\r\n' +
++      'Transfer-Encoding: chunked\r\n\r\n' +
++      '2;' + 'A'.repeat(20000) + '=bar\r\nAA\r\n' +
++      '0\r\n\r\n'
++    );
++  });
++}
++
++// Verify that chunk extensions are limited in size when sent in intervals.
++{
++  const server = http.createServer((req, res) => {
++    req.on('end', () => {
++      res.writeHead(200, { 'Content-Type': 'text/plain' });
++      res.end('bye');
++    });
++
++    req.resume();
++  });
++
++  server.listen(0, () => {
++    const sock = net.connect(server.address().port);
++    let remaining = 20000;
++    let data = '';
++
++    const interval = setInterval(
++      () => {
++        if (remaining > 0) {
++          sock.write('A'.repeat(1000));
++        } else {
++          sock.write('=bar\r\nAA\r\n0\r\n\r\n');
++          clearInterval(interval);
++        }
++
++        remaining -= 1000;
++      },
++      common.platformTimeout(20),
++    ).unref();
++
++    sock.on('data', (chunk) => data += chunk.toString('utf-8'));
++
++    sock.on('end', common.mustCall(function() {
++      assert.strictEqual(data, 'HTTP/1.1 413 Payload Too Large\r\nConnection: 
close\r\n\r\n');
++      server.close();
++    }));
++
++    sock.write('' +
++    'GET / HTTP/1.1\r\n' +
++    'Host: localhost:8080\r\n' +
++    'Transfer-Encoding: chunked\r\n\r\n' +
++    '2;'
++    );
++  });
++}
++
++// Verify the chunk extensions is correctly reset after a chunk
++{
++  const server = http.createServer((req, res) => {
++    req.on('end', () => {
++      res.writeHead(200, { 'content-type': 'text/plain', 'connection': 
'close', 'date': 'now' });
++      res.end('bye');
++    });
++
++    req.resume();
++  });
++
++  server.listen(0, () => {
++    const sock = net.connect(server.address().port);
++    let data = '';
++
++    sock.on('data', (chunk) => data += chunk.toString('utf-8'));
++
++    sock.on('end', common.mustCall(function() {
++      assert.strictEqual(
++        data,
++        'HTTP/1.1 200 OK\r\n' +
++        'content-type: text/plain\r\n' +
++        'connection: close\r\n' +
++        'date: now\r\n' +
++        'Transfer-Encoding: chunked\r\n' +
++        '\r\n' +
++        '3\r\n' +
++        'bye\r\n' +
++        '0\r\n' +
++        '\r\n',
++      );
++
++      server.close();
++    }));
++
++    sock.end('' +
++      'GET / HTTP/1.1\r\n' +
++      'Host: localhost:8080\r\n' +
++      'Transfer-Encoding: chunked\r\n\r\n' +
++      '2;' + 'A'.repeat(10000) + '=bar\r\nAA\r\n' +
++      '2;' + 'A'.repeat(10000) + '=bar\r\nAA\r\n' +
++      '2;' + 'A'.repeat(10000) + '=bar\r\nAA\r\n' +
++      '0\r\n\r\n'
++    );
++  });
++}
+--
+2.40.0
diff --git a/meta-oe/recipes-devtools/nodejs/nodejs_16.20.2.bb 
b/meta-oe/recipes-devtools/nodejs/nodejs_16.20.2.bb
index 16593a0fe..b786c0273 100644
--- a/meta-oe/recipes-devtools/nodejs/nodejs_16.20.2.bb
+++ b/meta-oe/recipes-devtools/nodejs/nodejs_16.20.2.bb
@@ -27,6 +27,7 @@ SRC_URI = "http://nodejs.org/dist/v${PV}/node-v${PV}.tar.xz \
            file://0001-mips-Use-32bit-cast-for-operand-on-mips32.patch \
            file://0001-Nodejs-Fixed-pipes-DeprecationWarning.patch \
            file://CVE-2022-25883.patch \
+           file://CVE-2024-22019.patch \
            "
 SRC_URI:append:class-target = " \
            file://0001-Using-native-binaries.patch \
-- 
2.40.0

-=-=-=-=-=-=-=-=-=-=-=-
Links: You receive all messages sent to this group.
View/Reply Online (#109004): 
https://lists.openembedded.org/g/openembedded-devel/message/109004
Mute This Topic: https://lists.openembedded.org/mt/104524912/21656
Group Owner: openembedded-devel+ow...@lists.openembedded.org
Unsubscribe: https://lists.openembedded.org/g/openembedded-devel/unsub 
[arch...@mail-archive.com]
-=-=-=-=-=-=-=-=-=-=-=-

Reply via email to