diff --git a/deps/http_parser/http_parser.c b/deps/http_parser/http_parser.c
index 6522618671d09c..46764bced09478 100644
--- a/deps/http_parser/http_parser.c
+++ b/deps/http_parser/http_parser.c
@@ -25,6 +25,8 @@
 #include <string.h>
 #include <limits.h>

+static uint32_t max_header_size = HTTP_MAX_HEADER_SIZE;
+
 #ifndef ULLONG_MAX
 # define ULLONG_MAX ((uint64_t) -1) /* 2^64-1 */
 #endif
@@ -137,20 +139,20 @@ do {                                                                 \
 } while (0)

 /* Don't allow the total size of the HTTP headers (including the status
- * line) to exceed HTTP_MAX_HEADER_SIZE.  This check is here to protect
+ * line) to exceed max_header_size.  This check is here to protect
  * embedders against denial-of-service attacks where the attacker feeds
  * us a never-ending header that the embedder keeps buffering.
  *
  * This check is arguably the responsibility of embedders but we're doing
  * it on the embedder's behalf because most won't bother and this way we
- * make the web a little safer.  HTTP_MAX_HEADER_SIZE is still far bigger
+ * make the web a little safer.  max_header_size is still far bigger
  * than any reasonable request or response so this should never affect
  * day-to-day operation.
  */
 #define COUNT_HEADER_SIZE(V)                                         \
 do {                                                                 \
   parser->nread += (V);                                              \
-  if (UNLIKELY(parser->nread > (HTTP_MAX_HEADER_SIZE))) {            \
+  if (UNLIKELY(parser->nread > max_header_size)) {                   \
     SET_ERRNO(HPE_HEADER_OVERFLOW);                                  \
     goto error;                                                      \
   }                                                                  \
@@ -1471,7 +1473,7 @@ size_t http_parser_execute (http_parser *parser,
         const char* p_lf;
         size_t limit = data + len - p;

-        limit = MIN(limit, HTTP_MAX_HEADER_SIZE);
+        limit = MIN(limit, max_header_size);

         p_cr = (const char*) memchr(p, CR, limit);
         p_lf = (const char*) memchr(p, LF, limit);
@@ -2437,3 +2439,8 @@ http_parser_version(void) {
          HTTP_PARSER_VERSION_MINOR * 0x00100 |
          HTTP_PARSER_VERSION_PATCH * 0x00001;
 }
+
+void
+http_parser_set_max_header_size(uint32_t size) {
+  max_header_size = size;
+}
diff --git a/deps/http_parser/http_parser.h b/deps/http_parser/http_parser.h
index 1fbf30e2b4740b..ea7bafef2c3178 100644
--- a/deps/http_parser/http_parser.h
+++ b/deps/http_parser/http_parser.h
@@ -427,6 +427,9 @@ void http_parser_pause(http_parser *parser, int paused);
 /* Checks if this is the final chunk of the body. */
 int http_body_is_final(const http_parser *parser);

+/* Change the maximum header size provided at compile time. */
+void http_parser_set_max_header_size(uint32_t size);
+
 #ifdef __cplusplus
 }
 #endif
diff --git a/doc/api/cli.md b/doc/api/cli.md
index 22e087f31725e2..97cfea239e4f1f 100644
--- a/doc/api/cli.md
+++ b/doc/api/cli.md
@@ -197,6 +197,13 @@ added: v9.0.0

 Specify the `file` of the custom [experimental ECMAScript Module][] loader.

+### `--max-http-header-size=size`
+<!-- YAML
+added: REPLACEME
+-->
+
+Specify the maximum size, in bytes, of HTTP headers. Defaults to 8KB.
+
 ### `--napi-modules`
 <!-- YAML
 added: v7.10.0
@@ -620,6 +627,7 @@ Node.js options that are allowed are:
 - `--inspect-brk`
 - `--inspect-port`
 - `--loader`
+- `--max-http-header-size`
 - `--napi-modules`
 - `--no-deprecation`
 - `--no-force-async-hooks-checks`
diff --git a/doc/node.1 b/doc/node.1
index ca16c78a51777c..66933066bcae15 100644
--- a/doc/node.1
+++ b/doc/node.1
@@ -139,6 +139,9 @@ Specify the
 as a custom loader, to load
 .Fl -experimental-modules .
 .
+.It Fl -max-http-header-size Ns = Ns Ar size
+Specify the maximum size of HTTP headers in bytes. Defaults to 8KB.
+.
 .It Fl -napi-modules
 This option is a no-op. It is kept for compatibility.
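
A minimal sketch (not part of this patch) of the user-visible behavior the new flag controls, assuming a build that includes this change; the filename `overflow-sketch.js` and the header sizes are made up for illustration. Under `node --max-http-header-size=1024 overflow-sketch.js` the oversized request below is rejected with `HPE_HEADER_OVERFLOW`; under the stock 8KB default it reaches the request handler:

  'use strict';

  const http = require('http');
  const net = require('net');

  const server = http.createServer((req, res) => {
    // Only reached when the request's headers fit under the configured limit.
    res.end('accepted');
  });

  // The parser surfaces an oversized header block here; with the lowered
  // limit, err.code should be 'HPE_HEADER_OVERFLOW' (see the tests below).
  server.on('clientError', (err, socket) => {
    console.log('parser error:', err.code);
    socket.destroy();
  });

  server.listen(0, () => {
    const client = net.connect(server.address().port, () => {
      // Roughly 2KB of headers: over a 1024-byte limit, under the 8KB default.
      client.write('GET / HTTP/1.1\r\n' +
                   'Host: localhost\r\n' +
                   'Connection: close\r\n' +
                   'X-Filler: ' + 'a'.repeat(2048) + '\r\n\r\n');
      client.resume();
      client.on('close', () => server.close());
    });
  });

The rest of the patch wires the limit through the option parser and into both parser backends: `http_parser_set_max_header_size()` for the vendored legacy parser above, and the `TrackHeader()` check for the experimental llhttp path below.
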
diff --git a/lib/internal/print_help.js b/lib/internal/print_help.js
index c3e984dd9f4987..fdec7bd09bd87e 100644
--- a/lib/internal/print_help.js
+++ b/lib/internal/print_help.js
@@ -69,6 +69,7 @@ function getArgDescription(type) {
     case 'kHostPort':
       return '[host:]port';
     case 'kInteger':
+    case 'kUInteger':
     case 'kString':
     case 'kStringList':
       return '...';
diff --git a/src/node_http_parser_impl.h b/src/node_http_parser_impl.h
index 4233ccbfcaae65..9274f9dae0b7f9 100644
--- a/src/node_http_parser_impl.h
+++ b/src/node_http_parser_impl.h
@@ -830,7 +830,7 @@ class Parser : public AsyncWrap, public StreamListener {
   int TrackHeader(size_t len) {
 #ifdef NODE_EXPERIMENTAL_HTTP
     header_nread_ += len;
-    if (header_nread_ >= kMaxHeaderSize) {
+    if (header_nread_ >= per_process_opts->max_http_header_size) {
       llhttp_set_error_reason(&parser_, "HPE_HEADER_OVERFLOW:Header overflow");
       return HPE_USER;
     }
@@ -892,9 +892,6 @@ class Parser : public AsyncWrap, public StreamListener {
   typedef int (Parser::*DataCall)(const char* at, size_t length);

   static const parser_settings_t settings;
-#ifdef NODE_EXPERIMENTAL_HTTP
-  static const uint64_t kMaxHeaderSize = 8 * 1024;
-#endif  /* NODE_EXPERIMENTAL_HTTP */
 };

 const parser_settings_t Parser::settings = {
@@ -916,6 +913,14 @@ const parser_settings_t Parser::settings = {
 };


+#ifndef NODE_EXPERIMENTAL_HTTP
+void InitMaxHttpHeaderSizeOnce() {
+  const uint32_t max_http_header_size = per_process_opts->max_http_header_size;
+  http_parser_set_max_header_size(max_http_header_size);
+}
+#endif  /* NODE_EXPERIMENTAL_HTTP */
+
+
 void InitializeHttpParser(Local<Object> target,
                           Local<Value> unused,
                           Local<Context> context,
@@ -965,6 +970,11 @@ void InitializeHttpParser(Local<Object> target,
   target->Set(env->context(),
               FIXED_ONE_BYTE_STRING(env->isolate(), "HTTPParser"),
               t->GetFunction(env->context()).ToLocalChecked()).FromJust();
+
+#ifndef NODE_EXPERIMENTAL_HTTP
+  static uv_once_t init_once = UV_ONCE_INIT;
+  uv_once(&init_once, InitMaxHttpHeaderSizeOnce);
+#endif  /* NODE_EXPERIMENTAL_HTTP */
 }

 }  // anonymous namespace
diff --git a/src/node_options-inl.h b/src/node_options-inl.h
index d25557e6a50637..f482bcd36660ed 100644
--- a/src/node_options-inl.h
+++ b/src/node_options-inl.h
@@ -39,6 +39,19 @@ void OptionsParser<Options>::AddOption(const std::string& name,
                  help_text});
 }

+template <typename Options>
+void OptionsParser<Options>::AddOption(const std::string& name,
+                                       const std::string& help_text,
+                                       uint64_t Options::* field,
+                                       OptionEnvvarSettings env_setting) {
+  options_.emplace(
+      name,
+      OptionInfo{kUInteger,
+                 std::make_shared<SimpleOptionField<uint64_t>>(field),
+                 env_setting,
+                 help_text});
+}
+
 template <typename Options>
 void OptionsParser<Options>::AddOption(const std::string& name,
                                        const std::string& help_text,
@@ -401,6 +414,9 @@ void OptionsParser<Options>::Parse(
       case kInteger:
         *Lookup<int64_t>(info.field, options) = std::atoll(value.c_str());
         break;
+      case kUInteger:
+        *Lookup<uint64_t>(info.field, options) = std::stoull(value.c_str());
+        break;
       case kString:
         *Lookup<std::string>(info.field, options) = value;
         break;
diff --git a/src/node_options.cc b/src/node_options.cc
index d665bc40400071..117bff4a944b39 100644
--- a/src/node_options.cc
+++ b/src/node_options.cc
@@ -259,6 +259,10 @@ PerProcessOptionsParser::PerProcessOptionsParser() {
             kAllowedInEnvironment);
   AddAlias("--trace-events-enabled", {
     "--trace-event-categories", "v8,node,node.async_hooks" });
+  AddOption("--max-http-header-size",
+            "set the maximum size of HTTP headers (default: 8KB)",
+            &PerProcessOptions::max_http_header_size,
+            kAllowedInEnvironment);
   AddOption("--v8-pool-size",
             "set V8's thread pool size",
             &PerProcessOptions::v8_thread_pool_size,
@@ -429,6 +433,9 @@ void GetOptions(const FunctionCallbackInfo<Value>& args) {
       case kInteger:
         value = Number::New(isolate, *parser.Lookup<int64_t>(field, opts));
         break;
+      case kUInteger:
+        value = Number::New(isolate, *parser.Lookup<uint64_t>(field, opts));
+        break;
       case kString:
         if (!ToV8Value(context, *parser.Lookup<std::string>(field, opts))
                  .ToLocal(&value)) {
@@ -516,6 +523,7 @@ void Initialize(Local<Object> target,
   NODE_DEFINE_CONSTANT(types, kV8Option);
   NODE_DEFINE_CONSTANT(types, kBoolean);
   NODE_DEFINE_CONSTANT(types, kInteger);
+  NODE_DEFINE_CONSTANT(types, kUInteger);
   NODE_DEFINE_CONSTANT(types, kString);
   NODE_DEFINE_CONSTANT(types, kHostPort);
   NODE_DEFINE_CONSTANT(types, kStringList);
diff --git a/src/node_options.h b/src/node_options.h
index d317e12b6f0db8..ae7d7a4be5f065 100644
--- a/src/node_options.h
+++ b/src/node_options.h
@@ -151,6 +151,7 @@ class PerProcessOptions : public Options {
   std::string title;
   std::string trace_event_categories;
   std::string trace_event_file_pattern = "node_trace.${rotation}.log";
+  uint64_t max_http_header_size = 8 * 1024;
   int64_t v8_thread_pool_size = 4;
   bool zero_fill_all_buffers = false;

@@ -203,6 +204,7 @@ enum OptionType {
   kV8Option,
   kBoolean,
   kInteger,
+  kUInteger,
   kString,
   kHostPort,
   kStringList,
@@ -229,6 +231,10 @@ class OptionsParser {
                  const std::string& help_text,
                  bool Options::* field,
                  OptionEnvvarSettings env_setting = kDisallowedInEnvironment);
+  void AddOption(const std::string& name,
+                 const std::string& help_text,
+                 uint64_t Options::* field,
+                 OptionEnvvarSettings env_setting = kDisallowedInEnvironment);
   void AddOption(const std::string& name,
                  const std::string& help_text,
                  int64_t Options::* field,
diff --git a/test/sequential/test-http-max-http-headers.js b/test/sequential/test-http-max-http-headers.js
index f1be923e225989..b91135a61a2a5a 100644
--- a/test/sequential/test-http-max-http-headers.js
+++ b/test/sequential/test-http-max-http-headers.js
@@ -4,10 +4,14 @@ const common = require('../common');
 const assert = require('assert');
 const http = require('http');
 const net = require('net');
-const MAX = 8 * 1024; // 8KB
+const MAX = +(process.argv[2] || 8 * 1024);  // Command line option, or 8KB.

 const { getOptionValue } = require('internal/options');

+console.log('pid is', process.pid);
+console.log('max header size is', getOptionValue('--max-http-header-size'));
+console.log('current http parser is', getOptionValue('--http-parser'));
+
 // Verify that we cannot receive more than 8KB of headers.
 function once(cb) {
@@ -38,19 +42,15 @@ function fillHeaders(headers, currentSize, valid = false) {

   // Generate valid headers
   if (valid) {
-    // TODO(mcollina): understand why -9 is needed instead of -1
-    headers = headers.slice(0, -9);
+    // TODO(mcollina): understand why -32 is needed instead of -1
+    headers = headers.slice(0, -32);
   }
   return headers + '\r\n\r\n';
 }

-const timeout = common.platformTimeout(10);
-
 function writeHeaders(socket, headers) {
   const array = [];
-
-  // This is off from 1024 so that \r\n does not get split
-  const chunkSize = 1000;
+  const chunkSize = 100;
   let last = 0;

   for (let i = 0; i < headers.length / chunkSize; i++) {
@@ -65,19 +65,25 @@ function writeHeaders(socket, headers) {
   next();

   function next() {
-    if (socket.write(array.shift())) {
-      if (array.length === 0) {
-        socket.end();
-      } else {
-        setTimeout(next, timeout);
-      }
+    if (socket.destroyed) {
+      console.log('socket was destroyed early, data left to write:',
+                  array.join('').length);
+      return;
+    }
+
+    const chunk = array.shift();
+
+    if (chunk) {
+      console.log('writing chunk of size', chunk.length);
+      socket.write(chunk, next);
     } else {
-      socket.once('drain', next);
+      socket.end();
     }
   }
 }

 function test1() {
+  console.log('test1');
   let headers =
     'HTTP/1.1 200 OK\r\n' +
     'Content-Length: 0\r\n' +
@@ -92,6 +98,9 @@ function test1() {
       writeHeaders(sock, headers);
       sock.resume();
     });
+
+    // The socket might error but that's ok
+    sock.on('error', () => {});
   });

   server.listen(0, common.mustCall(() => {
@@ -100,17 +109,17 @@
     client.on('error', common.mustCall((err) => {
       assert.strictEqual(err.code, 'HPE_HEADER_OVERFLOW');
-      server.close();
-      setImmediate(test2);
+      server.close(test2);
     }));
   }));
 }

 const test2 = common.mustCall(() => {
+  console.log('test2');
   let headers =
     'GET / HTTP/1.1\r\n' +
     'Host: localhost\r\n' +
-    'Agent: node\r\n' +
+    'Agent: nod2\r\n' +
     'X-CRASH: ';

   // /, Host, localhost, Agent, node, X-CRASH, a...
@@ -119,7 +128,7 @@

   const server = http.createServer(common.mustNotCall());

-  server.on('clientError', common.mustCall((err) => {
+  server.once('clientError', common.mustCall((err) => {
     assert.strictEqual(err.code, 'HPE_HEADER_OVERFLOW');
   }));

@@ -131,34 +140,46 @@
   });

   finished(client, common.mustCall((err) => {
-    server.close();
-    setImmediate(test3);
+    server.close(test3);
   }));
 }));
 });

 const test3 = common.mustCall(() => {
+  console.log('test3');
   let headers =
     'GET / HTTP/1.1\r\n' +
     'Host: localhost\r\n' +
-    'Agent: node\r\n' +
+    'Agent: nod3\r\n' +
     'X-CRASH: ';

   // /, Host, localhost, Agent, node, X-CRASH, a...
   const currentSize = 1 + 4 + 9 + 5 + 4 + 7;
   headers = fillHeaders(headers, currentSize, true);

+  console.log('writing', headers.length);
+
   const server = http.createServer(common.mustCall((req, res) => {
-    res.end('hello world');
-    setImmediate(server.close.bind(server));
+    res.end('hello from test3 server');
+    server.close();
   }));

+  server.on('clientError', (err) => {
+    console.log(err.code);
+    if (err.code === 'HPE_HEADER_OVERFLOW') {
+      console.log(err.rawPacket.toString('hex'));
+    }
+  });
+  server.on('clientError', common.mustNotCall());
+
   server.listen(0, common.mustCall(() => {
     const client = net.connect(server.address().port);
     client.on('connect', () => {
       writeHeaders(client, headers);
       client.resume();
     });
+
+    client.pipe(process.stdout);
   }));
 });
diff --git a/test/sequential/test-set-http-max-http-headers.js b/test/sequential/test-set-http-max-http-headers.js
new file mode 100644
index 00000000000000..657f9948be149d
--- /dev/null
+++ b/test/sequential/test-set-http-max-http-headers.js
@@ -0,0 +1,109 @@
+'use strict';
+
+const common = require('../common');
+const assert = require('assert');
+const { spawn } = require('child_process');
+const path = require('path');
+const testName = path.join(__dirname, 'test-http-max-http-headers.js');
+const parsers = ['legacy', 'llhttp'];
+
+const timeout = common.platformTimeout(100);
+
+const tests = [];
+
+function test(fn) {
+  tests.push(fn);
+}
+
+parsers.forEach((parser) => {
+  test(function(cb) {
+    console.log('running subtest expecting failure');
+
+    // Validate that the test fails if the max header size is too small.
+    const args = ['--expose-internals',
+                  `--http-parser=${parser}`,
+                  '--max-http-header-size=1024',
+                  testName];
+    const cp = spawn(process.execPath, args, { stdio: 'inherit' });
+
+    cp.on('close', common.mustCall((code, signal) => {
+      assert.strictEqual(code, 1);
+      assert.strictEqual(signal, null);
+      cb();
+    }));
+  });
+
+  test(function(cb) {
+    console.log('running subtest expecting success');
+
+    const env = Object.assign({}, process.env, {
+      NODE_DEBUG: 'http'
+    });
+
+    // Validate that the test now passes if the same limit becomes large enough.
+    const args = ['--expose-internals',
+                  `--http-parser=${parser}`,
+                  '--max-http-header-size=1024',
+                  testName,
+                  '1024'];
+    const cp = spawn(process.execPath, args, {
+      env,
+      stdio: 'inherit'
+    });
+
+    cp.on('close', common.mustCall((code, signal) => {
+      assert.strictEqual(code, 0);
+      assert.strictEqual(signal, null);
+      cb();
+    }));
+  });
+
+  // Next, repeat the same checks using NODE_OPTIONS if it is supported.
+  if (!process.config.variables.node_without_node_options) {
+    const env = Object.assign({}, process.env, {
+      NODE_OPTIONS: `--http-parser=${parser} --max-http-header-size=1024`
+    });
+
+    test(function(cb) {
+      console.log('running subtest expecting failure');
+
+      // Validate that the test fails if the max header size is too small.
+      const args = ['--expose-internals', testName];
+      const cp = spawn(process.execPath, args, { env, stdio: 'inherit' });
+
+      cp.on('close', common.mustCall((code, signal) => {
+        assert.strictEqual(code, 1);
+        assert.strictEqual(signal, null);
+        cb();
+      }));
+    });
+
+    test(function(cb) {
+      // Validate that the test now passes if the same limit
+      // becomes large enough.
+      const args = ['--expose-internals', testName, '1024'];
+      const cp = spawn(process.execPath, args, { env, stdio: 'inherit' });
+
+      cp.on('close', common.mustCall((code, signal) => {
+        assert.strictEqual(code, 0);
+        assert.strictEqual(signal, null);
+        cb();
+      }));
+    });
+  }
+});
+
+function runTest() {
+  const fn = tests.shift();
+
+  if (!fn) {
+    return;
+  }
+
+  fn(() => {
+    setTimeout(runTest, timeout);
+  });
+}
+
+runTest();
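
For a quick end-to-end check of the option plumbing outside the harness above, one possibility (a sketch, assuming the same `--expose-internals` access the tests already rely on; the filename is hypothetical) is to read the parsed value back through `internal/options`:

  // check-max-header-size.js (hypothetical name): prints the effective
  // value of the flag as seen by the internal options store.
  // $ node --expose-internals --max-http-header-size=1024 check-max-header-size.js
  // Expected output: 1024
  'use strict';

  const { getOptionValue } = require('internal/options');
  console.log(getOptionValue('--max-http-header-size'));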