Compare commits

..

4 Commits

20 changed files with 4130 additions and 1251 deletions

6
.gitignore vendored
View File

@@ -1,2 +1,4 @@
-SOURCES/icu4c-64_2-src.tgz
-SOURCES/node-v10.24.0-stripped.tar.gz
+SOURCES/icu4c-78.2-data-bin-b.zip
+SOURCES/icu4c-78.2-data-bin-l.zip
+SOURCES/node-v24.14.1-stripped.tar.gz
+SOURCES/packaging-scripts.tar.gz

View File

@@ -1,2 +1,4 @@
-3127155ecf2b75ab4835f501b7478e39c07bb852 SOURCES/icu4c-64_2-src.tgz
-be0e0b385a852c376f452b3d94727492e05407e4 SOURCES/node-v10.24.0-stripped.tar.gz
+7a91e81c4f2c8368d80285a5bbdfe278d68e4a84 SOURCES/icu4c-78.2-data-bin-b.zip
+b9f5918e2118ef8531b0ffc04b3d50e951e3a166 SOURCES/icu4c-78.2-data-bin-l.zip
+4b8d577fed40b7065f78da8d4257775cb9a56a35 SOURCES/node-v24.14.1-stripped.tar.gz
+2399006236317582f75d9525d5aa3b75fcd8a00d SOURCES/packaging-scripts.tar.gz

View File

@ -1,31 +0,0 @@
From 2cd4c12776af3da588231d3eb498e6451c30eae5 Mon Sep 17 00:00:00 2001
From: Zuzana Svetlikova <zsvetlik@redhat.com>
Date: Thu, 27 Apr 2017 14:25:42 +0200
Subject: [PATCH] Disable running gyp on shared deps
Signed-off-by: rpm-build <rpm-build>
---
Makefile | 7 +++----
1 file changed, 3 insertions(+), 4 deletions(-)
diff --git a/Makefile b/Makefile
index 73feb4c..45bbceb 100644
--- a/Makefile
+++ b/Makefile
@@ -123,10 +123,9 @@ with-code-cache:
test-code-cache: with-code-cache
$(PYTHON) tools/test.py $(PARALLEL_ARGS) --mode=$(BUILDTYPE_LOWER) code-cache
-out/Makefile: common.gypi deps/uv/uv.gyp deps/http_parser/http_parser.gyp \
- deps/zlib/zlib.gyp deps/v8/gypfiles/toolchain.gypi \
- deps/v8/gypfiles/features.gypi deps/v8/gypfiles/v8.gyp node.gyp \
- config.gypi
+out/Makefile: common.gypi deps/http_parser/http_parser.gyp \
+ deps/v8/gypfiles/toolchain.gypi deps/v8/gypfiles/features.gypi \
+ deps/v8/gypfiles/v8.gyp node.gyp config.gypi
$(PYTHON) tools/gyp_node.py -f make
config.gypi: configure configure.py
--
2.26.2

View File

@ -0,0 +1,46 @@
From e93d9b5fdcd8e5744de629461c03a07de2252f8f Mon Sep 17 00:00:00 2001
From: Stephen Gallagher <sgallagh@redhat.com>
Date: Fri, 17 Apr 2020 12:59:44 +0200
Subject: [PATCH] Remove unused OpenSSL config
The build process will try to create these config files, even when
using the system OpenSSL and will thus fail since we strip this path
from the tarball.
Signed-off-by: Stephen Gallagher <sgallagh@redhat.com>
Signed-off-by: rpm-build <rpm-build>
---
node.gyp | 17 -----------------
1 file changed, 17 deletions(-)
diff --git a/node.gyp b/node.gyp
index 1147495..da6ea50 100644
--- a/node.gyp
+++ b/node.gyp
@@ -822,23 +822,6 @@
],
},
],
- }, {
- 'variables': {
- 'opensslconfig_internal': '<(obj_dir)/deps/openssl/openssl.cnf',
- 'opensslconfig': './deps/openssl/nodejs-openssl.cnf',
- },
- 'actions': [
- {
- 'action_name': 'reset_openssl_cnf',
- 'inputs': [ '<(opensslconfig)', ],
- 'outputs': [ '<(opensslconfig_internal)', ],
- 'action': [
- '<(python)', 'tools/copyfile.py',
- '<(opensslconfig)',
- '<(opensslconfig_internal)',
- ],
- },
- ],
}],
],
}, # node_core_target_name
--
2.47.0

View File

@ -0,0 +1,84 @@
From 98738d27288bd9ca634e29181ef665e812e7bbd3 Mon Sep 17 00:00:00 2001
From: Michael Dawson <midawson@redhat.com>
Date: Fri, 23 Feb 2024 13:43:56 +0100
Subject: [PATCH] Disable FIPS options
On RHEL, FIPS should be configured only on system level.
Additionally, the related options may cause segfault when used on RHEL.
This patch causes the option processing to end sooner
than the problematic code gets executed.
Additionally, the JS-level options to mess with FIPS settings
are similarly disabled.
Upstream report: https://github.com/nodejs/node/pull/48950
RHBZ: https://bugzilla.redhat.com/show_bug.cgi?id=2226726
---
lib/crypto.js | 10 ++++++++++
lib/internal/errors.js | 6 ++++++
src/crypto/crypto_util.cc | 2 ++
3 files changed, 18 insertions(+)
diff --git a/lib/crypto.js b/lib/crypto.js
index 41adecc..b2627ac 100644
--- a/lib/crypto.js
+++ b/lib/crypto.js
@@ -36,7 +36,10 @@ const {
assertCrypto();
const {
+ // RHEL specific error
+ ERR_CRYPTO_FIPS_SYSTEM_CONTROLLED,
+
ERR_CRYPTO_FIPS_FORCED,
ERR_WORKER_UNSUPPORTED_OPERATION,
} = require('internal/errors').codes;
const constants = internalBinding('constants').crypto;
@@ -251,6 +254,13 @@ function getFips() {
}
function setFips(val) {
+ // in RHEL FIPS enable/disable should only be done at system level
+ if (getFips() != val) {
+ throw new ERR_CRYPTO_FIPS_SYSTEM_CONTROLLED();
+ } else {
+ return;
+ }
+
if (getOptionValue('--force-fips')) {
if (val) return;
throw new ERR_CRYPTO_FIPS_FORCED();
diff --git a/lib/internal/errors.js b/lib/internal/errors.js
index a722360..04d8a53 100644
--- a/lib/internal/errors.js
+++ b/lib/internal/errors.js
@@ -1111,6 +1111,12 @@ module.exports = {
//
// Note: Node.js specific errors must begin with the prefix ERR_
+// insert RHEL specific error
+E('ERR_CRYPTO_FIPS_SYSTEM_CONTROLLED',
+  'Cannot set FIPS mode. FIPS should be enabled/disabled at system level. See ' +
+ 'https://access.redhat.com/documentation/en-us/red_hat_enterprise_linux/9/html/security_hardening/assembly_installing-the-system-in-fips-mode_security-hardening for more details.\n',
+ Error);
+
E('ERR_ACCESS_DENIED',
function(msg, permission = '', resource = '') {
this.permission = permission;
diff --git a/src/crypto/crypto_util.cc b/src/crypto/crypto_util.cc
index 5734d8f..ef9d1b1 100644
--- a/src/crypto/crypto_util.cc
+++ b/src/crypto/crypto_util.cc
@@ -86,6 +86,8 @@ bool ProcessFipsOptions() {
/* Override FIPS settings in configuration file, if needed. */
if (per_process::cli_options->enable_fips_crypto ||
per_process::cli_options->force_fips_crypto) {
+ fprintf(stderr, "ERROR: Using options related to FIPS is not recommended, configure FIPS in openssl instead. See https://access.redhat.com/documentation/en-us/red_hat_enterprise_linux/9/html/security_hardening/assembly_installing-the-system-in-fips-mode_security-hardening for more details.\n");
+ return false;
#if OPENSSL_VERSION_MAJOR >= 3
if (!ncrypto::testFipsEnabled()) return false;
return ncrypto::setFipsEnabled(true, nullptr);
--
2.43.2

View File

@ -1,84 +0,0 @@
From e7afb2d6e2a6c8f9c9c32e12a10c3c5c4902a251 Mon Sep 17 00:00:00 2001
From: Stephen Gallagher <sgallagh@redhat.com>
Date: Tue, 1 May 2018 08:05:30 -0400
Subject: [PATCH] Suppress NPM message to run global update
Signed-off-by: Stephen Gallagher <sgallagh@redhat.com>
Signed-off-by: rpm-build <rpm-build>
---
deps/npm/bin/npm-cli.js | 54 -----------------------------------------
1 file changed, 54 deletions(-)
diff --git a/deps/npm/bin/npm-cli.js b/deps/npm/bin/npm-cli.js
index c0d9be0..0f0892e 100755
--- a/deps/npm/bin/npm-cli.js
+++ b/deps/npm/bin/npm-cli.js
@@ -71,65 +71,11 @@
npm.command = 'help'
}
- var isGlobalNpmUpdate = conf.global && ['install', 'update'].includes(npm.command) && npm.argv.includes('npm')
-
// now actually fire up npm and run the command.
// this is how to use npm programmatically:
conf._exit = true
npm.load(conf, function (er) {
if (er) return errorHandler(er)
- if (
- !isGlobalNpmUpdate &&
- npm.config.get('update-notifier') &&
- !unsupported.checkVersion(process.version).unsupported
- ) {
- const pkg = require('../package.json')
- let notifier = require('update-notifier')({pkg})
- const isCI = require('ci-info').isCI
- if (
- notifier.update &&
- notifier.update.latest !== pkg.version &&
- !isCI
- ) {
- const color = require('ansicolors')
- const useColor = npm.config.get('color')
- const useUnicode = npm.config.get('unicode')
- const old = notifier.update.current
- const latest = notifier.update.latest
- let type = notifier.update.type
- if (useColor) {
- switch (type) {
- case 'major':
- type = color.red(type)
- break
- case 'minor':
- type = color.yellow(type)
- break
- case 'patch':
- type = color.green(type)
- break
- }
- }
- const changelog = `https://github.com/npm/cli/releases/tag/v${latest}`
- notifier.notify({
- message: `New ${type} version of ${pkg.name} available! ${
- useColor ? color.red(old) : old
- } ${useUnicode ? '→' : '->'} ${
- useColor ? color.green(latest) : latest
- }\n` +
- `${
- useColor ? color.yellow('Changelog:') : 'Changelog:'
- } ${
- useColor ? color.cyan(changelog) : changelog
- }\n` +
- `Run ${
- useColor
- ? color.green(`npm install -g ${pkg.name}`)
- : `npm i -g ${pkg.name}`
- } to update!`
- })
- }
- }
npm.commands[npm.command](npm.argv, function (err) {
// https://genius.com/Lin-manuel-miranda-your-obedient-servant-lyrics
if (
--
2.26.2

View File

@ -1,122 +0,0 @@
From 0028cc74dac4dd24b8599ade85cb49fdafa9f559 Mon Sep 17 00:00:00 2001
From: Stephen Gallagher <sgallagh@redhat.com>
Date: Fri, 6 Dec 2019 16:40:25 -0500
Subject: [PATCH] build: auto-load ICU data from --with-icu-default-data-dir
When compiled with `--with-intl=small` and
`--with-icu-default-data-dir=PATH`, Node.js will use PATH as a
fallback location for the ICU data.
We will first perform an access check using fopen(PATH, 'r') to
ensure that the file is readable. If it is, we'll set the
icu_data_directory and proceed. There's a slight overhead for the
fopen() check, but it should be barely measurable.
This will be useful for Linux distribution packagers who want to
be able to ship a minimal node binary in a container image but
also be able to add on the full i18n support where needed. With
this patch, it becomes possible to ship the interpreter as
/usr/bin/node in one package for the distribution and to ship the
data files in another package (without a strict dependency
between the two). This means that users of the distribution will
not need to explicitly direct Node.js to locate the ICU data. It
also means that in environments where full internationalization is
not required, they do not need to carry the extra content (with
the associated storage costs).
Refs: https://github.com/nodejs/node/issues/3460
Signed-off-by: Stephen Gallagher <sgallagh@redhat.com>
Signed-off-by: rpm-build <rpm-build>
---
configure.py | 9 +++++++++
node.gypi | 7 +++++++
src/node.cc | 20 ++++++++++++++++++++
3 files changed, 36 insertions(+)
diff --git a/configure.py b/configure.py
index 89f7bf5..d611a88 100755
--- a/configure.py
+++ b/configure.py
@@ -433,6 +433,14 @@ intl_optgroup.add_option('--with-icu-source',
'the icu4c source archive. '
'v%d.x or later recommended.' % icu_versions['minimum_icu'])
+intl_optgroup.add_option('--with-icu-default-data-dir',
+ action='store',
+ dest='with_icu_default_data_dir',
+ help='Path to the icuXXdt{lb}.dat file. If unspecified, ICU data will '
+ 'only be read if the NODE_ICU_DATA environment variable or the '
+ '--icu-data-dir runtime argument is used. This option has effect '
+ 'only when Node.js is built with --with-intl=small-icu.')
+
parser.add_option('--with-ltcg',
action='store_true',
dest='with_ltcg',
@@ -1359,6 +1367,7 @@ def configure_intl(o):
locs.add('root') # must have root
o['variables']['icu_locales'] = string.join(locs,',')
# We will check a bit later if we can use the canned deps/icu-small
+ o['variables']['icu_default_data'] = options.with_icu_default_data_dir or ''
elif with_intl == 'full-icu':
# full ICU
o['variables']['v8_enable_i18n_support'] = 1
diff --git a/node.gypi b/node.gypi
index 466a174..65b97d6 100644
--- a/node.gypi
+++ b/node.gypi
@@ -113,6 +113,13 @@
'conditions': [
[ 'icu_small=="true"', {
'defines': [ 'NODE_HAVE_SMALL_ICU=1' ],
+ 'conditions': [
+ [ 'icu_default_data!=""', {
+ 'defines': [
+ 'NODE_ICU_DEFAULT_DATA_DIR="<(icu_default_data)"',
+ ],
+ }],
+ ],
}]],
}],
[ 'node_use_bundled_v8=="true" and \
diff --git a/src/node.cc b/src/node.cc
index 7c01187..c9840e3 100644
--- a/src/node.cc
+++ b/src/node.cc
@@ -92,6 +92,7 @@
#if defined(NODE_HAVE_I18N_SUPPORT)
#include <unicode/uvernum.h>
+#include <unicode/utypes.h>
#endif
#if defined(LEAK_SANITIZER)
@@ -2643,6 +2644,25 @@ void Init(std::vector<std::string>* argv,
// If the parameter isn't given, use the env variable.
if (per_process_opts->icu_data_dir.empty())
SafeGetenv("NODE_ICU_DATA", &per_process_opts->icu_data_dir);
+
+#ifdef NODE_ICU_DEFAULT_DATA_DIR
+ // If neither the CLI option nor the environment variable was specified,
+ // fall back to the configured default
+ if (per_process_opts->icu_data_dir.empty()) {
+ // Check whether the NODE_ICU_DEFAULT_DATA_DIR contains the right data
+ // file and can be read.
+ static const char full_path[] =
+ NODE_ICU_DEFAULT_DATA_DIR "/" U_ICUDATA_NAME ".dat";
+
+ FILE* f = fopen(full_path, "rb");
+
+ if (f != nullptr) {
+ fclose(f);
+ per_process_opts->icu_data_dir = NODE_ICU_DEFAULT_DATA_DIR;
+ }
+ }
+#endif // NODE_ICU_DEFAULT_DATA_DIR
+
// Initialize ICU.
// If icu_data_dir is empty here, it will load the 'minimal' data.
if (!i18n::InitializeICUDirectory(per_process_opts->icu_data_dir)) {
--
2.26.2

View File

@ -0,0 +1,530 @@
From 205baae16ff43539500a6f168d0d27e226bfc4f2 Mon Sep 17 00:00:00 2001
From: rpm-build <rpm-build>
Date: Wed, 25 Mar 2026 15:36:44 +0100
Subject: [PATCH] downstream: update nghttp2 to 1.68.1
This is done out of sync with upstream node,
in order to address CVE-2026-27135 in a timely manner.
Should be dropped once upstream addresses the CVE in their own way.
Signed-off-by: rpm-build <rpm-build>
---
.../nghttp2/lib/includes/nghttp2/nghttp2ver.h | 4 +-
deps/nghttp2/lib/nghttp2_frame.c | 10 +
deps/nghttp2/lib/nghttp2_int.h | 6 +-
deps/nghttp2/lib/nghttp2_session.c | 206 ++++++++++--------
4 files changed, 135 insertions(+), 91 deletions(-)
diff --git a/deps/nghttp2/lib/includes/nghttp2/nghttp2ver.h b/deps/nghttp2/lib/includes/nghttp2/nghttp2ver.h
index 8dfe536..95acef2 100644
--- a/deps/nghttp2/lib/includes/nghttp2/nghttp2ver.h
+++ b/deps/nghttp2/lib/includes/nghttp2/nghttp2ver.h
@@ -29,7 +29,7 @@
* @macro
* Version number of the nghttp2 library release
*/
-#define NGHTTP2_VERSION "1.68.0"
+#define NGHTTP2_VERSION "1.68.1"
/**
* @macro
@@ -37,6 +37,6 @@
* release. This is a 24 bit number with 8 bits for major number, 8 bits
* for minor and 8 bits for patch. Version 1.2.3 becomes 0x010203.
*/
-#define NGHTTP2_VERSION_NUM 0x014400
+#define NGHTTP2_VERSION_NUM 0x014401
#endif /* NGHTTP2VER_H */
diff --git a/deps/nghttp2/lib/nghttp2_frame.c b/deps/nghttp2/lib/nghttp2_frame.c
index edc2aaa..264ae9d 100644
--- a/deps/nghttp2/lib/nghttp2_frame.c
+++ b/deps/nghttp2/lib/nghttp2_frame.c
@@ -750,6 +750,16 @@ void nghttp2_frame_unpack_altsvc_payload(nghttp2_extension *frame,
uint8_t *p;
altsvc = frame->payload;
+
+ if (payloadlen == 0) {
+ altsvc->origin = NULL;
+ altsvc->origin_len = 0;
+ altsvc->field_value = NULL;
+ altsvc->field_value_len = 0;
+
+ return;
+ }
+
p = payload;
altsvc->origin = p;
diff --git a/deps/nghttp2/lib/nghttp2_int.h b/deps/nghttp2/lib/nghttp2_int.h
index 4e3b268..d89cf15 100644
--- a/deps/nghttp2/lib/nghttp2_int.h
+++ b/deps/nghttp2/lib/nghttp2_int.h
@@ -52,7 +52,11 @@ typedef enum {
* Unlike NGHTTP2_ERR_IGN_HTTP_HEADER, this does not invoke
* nghttp2_on_invalid_header_callback.
*/
- NGHTTP2_ERR_REMOVE_HTTP_HEADER = -106
+ NGHTTP2_ERR_REMOVE_HTTP_HEADER = -106,
+ /*
+ * Cancel pushed stream.
+ */
+ NGHTTP2_ERR_PUSH_CANCEL = -107,
} nghttp2_internal_error;
#endif /* !defined(NGHTTP2_INT_H) */
diff --git a/deps/nghttp2/lib/nghttp2_session.c b/deps/nghttp2/lib/nghttp2_session.c
index 97d7fda..0fbcc93 100644
--- a/deps/nghttp2/lib/nghttp2_session.c
+++ b/deps/nghttp2/lib/nghttp2_session.c
@@ -3272,7 +3272,9 @@ static int session_call_on_invalid_header(nghttp2_session *session,
session, frame, nv->name->base, nv->name->len, nv->value->base,
nv->value->len, nv->flags, session->user_data);
} else {
- return NGHTTP2_ERR_TEMPORAL_CALLBACK_FAILURE;
+ /* If both callbacks are not set, the invalid field nv is
+ ignored. */
+ return 0;
}
if (rv == NGHTTP2_ERR_PAUSE || rv == NGHTTP2_ERR_TEMPORAL_CALLBACK_FAILURE) {
@@ -3357,6 +3359,10 @@ static uint32_t get_error_code_from_lib_error_code(int lib_error_code) {
case NGHTTP2_ERR_HTTP_HEADER:
case NGHTTP2_ERR_HTTP_MESSAGING:
return NGHTTP2_PROTOCOL_ERROR;
+ case NGHTTP2_ERR_INTERNAL:
+ return NGHTTP2_INTERNAL_ERROR;
+ case NGHTTP2_ERR_PUSH_CANCEL:
+ return NGHTTP2_CANCEL;
default:
return NGHTTP2_INTERNAL_ERROR;
}
@@ -3408,7 +3414,7 @@ static int session_handle_invalid_stream2(nghttp2_session *session,
if (rv != 0) {
return rv;
}
- if (session->callbacks.on_invalid_frame_recv_callback) {
+ if (frame && session->callbacks.on_invalid_frame_recv_callback) {
if (session->callbacks.on_invalid_frame_recv_callback(
session, frame, lib_error_code, session->user_data) != 0) {
return NGHTTP2_ERR_CALLBACK_FAILURE;
@@ -3563,7 +3569,29 @@ static int inflate_header_block(nghttp2_session *session, nghttp2_frame *frame,
rv2 = session_call_on_invalid_header(session, frame, &nv);
if (rv2 == NGHTTP2_ERR_TEMPORAL_CALLBACK_FAILURE) {
- rv = NGHTTP2_ERR_HTTP_HEADER;
+ DEBUGF("recv: HTTP error: type=%u, id=%d, header %.*s: %.*s\n",
+ frame->hd.type, frame->hd.stream_id, (int)nv.name->len,
+ nv.name->base, (int)nv.value->len, nv.value->base);
+
+ rv = session_call_error_callback(
+ session, NGHTTP2_ERR_HTTP_HEADER,
+ "Invalid HTTP header field was received: frame type: "
+ "%u, stream: %d, name: [%.*s], value: [%.*s]",
+ frame->hd.type, frame->hd.stream_id, (int)nv.name->len,
+ nv.name->base, (int)nv.value->len, nv.value->base);
+
+ if (nghttp2_is_fatal(rv)) {
+ return rv;
+ }
+
+ rv = session_handle_invalid_stream2(
+ session, subject_stream->stream_id, frame,
+ NGHTTP2_ERR_HTTP_HEADER);
+ if (nghttp2_is_fatal(rv)) {
+ return rv;
+ }
+
+ return NGHTTP2_ERR_TEMPORAL_CALLBACK_FAILURE;
} else {
if (rv2 != 0) {
return rv2;
@@ -3603,13 +3631,8 @@ static int inflate_header_block(nghttp2_session *session, nghttp2_frame *frame,
return rv;
}
- rv =
- session_handle_invalid_stream2(session, subject_stream->stream_id,
- frame, NGHTTP2_ERR_HTTP_HEADER);
- if (nghttp2_is_fatal(rv)) {
- return rv;
- }
- return NGHTTP2_ERR_TEMPORAL_CALLBACK_FAILURE;
+ return nghttp2_session_terminate_session(session,
+ NGHTTP2_PROTOCOL_ERROR);
}
}
if (rv == 0) {
@@ -3722,27 +3745,7 @@ static int session_after_header_block_received(nghttp2_session *session) {
}
}
if (rv != 0) {
- int32_t stream_id;
-
- if (frame->hd.type == NGHTTP2_PUSH_PROMISE) {
- stream_id = frame->push_promise.promised_stream_id;
- } else {
- stream_id = frame->hd.stream_id;
- }
-
- rv = session_handle_invalid_stream2(session, stream_id, frame,
- NGHTTP2_ERR_HTTP_MESSAGING);
- if (nghttp2_is_fatal(rv)) {
- return rv;
- }
-
- if (frame->hd.type == NGHTTP2_HEADERS &&
- (frame->hd.flags & NGHTTP2_FLAG_END_STREAM)) {
- nghttp2_stream_shutdown(stream, NGHTTP2_SHUT_RD);
- /* Don't call nghttp2_session_close_stream_if_shut_rdwr
- because RST_STREAM has been submitted. */
- }
- return 0;
+ return nghttp2_session_terminate_session(session, NGHTTP2_PROTOCOL_ERROR);
}
}
@@ -4078,8 +4081,7 @@ static int update_remote_initial_window_size_func(void *entry, void *ptr) {
rv = nghttp2_stream_update_remote_initial_window_size(
stream, arg->new_window_size, arg->old_window_size);
if (rv != 0) {
- return nghttp2_session_add_rst_stream(arg->session, stream->stream_id,
- NGHTTP2_FLOW_CONTROL_ERROR);
+ return NGHTTP2_ERR_FLOW_CONTROL;
}
/* If window size gets positive, push deferred DATA frame to
@@ -4105,6 +4107,8 @@ static int update_remote_initial_window_size_func(void *entry, void *ptr) {
*
* NGHTTP2_ERR_NOMEM
* Out of memory.
+ * NGHTTP2_ERR_FLOW_CONTROL
+ * Window size gets out of range.
*/
static int
session_update_remote_initial_window_size(nghttp2_session *session,
@@ -4128,8 +4132,7 @@ static int update_local_initial_window_size_func(void *entry, void *ptr) {
rv = nghttp2_stream_update_local_initial_window_size(
stream, arg->new_window_size, arg->old_window_size);
if (rv != 0) {
- return nghttp2_session_add_rst_stream(arg->session, stream->stream_id,
- NGHTTP2_FLOW_CONTROL_ERROR);
+ return NGHTTP2_ERR_FLOW_CONTROL;
}
if (stream->window_update_queued) {
@@ -4163,6 +4166,8 @@ static int update_local_initial_window_size_func(void *entry, void *ptr) {
*
* NGHTTP2_ERR_NOMEM
* Out of memory.
+ * NGHTTP2_ERR_FLOW_CONTROL
+ * Window size gets out of range.
*/
static int
session_update_local_initial_window_size(nghttp2_session *session,
@@ -4549,9 +4554,9 @@ int nghttp2_session_on_push_promise_received(nghttp2_session *session,
session->max_incoming_reserved_streams) {
/* Currently, client does not retain closed stream, so we don't
check NGHTTP2_SHUT_RD condition here. */
-
- rv = nghttp2_session_add_rst_stream(
- session, frame->push_promise.promised_stream_id, NGHTTP2_CANCEL);
+ rv = session_handle_invalid_stream2(session,
+ frame->push_promise.promised_stream_id,
+ NULL, NGHTTP2_ERR_PUSH_CANCEL);
if (rv != 0) {
return rv;
}
@@ -4708,8 +4713,9 @@ static int session_on_stream_window_update_received(nghttp2_session *session,
}
if (NGHTTP2_MAX_WINDOW_SIZE - frame->window_update.window_size_increment <
stream->remote_window_size) {
- return session_handle_invalid_stream(session, frame,
- NGHTTP2_ERR_FLOW_CONTROL);
+ return session_handle_invalid_connection(
+ session, frame, NGHTTP2_ERR_FLOW_CONTROL,
+ "WINDOW_UPDATE: window size overflow");
}
stream->remote_window_size += frame->window_update.window_size_increment;
@@ -4939,16 +4945,7 @@ int nghttp2_session_on_data_received(nghttp2_session *session,
if (session_enforce_http_messaging(session) &&
(frame->hd.flags & NGHTTP2_FLAG_END_STREAM)) {
if (nghttp2_http_on_remote_end_stream(stream) != 0) {
- rv = nghttp2_session_add_rst_stream(session, stream->stream_id,
- NGHTTP2_PROTOCOL_ERROR);
- if (nghttp2_is_fatal(rv)) {
- return rv;
- }
-
- nghttp2_stream_shutdown(stream, NGHTTP2_SHUT_RD);
- /* Don't call nghttp2_session_close_stream_if_shut_rdwr because
- RST_STREAM has been submitted. */
- return 0;
+ return nghttp2_session_terminate_session(session, NGHTTP2_PROTOCOL_ERROR);
}
}
@@ -5006,8 +5003,8 @@ int nghttp2_session_update_recv_stream_window_size(nghttp2_session *session,
rv = adjust_recv_window_size(&stream->recv_window_size, delta_size,
stream->local_window_size);
if (rv != 0) {
- return nghttp2_session_add_rst_stream(session, stream->stream_id,
- NGHTTP2_FLOW_CONTROL_ERROR);
+ return nghttp2_session_terminate_session(session,
+ NGHTTP2_FLOW_CONTROL_ERROR);
}
/* We don't have to send WINDOW_UPDATE if the data received is the
last chunk in the incoming stream. */
@@ -5469,6 +5466,10 @@ nghttp2_ssize nghttp2_session_mem_recv2(nghttp2_session *session,
busy = 1;
rv = session_on_data_received_fail_fast(session);
+ if (nghttp2_is_fatal(rv)) {
+ return rv;
+ }
+
if (iframe->state == NGHTTP2_IB_IGN_ALL) {
return (nghttp2_ssize)inlen;
}
@@ -5489,10 +5490,6 @@ nghttp2_ssize nghttp2_session_mem_recv2(nghttp2_session *session,
break;
}
- if (nghttp2_is_fatal(rv)) {
- return rv;
- }
-
rv = inbound_frame_handle_pad(iframe, &iframe->frame.hd);
if (rv < 0) {
rv = nghttp2_session_terminate_session_with_reason(
@@ -5576,6 +5573,10 @@ nghttp2_ssize nghttp2_session_mem_recv2(nghttp2_session *session,
return rv;
}
+ if (iframe->state == NGHTTP2_IB_IGN_ALL) {
+ return (nghttp2_ssize)inlen;
+ }
+
on_begin_frame_called = 1;
rv = session_process_headers_frame(session);
@@ -5590,8 +5591,8 @@ nghttp2_ssize nghttp2_session_mem_recv2(nghttp2_session *session,
}
if (rv == NGHTTP2_ERR_TEMPORAL_CALLBACK_FAILURE) {
- rv = nghttp2_session_add_rst_stream(
- session, iframe->frame.hd.stream_id, NGHTTP2_INTERNAL_ERROR);
+ rv = session_handle_invalid_stream2(
+ session, iframe->frame.hd.stream_id, NULL, NGHTTP2_ERR_INTERNAL);
if (nghttp2_is_fatal(rv)) {
return rv;
}
@@ -6044,6 +6045,10 @@ nghttp2_ssize nghttp2_session_mem_recv2(nghttp2_session *session,
if (nghttp2_is_fatal(rv)) {
return rv;
}
+
+ if (iframe->state == NGHTTP2_IB_IGN_ALL) {
+ return (nghttp2_ssize)inlen;
+ }
}
}
@@ -6107,8 +6112,8 @@ nghttp2_ssize nghttp2_session_mem_recv2(nghttp2_session *session,
}
if (rv == NGHTTP2_ERR_TEMPORAL_CALLBACK_FAILURE) {
- rv = nghttp2_session_add_rst_stream(
- session, iframe->frame.hd.stream_id, NGHTTP2_INTERNAL_ERROR);
+ rv = session_handle_invalid_stream2(
+ session, iframe->frame.hd.stream_id, NULL, NGHTTP2_ERR_INTERNAL);
if (nghttp2_is_fatal(rv)) {
return rv;
}
@@ -6191,9 +6196,9 @@ nghttp2_ssize nghttp2_session_mem_recv2(nghttp2_session *session,
}
if (rv == NGHTTP2_ERR_TEMPORAL_CALLBACK_FAILURE) {
- rv = nghttp2_session_add_rst_stream(
- session, iframe->frame.push_promise.promised_stream_id,
- NGHTTP2_INTERNAL_ERROR);
+ rv = session_handle_invalid_stream2(
+ session, iframe->frame.push_promise.promised_stream_id, NULL,
+ NGHTTP2_ERR_INTERNAL);
if (nghttp2_is_fatal(rv)) {
return rv;
}
@@ -6296,6 +6301,10 @@ nghttp2_ssize nghttp2_session_mem_recv2(nghttp2_session *session,
return rv;
}
+ if (iframe->state == NGHTTP2_IB_IGN_ALL) {
+ return (nghttp2_ssize)inlen;
+ }
+
session_inbound_frame_reset(session);
break;
@@ -6371,12 +6380,12 @@ nghttp2_ssize nghttp2_session_mem_recv2(nghttp2_session *session,
iframe->payloadleft -= hd_proclen;
/* Use promised stream ID for PUSH_PROMISE */
- rv = nghttp2_session_add_rst_stream(
+ rv = session_handle_invalid_stream2(
session,
iframe->frame.hd.type == NGHTTP2_PUSH_PROMISE
? iframe->frame.push_promise.promised_stream_id
: iframe->frame.hd.stream_id,
- NGHTTP2_INTERNAL_ERROR);
+ NULL, NGHTTP2_ERR_INTERNAL);
if (nghttp2_is_fatal(rv)) {
return rv;
}
@@ -6423,6 +6432,10 @@ nghttp2_ssize nghttp2_session_mem_recv2(nghttp2_session *session,
if (nghttp2_is_fatal(rv)) {
return rv;
}
+
+ if (iframe->state == NGHTTP2_IB_IGN_ALL) {
+ return (nghttp2_ssize)inlen;
+ }
}
session_inbound_frame_reset(session);
@@ -6598,6 +6611,10 @@ nghttp2_ssize nghttp2_session_mem_recv2(nghttp2_session *session,
if (nghttp2_is_fatal(rv)) {
return rv;
}
+
+ if (iframe->state == NGHTTP2_IB_IGN_ALL) {
+ return (nghttp2_ssize)inlen;
+ }
} else {
iframe->state = NGHTTP2_IB_IGN_HEADER_BLOCK;
}
@@ -6648,6 +6665,10 @@ nghttp2_ssize nghttp2_session_mem_recv2(nghttp2_session *session,
if (nghttp2_is_fatal(rv)) {
return rv;
}
+
+ if (iframe->state == NGHTTP2_IB_IGN_ALL) {
+ return (nghttp2_ssize)inlen;
+ }
}
busy = 1;
@@ -6720,6 +6741,10 @@ nghttp2_ssize nghttp2_session_mem_recv2(nghttp2_session *session,
return rv;
}
+ if (iframe->state == NGHTTP2_IB_IGN_ALL) {
+ return (nghttp2_ssize)inlen;
+ }
+
data_readlen =
inbound_frame_effective_readlen(iframe, iframe->payloadleft, readlen);
@@ -6749,41 +6774,30 @@ nghttp2_ssize nghttp2_session_mem_recv2(nghttp2_session *session,
if (data_readlen > 0) {
if (session_enforce_http_messaging(session)) {
if (nghttp2_http_on_data_chunk(stream, (size_t)data_readlen) != 0) {
- if (session->opt_flags & NGHTTP2_OPTMASK_NO_AUTO_WINDOW_UPDATE) {
- /* Consume all data for connection immediately here */
- rv = session_update_connection_consumed_size(
- session, (size_t)data_readlen);
-
- if (nghttp2_is_fatal(rv)) {
- return rv;
- }
-
- if (iframe->state == NGHTTP2_IB_IGN_DATA) {
- return (nghttp2_ssize)inlen;
- }
- }
-
- rv = nghttp2_session_add_rst_stream(
- session, iframe->frame.hd.stream_id, NGHTTP2_PROTOCOL_ERROR);
+ rv = nghttp2_session_terminate_session(session,
+ NGHTTP2_PROTOCOL_ERROR);
if (nghttp2_is_fatal(rv)) {
return rv;
}
- busy = 1;
- iframe->state = NGHTTP2_IB_IGN_DATA;
- break;
+
+ return (nghttp2_ssize)inlen;
}
}
if (session->callbacks.on_data_chunk_recv_callback) {
rv = session->callbacks.on_data_chunk_recv_callback(
session, iframe->frame.hd.flags, iframe->frame.hd.stream_id,
in - readlen, (size_t)data_readlen, session->user_data);
- if (rv == NGHTTP2_ERR_PAUSE) {
- return (nghttp2_ssize)(in - first);
- }
-
if (nghttp2_is_fatal(rv)) {
return NGHTTP2_ERR_CALLBACK_FAILURE;
}
+
+ if (iframe->state == NGHTTP2_IB_IGN_ALL) {
+ return (nghttp2_ssize)inlen;
+ }
+
+ if (rv == NGHTTP2_ERR_PAUSE) {
+ return (nghttp2_ssize)(in - first);
+ }
}
}
}
@@ -6797,6 +6811,10 @@ nghttp2_ssize nghttp2_session_mem_recv2(nghttp2_session *session,
return rv;
}
+ if (iframe->state == NGHTTP2_IB_IGN_ALL) {
+ return (nghttp2_ssize)inlen;
+ }
+
session_inbound_frame_reset(session);
break;
@@ -6863,6 +6881,10 @@ nghttp2_ssize nghttp2_session_mem_recv2(nghttp2_session *session,
return rv;
}
+ if (iframe->state == NGHTTP2_IB_IGN_ALL) {
+ return (nghttp2_ssize)inlen;
+ }
+
if (rv != 0) {
busy = 1;
@@ -6881,6 +6903,10 @@ nghttp2_ssize nghttp2_session_mem_recv2(nghttp2_session *session,
return rv;
}
+ if (iframe->state == NGHTTP2_IB_IGN_ALL) {
+ return (nghttp2_ssize)inlen;
+ }
+
session_inbound_frame_reset(session);
break;
@@ -6909,6 +6935,10 @@ nghttp2_ssize nghttp2_session_mem_recv2(nghttp2_session *session,
return rv;
}
+ if (iframe->state == NGHTTP2_IB_IGN_ALL) {
+ return (nghttp2_ssize)inlen;
+ }
+
session_inbound_frame_reset(session);
break;
--
2.53.0

View File

@ -1,13 +0,0 @@
diff --git a/deps/npm/node_modules/y18n/index.js b/deps/npm/node_modules/y18n/index.js
index d720681628..727362aac0 100644
--- a/deps/npm/node_modules/y18n/index.js
+++ b/deps/npm/node_modules/y18n/index.js
@@ -11,7 +11,7 @@ function Y18N (opts) {
this.fallbackToLanguage = typeof opts.fallbackToLanguage === 'boolean' ? opts.fallbackToLanguage : true
// internal stuff.
- this.cache = {}
+ this.cache = Object.create(null)
this.writeQueue = []
}

View File

@ -1,189 +0,0 @@
#!/bin/sh
# Uses Argbash to generate command argument parsing. To update
# arguments, make sure to call
# `argbash nodejs-tarball.sh -o nodejs-tarball.sh`
# ARG_POSITIONAL_SINGLE([version],[Node.js release version],[""])
# ARG_DEFAULTS_POS([])
# ARG_HELP([Tool to aid in Node.js packaging of new releases])
# ARGBASH_GO()
# needed because of Argbash --> m4_ignore([
### START OF CODE GENERATED BY Argbash v2.8.1 one line above ###
# Argbash is a bash code generator used to get arguments parsing right.
# Argbash is FREE SOFTWARE, see https://argbash.io for more info
die()
{
local _ret=$2
test -n "$_ret" || _ret=1
test "$_PRINT_HELP" = yes && print_help >&2
echo "$1" >&2
exit ${_ret}
}
begins_with_short_option()
{
local first_option all_short_options='h'
first_option="${1:0:1}"
test "$all_short_options" = "${all_short_options/$first_option/}" && return 1 || return 0
}
# THE DEFAULTS INITIALIZATION - POSITIONALS
_positionals=()
_arg_version=""
# THE DEFAULTS INITIALIZATION - OPTIONALS
print_help()
{
printf '%s\n' "Tool to aid in Node.js packaging of new releases"
printf 'Usage: %s [-h|--help] [<version>]\n' "$0"
printf '\t%s\n' "<version>: Node.js release version (default: '""')"
printf '\t%s\n' "-h, --help: Prints help"
}
parse_commandline()
{
_positionals_count=0
while test $# -gt 0
do
_key="$1"
case "$_key" in
-h|--help)
print_help
exit 0
;;
-h*)
print_help
exit 0
;;
*)
_last_positional="$1"
_positionals+=("$_last_positional")
_positionals_count=$((_positionals_count + 1))
;;
esac
shift
done
}
handle_passed_args_count()
{
test "${_positionals_count}" -le 1 || _PRINT_HELP=yes die "FATAL ERROR: There were spurious positional arguments --- we expect between 0 and 1, but got ${_positionals_count} (the last one was: '${_last_positional}')." 1
}
assign_positional_args()
{
local _positional_name _shift_for=$1
_positional_names="_arg_version "
shift "$_shift_for"
for _positional_name in ${_positional_names}
do
test $# -gt 0 || break
eval "$_positional_name=\${1}" || die "Error during argument parsing, possibly an Argbash bug." 1
shift
done
}
parse_commandline "$@"
handle_passed_args_count
assign_positional_args 1 "${_positionals[@]}"
# OTHER STUFF GENERATED BY Argbash
### END OF CODE GENERATED BY Argbash (sortof) ### ])
# [ <-- needed because of Argbash
set -e
echo $_arg_version
if [ x$_arg_version != x ]; then
version=$_arg_version
else
version=$(rpm -q --specfile --qf='%{version}\n' nodejs.spec | head -n1)
fi
rm -f node-v${version}.tar.gz node-v${version}-stripped.tar.gz
wget http://nodejs.org/dist/v${version}/node-v${version}.tar.gz \
http://nodejs.org/dist/v${version}/SHASUMS256.txt
sha256sum -c SHASUMS256.txt --ignore-missing
tar -zxf node-v${version}.tar.gz
rm -rf node-v${version}/deps/openssl
tar -zcf node-v${version}-stripped.tar.gz node-v${version}
# Download the matching version of ICU
rm -f icu4c*-src.tgz icu.md5
ICUMD5=$(cat node-v${version}/tools/icu/current_ver.dep |jq -r '.[0].md5')
wget $(cat node-v${version}/tools/icu/current_ver.dep |jq -r '.[0].url')
ICUTARBALL=$(ls -1 icu4c*-src.tgz)
echo "$ICUMD5 $ICUTARBALL" > icu.md5
md5sum -c icu.md5
rm -f icu.md5 SHASUMS256.txt
rhpkg new-sources node-v${version}-stripped.tar.gz icu4c*-src.tgz
rm -f node-v${version}.tar.gz
set +e
# Determine the bundled versions of the various packages
echo "Bundled software versions"
echo "-------------------------"
echo
echo "libnode shared object version"
echo "========================="
grep "define NODE_MODULE_VERSION" node-v${version}/src/node_version.h
echo
echo "V8"
echo "========================="
grep "define V8_MAJOR_VERSION" node-v${version}/deps/v8/include/v8-version.h
grep "define V8_MINOR_VERSION" node-v${version}/deps/v8/include/v8-version.h
grep "define V8_BUILD_NUMBER" node-v${version}/deps/v8/include/v8-version.h
grep "define V8_PATCH_LEVEL" node-v${version}/deps/v8/include/v8-version.h
echo
echo "c-ares"
echo "========================="
grep "define ARES_VERSION_MAJOR" node-v${version}/deps/cares/include/ares_version.h
grep "define ARES_VERSION_MINOR" node-v${version}/deps/cares/include/ares_version.h
grep "define ARES_VERSION_PATCH" node-v${version}/deps/cares/include/ares_version.h
echo
echo "http-parser"
echo "========================="
grep "define HTTP_PARSER_VERSION_MAJOR" node-v${version}/deps/http_parser/http_parser.h
grep "define HTTP_PARSER_VERSION_MINOR" node-v${version}/deps/http_parser/http_parser.h
grep "define HTTP_PARSER_VERSION_PATCH" node-v${version}/deps/http_parser/http_parser.h
echo
echo "libuv"
echo "========================="
grep "define UV_VERSION_MAJOR" node-v${version}/deps/uv/include/uv/version.h
grep "define UV_VERSION_MINOR" node-v${version}/deps/uv/include/uv/version.h
grep "define UV_VERSION_PATCH" node-v${version}/deps/uv/include/uv/version.h
echo
echo "nghttp2"
echo "========================="
grep "define NGHTTP2_VERSION " node-v${version}/deps/nghttp2/lib/includes/nghttp2/nghttp2ver.h
echo
echo "ICU"
echo "========================="
grep "url" node-v${version}/tools/icu/current_ver.dep
echo
echo "punycode"
echo "========================="
grep "'version'" node-v${version}/lib/punycode.js
echo
echo "npm"
echo "========================="
grep "\"version\":" node-v${version}/deps/npm/package.json
echo
echo "Make sure these versions match what is in the RPM spec file"
rm -rf node-v${version}
# ] <-- needed because of Argbash

9
SOURCES/nodejs.pc.in Normal file
View File

@ -0,0 +1,9 @@
# pkg-config metadata template for the shared Node.js runtime library.
# The @TOKEN@ placeholders are substituted at RPM build time.
prefix=@PREFIX@
includedir=@INCLUDEDIR@
libdir=@LIBDIR@
Name: @PKGCONFNAME@
Description: JavaScript Runtime
Version: @NODEJS_VERSION@
# Consumers link against libnode and include headers from ${includedir}/node.
Libs: -L${libdir} -lnode
Cflags: -I${includedir}/node

142
SOURCES/nodejs.srpm.macros Normal file
View File

@ -0,0 +1,142 @@
# ============================================================================
# Vendored dependencies management
# --- Version macros definition
# Parse and normalize version string into several macros.
# By default, stores the whole string in `%<name>_evr` macro,
# then automatically strips any epoch and/or release parts
# (specified in the standard "E:V-R" format)
# and defines `%<name>_epoch`, `%<name>_version`, and `%<name>_release` macros.
#
# With the `-p` option, the version is additionally split into
# `%<name>_version_major`, `%<name>_version_minor`, and `%<name>_version_patch` macros.
#
# Any would-be empty macro will evaluate to `%{nil}`.
#
# Options:
# -p : Also define the partial macros.
#
# Arguments:
# 1: Name of the dependency. Any `-' will be replaced by `_' in the macro names.
# 2: The EVR string to parse.
%nodejs_define_version(p) %{lua:
local fedora = require "fedora.common"
\
-- Collect all positional macro arguments (%1..%N) into a Lua table.
local arg = {}; for a = 1, tonumber(rpm.expand("%#")) do
table.insert(arg, rpm.expand("%" .. a))
end
local opt = {
["p"] = fedora.readflag("p"),
}
\
local component = arg[1] or error("No name provided!")
local evr = arg[2] or error("No version string provided!")
\
local name = component:gsub("-", "_") -- macro-safe name
\
fedora.explicitset(name .. "_evr", evr)
\
-- Epoch is a leading "<digits>:" prefix; find() returns (start, end, capture),
-- so epoch_end is the index of the ':' and epoch is nil when absent.
local _, epoch_end, epoch = evr:find("^(%d+):")
fedora.explicitset(name .. "_epoch", epoch)
\
-- Release is everything after the last '-'; nil when absent.
local release_start, _, release = evr:find("%-([^-]+)$")
fedora.explicitset(name .. "_release", release)
\
-- Version spans whatever sits between the (optional) epoch and release;
-- sub(0, -1) degenerates to the whole string when neither was found.
local version_start, version_end = 0, -1
if epoch_end then version_start = epoch_end + 1 end
if release_start then version_end = release_start -1 end
\
local version = evr:sub(version_start, version_end)
fedora.explicitset(name .. "_version", version)
\
if opt.p then
-- Split on dots; absent parts are nil, so the macros become %{nil}.
local parts = {}; for p in version:gmatch("[^.]+") do table.insert(parts, p) end
fedora.explicitset(name .. "_version_major", parts[1])
fedora.explicitset(name .. "_version_minor", parts[2])
fedora.explicitset(name .. "_version_patch", parts[3])
end
}
# --- Declare vendored dependency
# Emits bcond-controlled RPM tags for a (potentially) vendored dependency.
#
# By default, it emits `Provides: bundled(<name>) = <version>` for given arguments.
# If de-vendoring option is provided, also defines a bcond that controls whether to de-vendor or not.
# The default is to de-vendor when possible unless a global bcond (`all_deps_bundled`) is set.
#
# Options:
# -a : Autoversion try using `<name>_version` macro if the version argument is empty.
# -n[npmname,...] : Also provide the respective npm module name when vendoring.
# -p[pkgname,...] : Use pkgconfig to BuildRequire de-vendored dependency.
# -r[rpmname,...] : Also explicitly declare run time requirement.
# -s[rpmname,...] : BuildRequire de-vendored dependency by RPM name.
#
# All above options accept optional parameter overriding the component name in respective tag.
# If needed, multiple values can be requested by separating them with a comma.
#
# When a name is used in a macro context (for example, in the -a option),
# the same name-mangling as for nodejs_define_version is used;
# no need to adjust it by hand.
#
# Arguments:
# 1: Name of the vendored component. Should be appropriate for `Provides: bundled(<name>)` tag.
# 2: Version of the vendored component. Ignored if de-vendored.
%nodejs_declare_bundled(an::p::r::s::) %{lua:
local fedora = require "fedora.common"
\
-- Collect all positional macro arguments (%1..%N) into a Lua table.
local arg = {}; for a = 1, tonumber(rpm.expand("%#")) do
table.insert(arg, rpm.expand("%" .. a))
end
local opt = {
["a"] = fedora.hasflag("a"),
["n"] = fedora.readflag("n"),
["p"] = fedora.readflag("p"),
["r"] = fedora.readflag("r"),
["s"] = fedora.readflag("s"),
}
\
local component = arg[1] or error("Vendored component was not named!")
-- NOTE(review): with -a the fallback uses fedora.read() on the mangled
-- "<name>_version" macro — confirm fedora.common exports read().
local version = arg[2] or (opt.a and fedora.read(component:gsub("-", "_") .. "_version")) or error("Missing component version!")
\
-- Helpers: map a function over a list / split a comma-separated option value.
local mapvalues = function(fn, tbl)
local output = {}; for _, val in ipairs(tbl) do table.insert(output, fn(val)) end; return output
end
local splitnames = function(input)
local output = {}; for m in input:gmatch("[^,]+") do table.insert(output, m) end; return output
end
local nl = string.char(10); -- \n does not work in rpmlua
\
-- De-vendoring is only possible when a BuildRequires source (-p or -s) exists,
-- and only done when the global all_deps_bundled bcond is off.
local possible_to_devendor = opt.p or opt.s
local should_devendor = possible_to_devendor and rpm.expand("%{with all_deps_bundled}") == "0"
\
-- Per-component bcond (bundled_<name>); default follows should_devendor.
local bcond_name = "bundled_" .. component:gsub("-", "_")
rpm.expand("%bcond " .. bcond_name .. " " .. (should_devendor and "0" or "1"))
\
if rpm.expand("%with " .. bcond_name) == "1" then
-- Bundled branch: emit Provides for the bundled component (and npm names with -n).
local provides = {string.format("bundled(%s) = %s", component, version)}
if opt.n then
local names = {component}; if opt.n ~= "" then names = splitnames(opt.n) end
for _, name in ipairs(names) do
table.insert(provides, string.format("npm(%s) = %s", name, version))
end
end
print("Provides: " .. table.concat(provides, ", "))
else
\
-- De-vendored branch: emit BuildRequires (pkgconfig via -p, RPM names via -s)
-- and optionally runtime Requires via -r.
-- NOTE(review): local `require` shadows Lua's require(); harmless here since
-- nothing below calls it, but worth renaming.
local buildrequire, require = nil, nil
if opt.p then
local format = function(n) return string.format("pkgconfig(%s)", n) end
local names = {component}; if opt.p ~= "" then names = splitnames(opt.p) end
buildrequire = "BuildRequires: " .. table.concat(mapvalues(format, names), ", ")
elseif opt.s then
local names = {component}; if opt.s ~= "" then names = splitnames(opt.s) end
buildrequire = "BuildRequires: " .. table.concat(names, ", ")
end
if opt.r then
local names = {component}; if opt.r ~= "" then names = splitnames(opt.r) end
require = "Requires: " .. table.concat(names, ", ")
end
\
-- NOTE(review): if this branch is reached with the bcond flipped manually but
-- neither -p nor -s given, buildrequire stays nil and the table has a nil
-- hole — TODO confirm that path is unreachable in practice.
print(table.concat({buildrequire, require}, nl))
end
}

View File

@ -1,2 +0,0 @@
%__nodejs_native_requires %{_rpmconfigdir}/nodejs_native.req
%__nodejs_native_path ^/usr/lib.*/node_modules/.*\\.node$

View File

@ -1 +0,0 @@
prefix=/usr/local

7
SOURCES/npmrc.in Normal file
View File

@ -0,0 +1,7 @@
# This is the distribution-level configuration file for npm.
# To configure npm at the system level, use the globalconfig file referenced below (defaults to @SYSCONFDIR@/npmrc).
# vim:set filetype=dosini:
globalconfig=@SYSCONFDIR@/npmrc
prefix=/usr/local
update-notifier=false

61
SOURCES/test-runner.sh Executable file
View File

@ -0,0 +1,61 @@
#!/bin/bash
# test-runner.sh — run a list of Node.js test scripts and report failures.
#
# Usage: test-runner.sh NODE_BIN PARENT_TEST_FOLDER TEST_LIST_FILE
#   NODE_BIN            node executable used to run each test
#   PARENT_TEST_FOLDER  directory the test paths in the list are relative to
#   TEST_LIST_FILE      file with one test per line:
#                         <relative/path/to/test.js> [arch-to-skip ...]
#                       Lines starting with '#' are comments; blank lines
#                       are ignored.
#
# Exit status: 0 if every executed test passed, 1 otherwise.
NODE_BIN="$1"
PARENT_TEST_FOLDER="$2"
TEST_LIST_FILE="$3"
# At most 10 min per test
TIMEOUT_DURATION=600
# Overall exit code; flipped to 1 on the first failure and never reset.
FINAL_RESULT=0
ARCH=$(uname -m)
echo "Started test run:"
# Run the list of tests
while IFS= read -r test_line; do
    # Ignore commented and blank/whitespace-only lines (a blank line would
    # otherwise produce a bogus "Test script not found" message).
    if [[ "$test_line" =~ ^# || -z "${test_line//[[:space:]]/}" ]]; then
        continue
    fi
    # First field is the test path; any remaining fields name architectures
    # on which the test must be skipped.
    TEST_PATH=$(echo "$test_line" | awk '{print $1}')
    IGNORE_ARCHES=$(echo "$test_line" |\
        awk '{for (i=2; i<=NF; i++) printf "%s ", $i; print ""}')
    # Skip the test when the current architecture is in its ignore list.
    for ARCH_IGNORE in $IGNORE_ARCHES; do
        if [[ "$ARCH_IGNORE" == "$ARCH" ]]; then
            echo "Skipping test, current arch is in ignore: $TEST_PATH ($ARCH_IGNORE)"
            continue 2
        fi
    done
    # Construct the full test path and make sure the script exists.
    TEST_SCRIPT="$PARENT_TEST_FOLDER/$TEST_PATH"
    if [ ! -f "$TEST_SCRIPT" ]; then
        echo "Test script not found: $TEST_SCRIPT"
        continue
    fi
    TEST_OUTPUT=$(timeout "$TIMEOUT_DURATION" "$NODE_BIN" "$TEST_SCRIPT" 2>&1)
    TEST_RESULT=$?
    # Handle the test result; timeout(1) exits with 124 when the limit is hit.
    if [ "$TEST_RESULT" -ne 0 ]; then
        FINAL_RESULT=1
        if [ "$TEST_RESULT" -eq 124 ]; then
            echo "Test timed out: $TEST_SCRIPT"
        else
            echo "Test failed: $TEST_SCRIPT"
        fi
        echo "Test failure message:"
        echo "$TEST_OUTPUT"
    fi
done < "$TEST_LIST_FILE"
if [ "$FINAL_RESULT" -eq 0 ]; then
    echo "All tests successfully passed."
fi
exit $FINAL_RESULT

2743
SOURCES/test-should-pass.txt Normal file

File diff suppressed because it is too large Load Diff

9
SOURCES/v8.pc.in Normal file
View File

@ -0,0 +1,9 @@
# pkg-config metadata template for the V8 library shipped with Node.js.
# The @TOKEN@ placeholders are substituted at RPM build time.
prefix=@PREFIX@
includedir=@INCLUDEDIR@
libdir=@LIBDIR@
Name: @PKGCONFNAME@
# NOTE(review): "JavaScript Runtime" looks copy-pasted from nodejs.pc.in —
# consider "V8 JavaScript Engine" for this file.
Description: JavaScript Runtime
Version: @V8_VERSION@
Libs: -L${libdir} -lv8
Cflags: -I${includedir}

File diff suppressed because it is too large Load Diff