Import from CS git

eabdullin 2025-11-05 08:05:34 +00:00
parent 133b954d80
commit 3d1570a8b5
20 changed files with 3849 additions and 1256 deletions

.gitignore

@@ -1,2 +1,4 @@
SOURCES/icu4c-64_2-src.tgz
SOURCES/node-v10.24.0-stripped.tar.gz
SOURCES/icu4c-77_1-data-bin-b.zip
SOURCES/icu4c-77_1-data-bin-l.zip
SOURCES/node-v24.4.1-stripped.tar.gz
SOURCES/packaging-scripts.tar.gz


@@ -1,2 +1,4 @@
3127155ecf2b75ab4835f501b7478e39c07bb852 SOURCES/icu4c-64_2-src.tgz
be0e0b385a852c376f452b3d94727492e05407e4 SOURCES/node-v10.24.0-stripped.tar.gz
c459faa36dedc60af6a0c6d5b9b84b6198389bf0 SOURCES/icu4c-77_1-data-bin-b.zip
c602459f93a43dfe7440686b46430e93a85dfc06 SOURCES/icu4c-77_1-data-bin-l.zip
1a7cc2ee75998e4207071e177ed9e870b3c1f2b7 SOURCES/node-v24.4.1-stripped.tar.gz
4dc40674c8b9a94ba977ea7b018f9a92c84b9b53 SOURCES/packaging-scripts.tar.gz


@@ -1,31 +0,0 @@
From 2cd4c12776af3da588231d3eb498e6451c30eae5 Mon Sep 17 00:00:00 2001
From: Zuzana Svetlikova <zsvetlik@redhat.com>
Date: Thu, 27 Apr 2017 14:25:42 +0200
Subject: [PATCH] Disable running gyp on shared deps
Signed-off-by: rpm-build <rpm-build>
---
Makefile | 7 +++----
1 file changed, 3 insertions(+), 4 deletions(-)
diff --git a/Makefile b/Makefile
index 73feb4c..45bbceb 100644
--- a/Makefile
+++ b/Makefile
@@ -123,10 +123,9 @@ with-code-cache:
test-code-cache: with-code-cache
$(PYTHON) tools/test.py $(PARALLEL_ARGS) --mode=$(BUILDTYPE_LOWER) code-cache
-out/Makefile: common.gypi deps/uv/uv.gyp deps/http_parser/http_parser.gyp \
- deps/zlib/zlib.gyp deps/v8/gypfiles/toolchain.gypi \
- deps/v8/gypfiles/features.gypi deps/v8/gypfiles/v8.gyp node.gyp \
- config.gypi
+out/Makefile: common.gypi deps/http_parser/http_parser.gyp \
+ deps/v8/gypfiles/toolchain.gypi deps/v8/gypfiles/features.gypi \
+ deps/v8/gypfiles/v8.gyp node.gyp config.gypi
$(PYTHON) tools/gyp_node.py -f make
config.gypi: configure configure.py
--
2.26.2


@@ -0,0 +1,46 @@
From e93d9b5fdcd8e5744de629461c03a07de2252f8f Mon Sep 17 00:00:00 2001
From: Stephen Gallagher <sgallagh@redhat.com>
Date: Fri, 17 Apr 2020 12:59:44 +0200
Subject: [PATCH] Remove unused OpenSSL config
The build process will try to create these config files even when
using the system OpenSSL, and will thus fail since we strip this path
from the tarball.
Signed-off-by: Stephen Gallagher <sgallagh@redhat.com>
Signed-off-by: rpm-build <rpm-build>
---
node.gyp | 17 -----------------
1 file changed, 17 deletions(-)
diff --git a/node.gyp b/node.gyp
index 1147495..da6ea50 100644
--- a/node.gyp
+++ b/node.gyp
@@ -822,23 +822,6 @@
],
},
],
- }, {
- 'variables': {
- 'opensslconfig_internal': '<(obj_dir)/deps/openssl/openssl.cnf',
- 'opensslconfig': './deps/openssl/nodejs-openssl.cnf',
- },
- 'actions': [
- {
- 'action_name': 'reset_openssl_cnf',
- 'inputs': [ '<(opensslconfig)', ],
- 'outputs': [ '<(opensslconfig_internal)', ],
- 'action': [
- '<(python)', 'tools/copyfile.py',
- '<(opensslconfig)',
- '<(opensslconfig_internal)',
- ],
- },
- ],
}],
],
}, # node_core_target_name
--
2.47.0


@@ -1,84 +0,0 @@
From e7afb2d6e2a6c8f9c9c32e12a10c3c5c4902a251 Mon Sep 17 00:00:00 2001
From: Stephen Gallagher <sgallagh@redhat.com>
Date: Tue, 1 May 2018 08:05:30 -0400
Subject: [PATCH] Suppress NPM message to run global update
Signed-off-by: Stephen Gallagher <sgallagh@redhat.com>
Signed-off-by: rpm-build <rpm-build>
---
deps/npm/bin/npm-cli.js | 54 -----------------------------------------
1 file changed, 54 deletions(-)
diff --git a/deps/npm/bin/npm-cli.js b/deps/npm/bin/npm-cli.js
index c0d9be0..0f0892e 100755
--- a/deps/npm/bin/npm-cli.js
+++ b/deps/npm/bin/npm-cli.js
@@ -71,65 +71,11 @@
npm.command = 'help'
}
- var isGlobalNpmUpdate = conf.global && ['install', 'update'].includes(npm.command) && npm.argv.includes('npm')
-
// now actually fire up npm and run the command.
// this is how to use npm programmatically:
conf._exit = true
npm.load(conf, function (er) {
if (er) return errorHandler(er)
- if (
- !isGlobalNpmUpdate &&
- npm.config.get('update-notifier') &&
- !unsupported.checkVersion(process.version).unsupported
- ) {
- const pkg = require('../package.json')
- let notifier = require('update-notifier')({pkg})
- const isCI = require('ci-info').isCI
- if (
- notifier.update &&
- notifier.update.latest !== pkg.version &&
- !isCI
- ) {
- const color = require('ansicolors')
- const useColor = npm.config.get('color')
- const useUnicode = npm.config.get('unicode')
- const old = notifier.update.current
- const latest = notifier.update.latest
- let type = notifier.update.type
- if (useColor) {
- switch (type) {
- case 'major':
- type = color.red(type)
- break
- case 'minor':
- type = color.yellow(type)
- break
- case 'patch':
- type = color.green(type)
- break
- }
- }
- const changelog = `https://github.com/npm/cli/releases/tag/v${latest}`
- notifier.notify({
- message: `New ${type} version of ${pkg.name} available! ${
- useColor ? color.red(old) : old
- } ${useUnicode ? '→' : '->'} ${
- useColor ? color.green(latest) : latest
- }\n` +
- `${
- useColor ? color.yellow('Changelog:') : 'Changelog:'
- } ${
- useColor ? color.cyan(changelog) : changelog
- }\n` +
- `Run ${
- useColor
- ? color.green(`npm install -g ${pkg.name}`)
- : `npm i -g ${pkg.name}`
- } to update!`
- })
- }
- }
npm.commands[npm.command](npm.argv, function (err) {
// https://genius.com/Lin-manuel-miranda-your-obedient-servant-lyrics
if (
--
2.26.2


@@ -0,0 +1,84 @@
From 98738d27288bd9ca634e29181ef665e812e7bbd3 Mon Sep 17 00:00:00 2001
From: Michael Dawson <midawson@redhat.com>
Date: Fri, 23 Feb 2024 13:43:56 +0100
Subject: [PATCH] Disable FIPS options
On RHEL, FIPS should be configured only at the system level.
Additionally, the related options may cause a segfault when used on RHEL.
This patch makes option processing end before the problematic code
is executed. The JS-level options for changing FIPS settings are
likewise disabled.
Upstream report: https://github.com/nodejs/node/pull/48950
RHBZ: https://bugzilla.redhat.com/show_bug.cgi?id=2226726
---
lib/crypto.js | 10 ++++++++++
lib/internal/errors.js | 6 ++++++
src/crypto/crypto_util.cc | 2 ++
3 files changed, 18 insertions(+)
diff --git a/lib/crypto.js b/lib/crypto.js
index 41adecc..b2627ac 100644
--- a/lib/crypto.js
+++ b/lib/crypto.js
@@ -36,7 +36,10 @@ const {
assertCrypto();
const {
+ // RHEL specific error
+ ERR_CRYPTO_FIPS_SYSTEM_CONTROLLED,
+
ERR_CRYPTO_FIPS_FORCED,
ERR_WORKER_UNSUPPORTED_OPERATION,
} = require('internal/errors').codes;
const constants = internalBinding('constants').crypto;
@@ -251,6 +254,13 @@ function getFips() {
}
function setFips(val) {
+ // in RHEL FIPS enable/disable should only be done at system level
+ if (getFips() != val) {
+ throw new ERR_CRYPTO_FIPS_SYSTEM_CONTROLLED();
+ } else {
+ return;
+ }
+
if (getOptionValue('--force-fips')) {
if (val) return;
throw new ERR_CRYPTO_FIPS_FORCED();
diff --git a/lib/internal/errors.js b/lib/internal/errors.js
index a722360..04d8a53 100644
--- a/lib/internal/errors.js
+++ b/lib/internal/errors.js
@@ -1111,6 +1111,12 @@ module.exports = {
//
// Note: Node.js specific errors must begin with the prefix ERR_
+// insert RHEL specific error
+E('ERR_CRYPTO_FIPS_SYSTEM_CONTROLLED',
+ 'Cannot set FIPS mode. FIPS should be enabled/disabled at system level. See ' +
+ 'https://access.redhat.com/documentation/en-us/red_hat_enterprise_linux/9/html/security_hardening/assembly_installing-the-system-in-fips-mode_security-hardening for more details.\n',
+ Error);
+
E('ERR_ACCESS_DENIED',
function(msg, permission = '', resource = '') {
this.permission = permission;
diff --git a/src/crypto/crypto_util.cc b/src/crypto/crypto_util.cc
index 5734d8f..ef9d1b1 100644
--- a/src/crypto/crypto_util.cc
+++ b/src/crypto/crypto_util.cc
@@ -86,6 +86,8 @@ bool ProcessFipsOptions() {
/* Override FIPS settings in configuration file, if needed. */
if (per_process::cli_options->enable_fips_crypto ||
per_process::cli_options->force_fips_crypto) {
+ fprintf(stderr, "ERROR: Using options related to FIPS is not recommended, configure FIPS in openssl instead. See https://access.redhat.com/documentation/en-us/red_hat_enterprise_linux/9/html/security_hardening/assembly_installing-the-system-in-fips-mode_security-hardening for more details.\n");
+ return false;
#if OPENSSL_VERSION_MAJOR >= 3
if (!ncrypto::testFipsEnabled()) return false;
return ncrypto::setFipsEnabled(true, nullptr);
--
2.43.2
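For orientation, a rough sketch of the behaviour this patch enforces once a FIPS-related flag reaches the patched binary; the error text is taken verbatim from the crypto_util.cc hunk above, while the exact exit path is an assumption based on ProcessFipsOptions() returning false:

$ node --enable-fips -e 'console.log("never reached")'
ERROR: Using options related to FIPS is not recommended, configure FIPS in openssl instead. See https://access.redhat.com/documentation/en-us/red_hat_enterprise_linux/9/html/security_hardening/assembly_installing-the-system-in-fips-mode_security-hardening for more details.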


@@ -1,122 +0,0 @@
From 0028cc74dac4dd24b8599ade85cb49fdafa9f559 Mon Sep 17 00:00:00 2001
From: Stephen Gallagher <sgallagh@redhat.com>
Date: Fri, 6 Dec 2019 16:40:25 -0500
Subject: [PATCH] build: auto-load ICU data from --with-icu-default-data-dir
When compiled with `--with-intl=small` and
`--with-icu-default-data-dir=PATH`, Node.js will use PATH as a
fallback location for the ICU data.
We will first perform an access check using fopen(PATH, 'r') to
ensure that the file is readable. If it is, we'll set the
icu_data_directory and proceed. There's a slight overhead for the
fopen() check, but it should be barely measurable.
This will be useful for Linux distribution packagers who want to
be able to ship a minimal node binary in a container image but
also be able to add on the full i18n support where needed. With
this patch, it becomes possible to ship the interpreter as
/usr/bin/node in one package for the distribution and to ship the
data files in another package (without a strict dependency
between the two). This means that users of the distribution will
not need to explicitly direct Node.js to locate the ICU data. It
also means that in environments where full internationalization is
not required, they do not need to carry the extra content (with
the associated storage costs).
Refs: https://github.com/nodejs/node/issues/3460
Signed-off-by: Stephen Gallagher <sgallagh@redhat.com>
Signed-off-by: rpm-build <rpm-build>
---
configure.py | 9 +++++++++
node.gypi | 7 +++++++
src/node.cc | 20 ++++++++++++++++++++
3 files changed, 36 insertions(+)
diff --git a/configure.py b/configure.py
index 89f7bf5..d611a88 100755
--- a/configure.py
+++ b/configure.py
@@ -433,6 +433,14 @@ intl_optgroup.add_option('--with-icu-source',
'the icu4c source archive. '
'v%d.x or later recommended.' % icu_versions['minimum_icu'])
+intl_optgroup.add_option('--with-icu-default-data-dir',
+ action='store',
+ dest='with_icu_default_data_dir',
+ help='Path to the icuXXdt{lb}.dat file. If unspecified, ICU data will '
+ 'only be read if the NODE_ICU_DATA environment variable or the '
+ '--icu-data-dir runtime argument is used. This option has effect '
+ 'only when Node.js is built with --with-intl=small-icu.')
+
parser.add_option('--with-ltcg',
action='store_true',
dest='with_ltcg',
@@ -1359,6 +1367,7 @@ def configure_intl(o):
locs.add('root') # must have root
o['variables']['icu_locales'] = string.join(locs,',')
# We will check a bit later if we can use the canned deps/icu-small
+ o['variables']['icu_default_data'] = options.with_icu_default_data_dir or ''
elif with_intl == 'full-icu':
# full ICU
o['variables']['v8_enable_i18n_support'] = 1
diff --git a/node.gypi b/node.gypi
index 466a174..65b97d6 100644
--- a/node.gypi
+++ b/node.gypi
@@ -113,6 +113,13 @@
'conditions': [
[ 'icu_small=="true"', {
'defines': [ 'NODE_HAVE_SMALL_ICU=1' ],
+ 'conditions': [
+ [ 'icu_default_data!=""', {
+ 'defines': [
+ 'NODE_ICU_DEFAULT_DATA_DIR="<(icu_default_data)"',
+ ],
+ }],
+ ],
}]],
}],
[ 'node_use_bundled_v8=="true" and \
diff --git a/src/node.cc b/src/node.cc
index 7c01187..c9840e3 100644
--- a/src/node.cc
+++ b/src/node.cc
@@ -92,6 +92,7 @@
#if defined(NODE_HAVE_I18N_SUPPORT)
#include <unicode/uvernum.h>
+#include <unicode/utypes.h>
#endif
#if defined(LEAK_SANITIZER)
@@ -2643,6 +2644,25 @@ void Init(std::vector<std::string>* argv,
// If the parameter isn't given, use the env variable.
if (per_process_opts->icu_data_dir.empty())
SafeGetenv("NODE_ICU_DATA", &per_process_opts->icu_data_dir);
+
+#ifdef NODE_ICU_DEFAULT_DATA_DIR
+ // If neither the CLI option nor the environment variable was specified,
+ // fall back to the configured default
+ if (per_process_opts->icu_data_dir.empty()) {
+ // Check whether the NODE_ICU_DEFAULT_DATA_DIR contains the right data
+ // file and can be read.
+ static const char full_path[] =
+ NODE_ICU_DEFAULT_DATA_DIR "/" U_ICUDATA_NAME ".dat";
+
+ FILE* f = fopen(full_path, "rb");
+
+ if (f != nullptr) {
+ fclose(f);
+ per_process_opts->icu_data_dir = NODE_ICU_DEFAULT_DATA_DIR;
+ }
+ }
+#endif // NODE_ICU_DEFAULT_DATA_DIR
+
// Initialize ICU.
// If icu_data_dir is empty here, it will load the 'minimal' data.
if (!i18n::InitializeICUDirectory(per_process_opts->icu_data_dir)) {
--
2.26.2


@@ -1,13 +0,0 @@
diff --git a/deps/npm/node_modules/y18n/index.js b/deps/npm/node_modules/y18n/index.js
index d720681628..727362aac0 100644
--- a/deps/npm/node_modules/y18n/index.js
+++ b/deps/npm/node_modules/y18n/index.js
@@ -11,7 +11,7 @@ function Y18N (opts) {
this.fallbackToLanguage = typeof opts.fallbackToLanguage === 'boolean' ? opts.fallbackToLanguage : true
// internal stuff.
- this.cache = {}
+ this.cache = Object.create(null)
this.writeQueue = []
}


@@ -0,0 +1,265 @@
From 4208b7849eeee5c2aa76d692e2624bd80422057d Mon Sep 17 00:00:00 2001
From: John Platts <john_platts@hotmail.com>
Date: Fri, 17 Jan 2025 12:16:49 -0600
Subject: [PATCH] v8(highway): Fix for GCC 15 compiler error on PPC8/PPC9/PPC10
Signed-off-by: rpm-build <rpm-build>
---
.../highway/src/hwy/ops/ppc_vsx-inl.h | 167 +++++++++++-------
1 file changed, 103 insertions(+), 64 deletions(-)
diff --git a/deps/v8/third_party/highway/src/hwy/ops/ppc_vsx-inl.h b/deps/v8/third_party/highway/src/hwy/ops/ppc_vsx-inl.h
index d216c54..73e736e 100644
--- a/deps/v8/third_party/highway/src/hwy/ops/ppc_vsx-inl.h
+++ b/deps/v8/third_party/highway/src/hwy/ops/ppc_vsx-inl.h
@@ -3701,16 +3701,73 @@ static HWY_INLINE V VsxF2INormalizeSrcVals(V v) {
#endif
}
+template <class VF32>
+static HWY_INLINE HWY_MAYBE_UNUSED VFromD<Repartition<int64_t, DFromV<VF32>>>
+VsxXvcvspsxds(VF32 vf32) {
+ using VI64 = VFromD<Repartition<int64_t, DFromV<VF32>>>;
+#if (HWY_COMPILER_GCC_ACTUAL && HWY_COMPILER_GCC_ACTUAL < 1500) || \
+ HWY_HAS_BUILTIN(__builtin_vsx_xvcvspsxds)
+ // Use __builtin_vsx_xvcvspsxds if it is available (which is the case with
+ // GCC 4.8 through GCC 14 or Clang 13 or later on PPC8/PPC9/PPC10)
+ return VI64{__builtin_vsx_xvcvspsxds(vf32.raw)};
+#elif HWY_COMPILER_GCC_ACTUAL >= 1500 && HWY_IS_LITTLE_ENDIAN
+ // On little-endian PPC8/PPC9/PPC10 with GCC 15 or later, use the F32->I64
+ // vec_signedo intrinsic as the __builtin_vsx_xvcvspsxds intrinsic has been
+ // removed from GCC in GCC 15
+ return VI64{vec_signedo(vf32.raw)};
+#elif HWY_COMPILER_GCC_ACTUAL >= 1500 && HWY_IS_BIG_ENDIAN
+ // On big-endian PPC8/PPC9/PPC10 with GCC 15 or later, use the F32->I64
+ // vec_signede intrinsic as the __builtin_vsx_xvcvspsxds intrinsic has been
+ // removed from GCC in GCC 15
+ return VI64{vec_signede(vf32.raw)};
+#else
+ // Inline assembly fallback for older versions of Clang that do not have the
+ // __builtin_vsx_xvcvspsxds intrinsic
+ __vector signed long long raw_result;
+ __asm__("xvcvspsxds %x0, %x1" : "=wa"(raw_result) : "wa"(vf32.raw) :);
+ return VI64{raw_result};
+#endif
+}
+
+template <class VF32>
+static HWY_INLINE HWY_MAYBE_UNUSED VFromD<Repartition<uint64_t, DFromV<VF32>>>
+VsxXvcvspuxds(VF32 vf32) {
+ using VU64 = VFromD<Repartition<uint64_t, DFromV<VF32>>>;
+#if (HWY_COMPILER_GCC_ACTUAL && HWY_COMPILER_GCC_ACTUAL < 1500) || \
+ HWY_HAS_BUILTIN(__builtin_vsx_xvcvspuxds)
+ // Use __builtin_vsx_xvcvspuxds if it is available (which is the case with
+ // GCC 4.8 through GCC 14 or Clang 13 or later on PPC8/PPC9/PPC10)
+ return VU64{reinterpret_cast<__vector unsigned long long>(
+ __builtin_vsx_xvcvspuxds(vf32.raw))};
+#elif HWY_COMPILER_GCC_ACTUAL >= 1500 && HWY_IS_LITTLE_ENDIAN
+ // On little-endian PPC8/PPC9/PPC10 with GCC 15 or later, use the F32->U64
+ // vec_unsignedo intrinsic as the __builtin_vsx_xvcvspuxds intrinsic has been
+ // removed from GCC in GCC 15
+ return VU64{vec_unsignedo(vf32.raw)};
+#elif HWY_COMPILER_GCC_ACTUAL >= 1500 && HWY_IS_BIG_ENDIAN
+ // On big-endian PPC8/PPC9/PPC10 with GCC 15 or later, use the F32->U64
+ // vec_unsignedo intrinsic as the __builtin_vsx_xvcvspuxds intrinsic has been
+ // removed from GCC in GCC 15
+ return VU64{vec_unsignede(vf32.raw)};
+#else
+ // Inline assembly fallback for older versions of Clang that do not have the
+ // __builtin_vsx_xvcvspuxds intrinsic
+ __vector unsigned long long raw_result;
+ __asm__("xvcvspuxds %x0, %x1" : "=wa"(raw_result) : "wa"(vf32.raw) :);
+ return VU64{raw_result};
+#endif
+}
+
} // namespace detail
#endif // !HWY_S390X_HAVE_Z14
template <class D, HWY_IF_I64_D(D)>
HWY_API VFromD<D> PromoteTo(D di64, VFromD<Rebind<float, D>> v) {
-#if !HWY_S390X_HAVE_Z14 && \
- (HWY_COMPILER_GCC_ACTUAL || HWY_HAS_BUILTIN(__builtin_vsx_xvcvspsxds))
- const __vector float raw_v =
- detail::VsxF2INormalizeSrcVals(InterleaveLower(v, v)).raw;
- return VFromD<decltype(di64)>{__builtin_vsx_xvcvspsxds(raw_v)};
+#if !HWY_S390X_HAVE_Z14
+ const Repartition<float, decltype(di64)> dt_f32;
+ const auto vt_f32 = ResizeBitCast(dt_f32, v);
+ return detail::VsxXvcvspsxds(
+ detail::VsxF2INormalizeSrcVals(InterleaveLower(vt_f32, vt_f32)));
#else
const RebindToFloat<decltype(di64)> df64;
return ConvertTo(di64, PromoteTo(df64, v));
@@ -3719,12 +3776,11 @@ HWY_API VFromD<D> PromoteTo(D di64, VFromD<Rebind<float, D>> v) {
template <class D, HWY_IF_U64_D(D)>
HWY_API VFromD<D> PromoteTo(D du64, VFromD<Rebind<float, D>> v) {
-#if !HWY_S390X_HAVE_Z14 && \
- (HWY_COMPILER_GCC_ACTUAL || HWY_HAS_BUILTIN(__builtin_vsx_xvcvspuxds))
- const __vector float raw_v =
- detail::VsxF2INormalizeSrcVals(InterleaveLower(v, v)).raw;
- return VFromD<decltype(du64)>{reinterpret_cast<__vector unsigned long long>(
- __builtin_vsx_xvcvspuxds(raw_v))};
+#if !HWY_S390X_HAVE_Z14
+ const Repartition<float, decltype(du64)> dt_f32;
+ const auto vt_f32 = ResizeBitCast(dt_f32, v);
+ return detail::VsxXvcvspuxds(
+ detail::VsxF2INormalizeSrcVals(InterleaveLower(vt_f32, vt_f32)));
#else
const RebindToFloat<decltype(du64)> df64;
return ConvertTo(du64, PromoteTo(df64, v));
@@ -3829,12 +3885,10 @@ HWY_API VFromD<D> PromoteUpperTo(D df64, Vec128<uint32_t> v) {
template <class D, HWY_IF_V_SIZE_D(D, 16), HWY_IF_I64_D(D)>
HWY_API VFromD<D> PromoteUpperTo(D di64, Vec128<float> v) {
-#if !HWY_S390X_HAVE_Z14 && \
- (HWY_COMPILER_GCC_ACTUAL || HWY_HAS_BUILTIN(__builtin_vsx_xvcvspsxds))
- const __vector float raw_v =
- detail::VsxF2INormalizeSrcVals(InterleaveUpper(Full128<float>(), v, v))
- .raw;
- return VFromD<decltype(di64)>{__builtin_vsx_xvcvspsxds(raw_v)};
+#if !HWY_S390X_HAVE_Z14
+ (void)di64;
+ return detail::VsxXvcvspsxds(
+ detail::VsxF2INormalizeSrcVals(InterleaveUpper(Full128<float>(), v, v)));
#else
const RebindToFloat<decltype(di64)> df64;
return ConvertTo(di64, PromoteUpperTo(df64, v));
@@ -3843,13 +3897,10 @@ HWY_API VFromD<D> PromoteUpperTo(D di64, Vec128<float> v) {
template <class D, HWY_IF_V_SIZE_D(D, 16), HWY_IF_U64_D(D)>
HWY_API VFromD<D> PromoteUpperTo(D du64, Vec128<float> v) {
-#if !HWY_S390X_HAVE_Z14 && \
- (HWY_COMPILER_GCC_ACTUAL || HWY_HAS_BUILTIN(__builtin_vsx_xvcvspuxds))
- const __vector float raw_v =
- detail::VsxF2INormalizeSrcVals(InterleaveUpper(Full128<float>(), v, v))
- .raw;
- return VFromD<decltype(du64)>{reinterpret_cast<__vector unsigned long long>(
- __builtin_vsx_xvcvspuxds(raw_v))};
+#if !HWY_S390X_HAVE_Z14
+ (void)du64;
+ return detail::VsxXvcvspuxds(
+ detail::VsxF2INormalizeSrcVals(InterleaveUpper(Full128<float>(), v, v)));
#else
const RebindToFloat<decltype(du64)> df64;
return ConvertTo(du64, PromoteUpperTo(df64, v));
@@ -3937,20 +3988,18 @@ HWY_INLINE VFromD<D> PromoteEvenTo(hwy::SignedTag /*to_type_tag*/,
hwy::SizeTag<8> /*to_lane_size_tag*/,
hwy::FloatTag /*from_type_tag*/, D d_to,
V v) {
-#if !HWY_S390X_HAVE_Z14 && \
- (HWY_COMPILER_GCC_ACTUAL || HWY_HAS_BUILTIN(__builtin_vsx_xvcvspsxds))
+#if !HWY_S390X_HAVE_Z14
(void)d_to;
const auto normalized_v = detail::VsxF2INormalizeSrcVals(v);
#if HWY_IS_LITTLE_ENDIAN
- // __builtin_vsx_xvcvspsxds expects the source values to be in the odd lanes
- // on little-endian PPC, and the vec_sld operation below will shift the even
+ // VsxXvcvspsxds expects the source values to be in the odd lanes on
+ // little-endian PPC, and the Shuffle2103 operation below will shift the even
// lanes of normalized_v into the odd lanes.
- return VFromD<D>{
- __builtin_vsx_xvcvspsxds(vec_sld(normalized_v.raw, normalized_v.raw, 4))};
+ return VsxXvcvspsxds(Shuffle2103(normalized_v));
#else
- // __builtin_vsx_xvcvspsxds expects the source values to be in the even lanes
- // on big-endian PPC.
- return VFromD<D>{__builtin_vsx_xvcvspsxds(normalized_v.raw)};
+ // VsxXvcvspsxds expects the source values to be in the even lanes on
+ // big-endian PPC.
+ return VsxXvcvspsxds(normalized_v);
#endif
#else
const RebindToFloat<decltype(d_to)> df64;
@@ -3965,22 +4014,18 @@ HWY_INLINE VFromD<D> PromoteEvenTo(hwy::UnsignedTag /*to_type_tag*/,
hwy::SizeTag<8> /*to_lane_size_tag*/,
hwy::FloatTag /*from_type_tag*/, D d_to,
V v) {
-#if !HWY_S390X_HAVE_Z14 && \
- (HWY_COMPILER_GCC_ACTUAL || HWY_HAS_BUILTIN(__builtin_vsx_xvcvspuxds))
+#if !HWY_S390X_HAVE_Z14
(void)d_to;
const auto normalized_v = detail::VsxF2INormalizeSrcVals(v);
#if HWY_IS_LITTLE_ENDIAN
- // __builtin_vsx_xvcvspuxds expects the source values to be in the odd lanes
- // on little-endian PPC, and the vec_sld operation below will shift the even
- // lanes of normalized_v into the odd lanes.
- return VFromD<D>{
- reinterpret_cast<__vector unsigned long long>(__builtin_vsx_xvcvspuxds(
- vec_sld(normalized_v.raw, normalized_v.raw, 4)))};
+ // VsxXvcvspuxds expects the source values to be in the odd lanes
+ // on little-endian PPC, and the Shuffle2103 operation below will shift the
+ // even lanes of normalized_v into the odd lanes.
+ return VsxXvcvspuxds(Shuffle2103(normalized_v));
#else
- // __builtin_vsx_xvcvspuxds expects the source values to be in the even lanes
+ // VsxXvcvspuxds expects the source values to be in the even lanes
// on big-endian PPC.
- return VFromD<D>{reinterpret_cast<__vector unsigned long long>(
- __builtin_vsx_xvcvspuxds(normalized_v.raw))};
+ return VsxXvcvspuxds(normalized_v);
#endif
#else
const RebindToFloat<decltype(d_to)> df64;
@@ -4022,20 +4067,18 @@ HWY_INLINE VFromD<D> PromoteOddTo(hwy::SignedTag /*to_type_tag*/,
hwy::SizeTag<8> /*to_lane_size_tag*/,
hwy::FloatTag /*from_type_tag*/, D d_to,
V v) {
-#if !HWY_S390X_HAVE_Z14 && \
- (HWY_COMPILER_GCC_ACTUAL || HWY_HAS_BUILTIN(__builtin_vsx_xvcvspsxds))
+#if !HWY_S390X_HAVE_Z14
(void)d_to;
const auto normalized_v = detail::VsxF2INormalizeSrcVals(v);
#if HWY_IS_LITTLE_ENDIAN
- // __builtin_vsx_xvcvspsxds expects the source values to be in the odd lanes
+ // VsxXvcvspsxds expects the source values to be in the odd lanes
// on little-endian PPC
- return VFromD<D>{__builtin_vsx_xvcvspsxds(normalized_v.raw)};
+ return VsxXvcvspsxds(normalized_v);
#else
- // __builtin_vsx_xvcvspsxds expects the source values to be in the even lanes
- // on big-endian PPC, and the vec_sld operation below will shift the odd lanes
- // of normalized_v into the even lanes.
- return VFromD<D>{
- __builtin_vsx_xvcvspsxds(vec_sld(normalized_v.raw, normalized_v.raw, 4))};
+ // VsxXvcvspsxds expects the source values to be in the even lanes
+ // on big-endian PPC, and the Shuffle0321 operation below will shift the odd
+ // lanes of normalized_v into the even lanes.
+ return VsxXvcvspsxds(Shuffle0321(normalized_v));
#endif
#else
const RebindToFloat<decltype(d_to)> df64;
@@ -4050,22 +4093,18 @@ HWY_INLINE VFromD<D> PromoteOddTo(hwy::UnsignedTag /*to_type_tag*/,
hwy::SizeTag<8> /*to_lane_size_tag*/,
hwy::FloatTag /*from_type_tag*/, D d_to,
V v) {
-#if !HWY_S390X_HAVE_Z14 && \
- (HWY_COMPILER_GCC_ACTUAL || HWY_HAS_BUILTIN(__builtin_vsx_xvcvspuxds))
+#if !HWY_S390X_HAVE_Z14
(void)d_to;
const auto normalized_v = detail::VsxF2INormalizeSrcVals(v);
#if HWY_IS_LITTLE_ENDIAN
- // __builtin_vsx_xvcvspuxds expects the source values to be in the odd lanes
+ // VsxXvcvspuxds expects the source values to be in the odd lanes
// on little-endian PPC
- return VFromD<D>{reinterpret_cast<__vector unsigned long long>(
- __builtin_vsx_xvcvspuxds(normalized_v.raw))};
+ return VsxXvcvspuxds(normalized_v);
#else
- // __builtin_vsx_xvcvspuxds expects the source values to be in the even lanes
- // on big-endian PPC, and the vec_sld operation below will shift the odd lanes
- // of normalized_v into the even lanes.
- return VFromD<D>{
- reinterpret_cast<__vector unsigned long long>(__builtin_vsx_xvcvspuxds(
- vec_sld(normalized_v.raw, normalized_v.raw, 4)))};
+ // VsxXvcvspuxds expects the source values to be in the even lanes
+ // on big-endian PPC, and the Shuffle0321 operation below will shift the odd
+ // lanes of normalized_v into the even lanes.
+ return VsxXvcvspuxds(Shuffle0321(normalized_v));
#endif
#else
const RebindToFloat<decltype(d_to)> df64;
--
2.50.0


@@ -1,189 +0,0 @@
#!/bin/sh
# Uses Argbash to generate command argument parsing. To update
# arguments, make sure to call
# `argbash nodejs-tarball.sh -o nodejs-tarball.sh`
# ARG_POSITIONAL_SINGLE([version],[Node.js release version],[""])
# ARG_DEFAULTS_POS([])
# ARG_HELP([Tool to aid in Node.js packaging of new releases])
# ARGBASH_GO()
# needed because of Argbash --> m4_ignore([
### START OF CODE GENERATED BY Argbash v2.8.1 one line above ###
# Argbash is a bash code generator used to get arguments parsing right.
# Argbash is FREE SOFTWARE, see https://argbash.io for more info
die()
{
local _ret=$2
test -n "$_ret" || _ret=1
test "$_PRINT_HELP" = yes && print_help >&2
echo "$1" >&2
exit ${_ret}
}
begins_with_short_option()
{
local first_option all_short_options='h'
first_option="${1:0:1}"
test "$all_short_options" = "${all_short_options/$first_option/}" && return 1 || return 0
}
# THE DEFAULTS INITIALIZATION - POSITIONALS
_positionals=()
_arg_version=""
# THE DEFAULTS INITIALIZATION - OPTIONALS
print_help()
{
printf '%s\n' "Tool to aid in Node.js packaging of new releases"
printf 'Usage: %s [-h|--help] [<version>]\n' "$0"
printf '\t%s\n' "<version>: Node.js release version (default: '""')"
printf '\t%s\n' "-h, --help: Prints help"
}
parse_commandline()
{
_positionals_count=0
while test $# -gt 0
do
_key="$1"
case "$_key" in
-h|--help)
print_help
exit 0
;;
-h*)
print_help
exit 0
;;
*)
_last_positional="$1"
_positionals+=("$_last_positional")
_positionals_count=$((_positionals_count + 1))
;;
esac
shift
done
}
handle_passed_args_count()
{
test "${_positionals_count}" -le 1 || _PRINT_HELP=yes die "FATAL ERROR: There were spurious positional arguments --- we expect between 0 and 1, but got ${_positionals_count} (the last one was: '${_last_positional}')." 1
}
assign_positional_args()
{
local _positional_name _shift_for=$1
_positional_names="_arg_version "
shift "$_shift_for"
for _positional_name in ${_positional_names}
do
test $# -gt 0 || break
eval "$_positional_name=\${1}" || die "Error during argument parsing, possibly an Argbash bug." 1
shift
done
}
parse_commandline "$@"
handle_passed_args_count
assign_positional_args 1 "${_positionals[@]}"
# OTHER STUFF GENERATED BY Argbash
### END OF CODE GENERATED BY Argbash (sortof) ### ])
# [ <-- needed because of Argbash
set -e
echo $_arg_version
if [ x$_arg_version != x ]; then
version=$_arg_version
else
version=$(rpm -q --specfile --qf='%{version}\n' nodejs.spec | head -n1)
fi
rm -f node-v${version}.tar.gz node-v${version}-stripped.tar.gz
wget http://nodejs.org/dist/v${version}/node-v${version}.tar.gz \
http://nodejs.org/dist/v${version}/SHASUMS256.txt
sha256sum -c SHASUMS256.txt --ignore-missing
tar -zxf node-v${version}.tar.gz
rm -rf node-v${version}/deps/openssl
tar -zcf node-v${version}-stripped.tar.gz node-v${version}
# Download the matching version of ICU
rm -f icu4c*-src.tgz icu.md5
ICUMD5=$(cat node-v${version}/tools/icu/current_ver.dep |jq -r '.[0].md5')
wget $(cat node-v${version}/tools/icu/current_ver.dep |jq -r '.[0].url')
ICUTARBALL=$(ls -1 icu4c*-src.tgz)
echo "$ICUMD5 $ICUTARBALL" > icu.md5
md5sum -c icu.md5
rm -f icu.md5 SHASUMS256.txt
rhpkg new-sources node-v${version}-stripped.tar.gz icu4c*-src.tgz
rm -f node-v${version}.tar.gz
set +e
# Determine the bundled versions of the various packages
echo "Bundled software versions"
echo "-------------------------"
echo
echo "libnode shared object version"
echo "========================="
grep "define NODE_MODULE_VERSION" node-v${version}/src/node_version.h
echo
echo "V8"
echo "========================="
grep "define V8_MAJOR_VERSION" node-v${version}/deps/v8/include/v8-version.h
grep "define V8_MINOR_VERSION" node-v${version}/deps/v8/include/v8-version.h
grep "define V8_BUILD_NUMBER" node-v${version}/deps/v8/include/v8-version.h
grep "define V8_PATCH_LEVEL" node-v${version}/deps/v8/include/v8-version.h
echo
echo "c-ares"
echo "========================="
grep "define ARES_VERSION_MAJOR" node-v${version}/deps/cares/include/ares_version.h
grep "define ARES_VERSION_MINOR" node-v${version}/deps/cares/include/ares_version.h
grep "define ARES_VERSION_PATCH" node-v${version}/deps/cares/include/ares_version.h
echo
echo "http-parser"
echo "========================="
grep "define HTTP_PARSER_VERSION_MAJOR" node-v${version}/deps/http_parser/http_parser.h
grep "define HTTP_PARSER_VERSION_MINOR" node-v${version}/deps/http_parser/http_parser.h
grep "define HTTP_PARSER_VERSION_PATCH" node-v${version}/deps/http_parser/http_parser.h
echo
echo "libuv"
echo "========================="
grep "define UV_VERSION_MAJOR" node-v${version}/deps/uv/include/uv/version.h
grep "define UV_VERSION_MINOR" node-v${version}/deps/uv/include/uv/version.h
grep "define UV_VERSION_PATCH" node-v${version}/deps/uv/include/uv/version.h
echo
echo "nghttp2"
echo "========================="
grep "define NGHTTP2_VERSION " node-v${version}/deps/nghttp2/lib/includes/nghttp2/nghttp2ver.h
echo
echo "ICU"
echo "========================="
grep "url" node-v${version}/tools/icu/current_ver.dep
echo
echo "punycode"
echo "========================="
grep "'version'" node-v${version}/lib/punycode.js
echo
echo "npm"
echo "========================="
grep "\"version\":" node-v${version}/deps/npm/package.json
echo
echo "Make sure these versions match what is in the RPM spec file"
rm -rf node-v${version}
# ] <-- needed because of Argbash

SOURCES/nodejs.pc.in

@@ -0,0 +1,9 @@
prefix=@PREFIX@
includedir=@INCLUDEDIR@
libdir=@LIBDIR@
Name: @PKGCONFNAME@
Description: JavaScript Runtime
Version: @NODEJS_VERSION@
Libs: -L${libdir} -lnode
Cflags: -I${includedir}/node
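Assuming the @PKGCONFNAME@ placeholder is substituted with something like `nodejs` at build time (the real name and paths come from the spec file, so both are illustrative here), embedders would query the flags in the usual pkg-config way:

$ pkg-config --cflags nodejs
-I/usr/include/node
$ pkg-config --libs nodejs
-L/usr/lib64 -lnode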

SOURCES/nodejs.srpm.macros

@@ -0,0 +1,167 @@
# ============================================================================
# Vendored dependencies management
# --- Version macros definition
# Parse and normalize version string into several macros.
# By default, stores the whole string in `%<name>_evr` macro,
# then automatically strips any epoch and/or release parts
# (specified in the standard "E:V-R" format)
# and defines `%<name>_epoch`, `%<name>_version`, and `%<name>_release` macros.
#
# With the `-p` option, the version is additionally split into
# `%<name>_version_major`, `%<name>_version_minor`, and `%<name>_version_patch` macros.
#
# Any would-be empty macro will evaluate to `%{nil}`.
#
# Options:
# -p : Also define the partial macros.
#
# Arguments:
# 1: Name of the dependency. Any `-' will be replaced by `_' in the macro names.
# 2: The EVR string to parse.
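#
# Example (component name and EVR are hypothetical, shown only to illustrate the expansion):
#   %nodejs_define_version -p nghttp2 1:1.66.0-1
# defines %nghttp2_evr = 1:1.66.0-1, %nghttp2_epoch = 1, %nghttp2_version = 1.66.0,
# %nghttp2_release = 1, and, because of -p, %nghttp2_version_major = 1,
# %nghttp2_version_minor = 66 and %nghttp2_version_patch = 0.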
%nodejs_define_version(p) %{lua:
local function hasflag(flag) return (rpm.expand("%{-" .. flag .. "}") ~= "") end
local function readflag(flag)
if not hasflag(flag) then return nil end
local value = rpm.expand("%{-" .. flag .. "*}")
if (value == '""') or (value == "''") then value = '' end
return value
end
local function explicitset(rpmvar, value)
if (value == nil) or (value == "") then value = "%{nil}" end
rpm.define(rpmvar .. " " .. value)
end
\
local arg = {}; for a = 1, tonumber(rpm.expand("%#")) do
table.insert(arg, rpm.expand("%" .. a))
end
local opt = {
["p"] = readflag("p"),
}
\
local component = arg[1] or error("No name provided!")
local evr = arg[2] or error("No version string provided!")
\
local name = component:gsub("-", "_") -- macro-safe name
\
explicitset(name .. "_evr", evr)
\
local _, epoch_end, epoch = evr:find("^(%d+):")
explicitset(name .. "_epoch", epoch)
\
local release_start, _, release = evr:find("%-([^-]+)$")
explicitset(name .. "_release", release)
\
local version_start, version_end = 0, -1
if epoch_end then version_start = epoch_end + 1 end
if release_start then version_end = release_start -1 end
\
local version = evr:sub(version_start, version_end)
explicitset(name .. "_version", version)
\
if opt.p then
local parts = {}; for p in version:gmatch("[^.]+") do table.insert(parts, p) end
explicitset(name .. "_version_major", parts[1])
explicitset(name .. "_version_minor", parts[2])
explicitset(name .. "_version_patch", parts[3])
end
}
# --- Declare vendored dependency
# Emits bcond-controlled RPM tags for a (potentially) vendored dependency.
#
# By default, it emits `Provides: bundled(<name>) = <version>` for given arguments.
# If a de-vendoring option is provided, also defines a bcond that controls whether to de-vendor or not.
# The default is to de-vendor when possible unless a global bcond (`all_deps_bundled`) is set.
#
# Options:
# -a : Autoversion: try using the `<name>_version` macro if the version argument is empty.
# -n[npmname,...] : Also provide the respective npm module name when vendoring.
# -p[pkgname,...] : Use pkgconfig to BuildRequire de-vendored dependency.
# -r[rpmname,...] : Also explicitly declare run time requirement.
# -s[rpmname,...] : BuildRequire de-vendored dependency by RPM name.
#
# All above options accept optional parameter overriding the component name in respective tag.
# If needed, multiple values can be requested by separating them with a comma.
#
# When a name is used in a macro context (for example, in the -a option),
# the same name-mangling as for nodejs_define_version is used;
# no need to adjust it by hand.
#
# Arguments:
# 1: Name of the vendored component. Should be appropriate for `Provides: bundled(<name>)` tag.
# 2: Version of the vendored component. Ignored if de-vendored.
%nodejs_declare_bundled(an::p::r::s::) %{lua:
local function read(rpmvar)
if not rpmvar then return nil end
local macro_string = "%{" .. rpmvar .. "}"
if rpm.expand(macro_string) == macro_string then return nil end
return rpm.expand("%{?" .. rpmvar .. "}")
end
local function hasflag(flag) return (rpm.expand("%{-" .. flag .. "}") ~= "") end
local function readflag(flag)
if not hasflag(flag) then return nil end
local value = rpm.expand("%{-" .. flag .. "*}")
if (value == '""') or (value == "''") then value = '' end
return value
end
\
local arg = {}; for a = 1, tonumber(rpm.expand("%#")) do
table.insert(arg, rpm.expand("%" .. a))
end
local opt = {
["a"] = hasflag("a"),
["n"] = readflag("n"),
["p"] = readflag("p"),
["r"] = readflag("r"),
["s"] = readflag("s"),
}
\
local component = arg[1] or error("Vendored component was not named!")
local version = arg[2] or (opt.a and read(component:gsub("-", "_") .. "_version")) or error("Missing component version!")
\
local mapvalues = function(fn, tbl)
local output = {}; for _, val in ipairs(tbl) do table.insert(output, fn(val)) end; return output
end
local splitnames = function(input)
local output = {}; for m in input:gmatch("[^,]+") do table.insert(output, m) end; return output
end
local nl = string.char(10); -- \n does not work in rpmlua
\
local possible_to_devendor = opt.p or opt.s
local should_devendor = possible_to_devendor and rpm.expand("%{with all_deps_bundled}") == "0"
\
local bcond_name = "bundled_" .. component:gsub("-", "_")
if should_devendor
then rpm.expand("%bcond_with " .. bcond_name)
else rpm.expand("%bcond_without " .. bcond_name)
end
\
if rpm.expand("%with " .. bcond_name) == "1" then
local provides = {string.format("bundled(%s) = %s", component, version)}
if opt.n then
local names = {component}; if opt.n ~= "" then names = splitnames(opt.n) end
for _, name in ipairs(names) do
table.insert(provides, string.format("npm(%s) = %s", name, version))
end
end
print("Provides: " .. table.concat(provides, ", "))
else
\
local buildrequire, require = nil, nil
if opt.p then
local format = function(n) return string.format("pkgconfig(%s)", n) end
local names = {component}; if opt.p ~= "" then names = splitnames(opt.p) end
buildrequire = "BuildRequires: " .. table.concat(mapvalues(format, names), ", ")
elseif opt.s then
local names = {component}; if opt.s ~= "" then names = splitnames(opt.s) end
buildrequire = "BuildRequires: " .. table.concat(names, ", ")
end
if opt.r then
local names = {component}; if opt.r ~= "" then names = splitnames(opt.r) end
require = "Requires: " .. table.concat(names, ", ")
end
\
print(table.concat({buildrequire, require}, nl))
end
}
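A rough sketch of how the two macros above might be combined in a spec file; the component name, version, and pkgconfig module below are invented for illustration:

%nodejs_define_version -p nghttp2 1.66.0
%nodejs_declare_bundled -a -p libnghttp2 nghttp2

With the default bconds this de-vendors the dependency and emits `BuildRequires: pkgconfig(libnghttp2)`; building with `--with bundled_nghttp2` (or with the global `all_deps_bundled` bcond enabled) emits `Provides: bundled(nghttp2) = 1.66.0` instead.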


@@ -1,2 +0,0 @@
%__nodejs_native_requires %{_rpmconfigdir}/nodejs_native.req
%__nodejs_native_path ^/usr/lib.*/node_modules/.*\\.node$


@@ -1 +0,0 @@
prefix=/usr/local

SOURCES/npmrc.in

@@ -0,0 +1,7 @@
# This is the distribution-level configuration file for npm.
# To configure npm on a system level, use the globalconfig below (defaults to @SYSCONFDIR@/npmrc).
# vim:set filetype=dosini:
globalconfig=@SYSCONFDIR@/npmrc
prefix=/usr/local
update-notifier=false
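A quick way to confirm the layering the comments describe once the file is installed; the expanded @SYSCONFDIR@ value shown here (/etc) is an assumption:

$ npm config get globalconfig
/etc/npmrc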

SOURCES/test-runner.sh (executable)

@@ -0,0 +1,61 @@
#!/bin/bash
NODE_BIN="$1"
PARENT_TEST_FOLDER="$2"
TEST_LIST_FILE="$3"
# At most 10 min per test
TIMEOUT_DURATION=600
# Exit code
FINAL_RESULT=0
ARCH=$(uname -m)
echo "Started test run:"
# Run the list of tests
while IFS= read -r test_line; do
# ignore commented lines
if [[ "$test_line" =~ ^# ]]; then
continue
fi
# If the test line specifies architectures on which it should be skipped,
# extract them
TEST_PATH=$(echo "$test_line" | awk '{print $1}')
IGNORE_ARCHES=$(echo "$test_line" |\
awk '{for (i=2; i<=NF; i++) printf "%s ", $i; print ""}')
# Skip test for specified ARCH
for ARCH_IGNORE in $IGNORE_ARCHES; do
if [[ "$ARCH_IGNORE" == "$ARCH" ]]; then
echo "Skipping test, current arch is in ignore: $TEST_PATH ($ARCH_IGNORE)"
continue 2
fi
done
# Construct test path
TEST_SCRIPT="$PARENT_TEST_FOLDER/$TEST_PATH"
if [ ! -f "$TEST_SCRIPT" ]; then
echo "Test script not found: $TEST_SCRIPT"
continue
fi
TEST_OUTPUT=$(timeout "$TIMEOUT_DURATION" "$NODE_BIN" "$TEST_SCRIPT" 2>&1)
TEST_RESULT=$?
# Handle test result
if [ $TEST_RESULT -ne 0 ]; then
FINAL_RESULT=1
if [ $TEST_RESULT -eq 124 ]; then
echo "Test timed out: $TEST_SCRIPT"
else
echo "Test failed: $TEST_SCRIPT"
fi
echo "Test failure message:"
echo "$TEST_OUTPUT"
fi
done < "$TEST_LIST_FILE"
if [ $FINAL_RESULT -eq 0 ]; then
echo "All tests succesfully passed."
fi
exit $FINAL_RESULT
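A hypothetical invocation to show the expected inputs: the list file uses one test path per line, optionally followed by architectures on which the test is skipped (the entries and paths below are invented; the real list ships as SOURCES/test-should-pass.txt):

$ cat test-should-pass.txt
parallel/test-assert.js
parallel/test-example-flaky.js s390x
$ ./test-runner.sh /usr/bin/node node-v24.4.1/test test-should-pass.txt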

SOURCES/test-should-pass.txt

File diff suppressed because it is too large.

SOURCES/v8.pc.in

@@ -0,0 +1,9 @@
prefix=@PREFIX@
includedir=@INCLUDEDIR@
libdir=@LIBDIR@
Name: @PKGCONFNAME@
Description: JavaScript Runtime
Version: @V8_VERSION@
Libs: -L${libdir} -lv8
Cflags: -I${includedir}

File diff suppressed because it is too large.