From f51e6cd24864290aec4404cea934b8c2f9a89a0f Mon Sep 17 00:00:00 2001 From: Jonathan Budzenski Date: Tue, 21 Nov 2023 07:15:23 -0600 Subject: [PATCH] Revert "Upgrade to Node.js 20 (#162696)" This reverts commit 029b3ba81d340ca0d8394397aeee127841f190c5. --- .buildkite/pipelines/artifacts.yml | 2 +- .ci/Dockerfile | 2 +- .node-version | 2 +- .nvmrc | 2 +- WORKSPACE.bazel | 12 ++--- package.json | 6 +-- .../discovery/plugin_manifest_parser.test.ts | 2 +- .../src/discovery/plugins_discovery.test.ts | 2 +- .../src/lib/archives/parse.test.ts | 2 +- .../integration_tests/__fixtures__/es_bin.js | 4 +- packages/kbn-es/src/utils/docker.test.ts | 15 +++--- packages/kbn-es/src/utils/docker.ts | 11 +---- .../build/tasks/patch_native_modules_task.ts | 20 ++++---- .../create_streaming_batched_function.test.ts | 2 +- .../public/search/expressions/esdsl.test.ts | 2 +- src/setup_node_env/exit_on_warning.js | 7 --- src/setup_node_env/heap_snapshot.js | 46 +++++++++++++++++++ src/setup_node_env/setup_env.js | 2 + .../user_actions/connector_id.test.ts | 6 +-- .../document_creation_logic.test.ts | 2 +- .../server/browsers/download/fetch.test.ts | 4 +- .../server/browsers/download/fetch.ts | 12 ++--- .../server/browsers/download/index.test.ts | 10 ++-- .../server/browsers/download/index.ts | 10 +--- .../server/browsers/extract/unzip.test.ts | 4 +- .../lib/check_for_json_errors.test.ts | 2 +- .../api/rules/import_rules/route.test.ts | 2 +- .../create_rules_stream_from_ndjson.test.ts | 4 +- .../server/saved_objects/migrations.test.ts | 2 +- yarn.lock | 15 ++---- 30 files changed, 115 insertions(+), 99 deletions(-) create mode 100644 src/setup_node_env/heap_snapshot.js diff --git a/.buildkite/pipelines/artifacts.yml b/.buildkite/pipelines/artifacts.yml index 5b219952c8cf11..4a52ee7402823c 100644 --- a/.buildkite/pipelines/artifacts.yml +++ b/.buildkite/pipelines/artifacts.yml @@ -88,7 +88,7 @@ steps: - exit_status: -1 agents: queue: n2-2 - timeout_in_minutes: 60 + timeout_in_minutes: 30 if: "build.env('RELEASE_BUILD') == null || build.env('RELEASE_BUILD') == '' || build.env('RELEASE_BUILD') == 'false'" retry: automatic: diff --git a/.ci/Dockerfile b/.ci/Dockerfile index 91e0d5bf36a9d5..109b9ffab3cc56 100644 --- a/.ci/Dockerfile +++ b/.ci/Dockerfile @@ -1,7 +1,7 @@ # NOTE: This Dockerfile is ONLY used to run certain tasks in CI. It is not used to run Kibana or as a distributable. 
# If you're looking for the Kibana Docker image distributable, please see: src/dev/build/tasks/os_packages/docker_generator/templates/dockerfile.template.ts -ARG NODE_VERSION=20.9.0 +ARG NODE_VERSION=18.18.2 FROM node:${NODE_VERSION} AS base diff --git a/.node-version b/.node-version index f3f52b42d3da97..87ec8842b158d2 100644 --- a/.node-version +++ b/.node-version @@ -1 +1 @@ -20.9.0 +18.18.2 diff --git a/.nvmrc b/.nvmrc index f3f52b42d3da97..87ec8842b158d2 100644 --- a/.nvmrc +++ b/.nvmrc @@ -1 +1 @@ -20.9.0 +18.18.2 diff --git a/WORKSPACE.bazel b/WORKSPACE.bazel index 38802ebef5b1d8..e614bdff172f99 100644 --- a/WORKSPACE.bazel +++ b/WORKSPACE.bazel @@ -22,13 +22,13 @@ load("@build_bazel_rules_nodejs//:index.bzl", "node_repositories", "yarn_install # Setup the Node.js toolchain for the architectures we want to support node_repositories( node_repositories = { - "20.9.0-darwin_amd64": ("node-v20.9.0-darwin-x64.tar.gz", "node-v20.9.0-darwin-x64", "fc5b73f2a78c17bbe926cdb1447d652f9f094c79582f1be6471b4b38a2e1ccc8"), - "20.9.0-darwin_arm64": ("node-v20.9.0-darwin-arm64.tar.gz", "node-v20.9.0-darwin-arm64", "31d2d46ae8d8a3982f54e2ff1e60c2e4a8e80bf78a3e8b46dcaac95ac5d7ce6a"), - "20.9.0-linux_arm64": ("node-v20.9.0-linux-arm64.tar.xz", "node-v20.9.0-linux-arm64", "79c07c41c9f2410e35fd8dec61491ba63762e428bffa2ee0ff3aec1afe05d4b1"), - "20.9.0-linux_amd64": ("node-v20.9.0-linux-x64.tar.xz", "node-v20.9.0-linux-x64", "d11a5e06d6fda8d0cb1a759365d2b5e33c609f3c9f333fdc63e0522475dc0c89"), - "20.9.0-windows_amd64": ("node-v20.9.0-win-x64.zip", "node-v20.9.0-win-x64", "70d87dad2378c63216ff83d5a754c61d2886fc39d32ce0d2ea6de763a22d3780"), + "18.18.2-darwin_amd64": ("node-v18.18.2-darwin-x64.tar.gz", "node-v18.18.2-darwin-x64", "5bb8da908ed590e256a69bf2862238c8a67bc4600119f2f7721ca18a7c810c0f"), + "18.18.2-darwin_arm64": ("node-v18.18.2-darwin-arm64.tar.gz", "node-v18.18.2-darwin-arm64", "9f982cc91b28778dd8638e4f94563b0c2a1da7aba62beb72bd427721035ab553"), + "18.18.2-linux_arm64": ("node-v18.18.2-linux-arm64.tar.xz", "node-v18.18.2-linux-arm64", "8a5a03f6a742159c9aa0ae3a99b368cd938cf62f3a5522a2e5acbe6313710efe"), + "18.18.2-linux_amd64": ("node-v18.18.2-linux-x64.tar.xz", "node-v18.18.2-linux-x64", "f7cf590bc7153f3beaa9e1138d00e50d74df223f0bec61f63e7df65f7315b76a"), + "18.18.2-windows_amd64": ("node-v18.18.2-win-x64.zip", "node-v18.18.2-win-x64", "3bb0e51e579a41a22b3bf6cb2f3e79c03801aa17acbe0ca00fc555d1282e7acd"), }, - node_version = "20.9.0", + node_version = "18.18.2", node_urls = [ "https://us-central1-elastic-kibana-184716.cloudfunctions.net/kibana-ci-proxy-cache/dist/v{version}/{filename}", ], diff --git a/package.json b/package.json index 73aff64a7f9327..0f8260566b536f 100644 --- a/package.json +++ b/package.json @@ -74,12 +74,12 @@ "url": "https://github.com/elastic/kibana.git" }, "engines": { - "node": "20.9.0", + "node": "18.18.2", "yarn": "^1.22.19" }, "resolutions": { "**/@hello-pangea/dnd": "16.2.0", - "**/@types/node": "20.9.0", + "**/@types/node": "18.18.5", "**/@typescript-eslint/utils": "5.62.0", "**/chokidar": "^3.5.3", "**/globule/minimatch": "^3.1.2", @@ -1379,7 +1379,7 @@ "@types/multistream": "^4.1.0", "@types/mustache": "^0.8.31", "@types/nock": "^10.0.3", - "@types/node": "20.9.0", + "@types/node": "18.18.5", "@types/node-fetch": "2.6.4", "@types/node-forge": "^1.3.1", "@types/nodemailer": "^6.4.0", diff --git a/packages/core/plugins/core-plugins-server-internal/src/discovery/plugin_manifest_parser.test.ts 
b/packages/core/plugins/core-plugins-server-internal/src/discovery/plugin_manifest_parser.test.ts index 202cef2ca09d41..f940e49805cd5a 100644 --- a/packages/core/plugins/core-plugins-server-internal/src/discovery/plugin_manifest_parser.test.ts +++ b/packages/core/plugins/core-plugins-server-internal/src/discovery/plugin_manifest_parser.test.ts @@ -59,7 +59,7 @@ test('return error when manifest content is not a valid JSON', async () => { }); await expect(parseManifest(pluginPath, packageInfo)).rejects.toMatchObject({ - message: `Unexpected token 'o', "not-json" is not valid JSON (invalid-manifest, ${pluginManifestPath})`, + message: `Unexpected token o in JSON at position 1 (invalid-manifest, ${pluginManifestPath})`, type: PluginDiscoveryErrorType.InvalidManifest, path: pluginManifestPath, }); diff --git a/packages/core/plugins/core-plugins-server-internal/src/discovery/plugins_discovery.test.ts b/packages/core/plugins/core-plugins-server-internal/src/discovery/plugins_discovery.test.ts index 93657a1f2533e7..9ab50d8786ccab 100644 --- a/packages/core/plugins/core-plugins-server-internal/src/discovery/plugins_discovery.test.ts +++ b/packages/core/plugins/core-plugins-server-internal/src/discovery/plugins_discovery.test.ts @@ -278,7 +278,7 @@ describe('plugins discovery system', () => { .toPromise(); expect(errors).toContain( - `Error: Unexpected token 'o', "not-json" is not valid JSON (invalid-manifest, ${manifestPath( + `Error: Unexpected token o in JSON at position 1 (invalid-manifest, ${manifestPath( 'plugin_a' )})` ); diff --git a/packages/kbn-es-archiver/src/lib/archives/parse.test.ts b/packages/kbn-es-archiver/src/lib/archives/parse.test.ts index 9e0d5c02238b84..b87e6f71400e0b 100644 --- a/packages/kbn-es-archiver/src/lib/archives/parse.test.ts +++ b/packages/kbn-es-archiver/src/lib/archives/parse.test.ts @@ -98,7 +98,7 @@ describe('esArchiver createParseArchiveStreams', () => { ] as [Readable, ...Writable[]]); throw new Error('should have failed'); } catch (err) { - expect(err.message).toEqual(`Expected property name or '}' in JSON at position 1`); + expect(err.message).toEqual(expect.stringContaining('Unexpected number')); } }); }); diff --git a/packages/kbn-es/src/integration_tests/__fixtures__/es_bin.js b/packages/kbn-es/src/integration_tests/__fixtures__/es_bin.js index 0d44b8a775b25d..5ef61456988115 100644 --- a/packages/kbn-es/src/integration_tests/__fixtures__/es_bin.js +++ b/packages/kbn-es/src/integration_tests/__fixtures__/es_bin.js @@ -87,11 +87,11 @@ const { ES_KEY_PATH, ES_CERT_PATH } = require('@kbn/dev-utils'); } ); - // setup server auto close after 5 second of silence + // setup server auto close after 1 second of silence let serverCloseTimer; const delayServerClose = () => { clearTimeout(serverCloseTimer); - serverCloseTimer = setTimeout(() => server.close(), 5000); + serverCloseTimer = setTimeout(() => server.close(), 1000); }; server.on('request', delayServerClose); server.on('listening', delayServerClose); diff --git a/packages/kbn-es/src/utils/docker.test.ts b/packages/kbn-es/src/utils/docker.test.ts index 2def181febb589..2d71a4e628e118 100644 --- a/packages/kbn-es/src/utils/docker.test.ts +++ b/packages/kbn-es/src/utils/docker.test.ts @@ -7,7 +7,8 @@ */ import mockFs from 'mock-fs'; -import Fsp from 'fs/promises'; +import { existsSync } from 'fs'; +import { stat } from 'fs/promises'; import { basename } from 'path'; import { @@ -108,7 +109,7 @@ const volumeCmdTest = async (volumeCmd: string[]) => { // extract only permission from mode // eslint-disable-next-line 
no-bitwise - expect((await Fsp.stat(serverlessObjectStorePath)).mode & 0o777).toBe(0o777); + expect((await stat(serverlessObjectStorePath)).mode & 0o777).toBe(0o777); }; describe('resolveDockerImage()', () => { @@ -441,7 +442,7 @@ describe('setupServerlessVolumes()', () => { const volumeCmd = await setupServerlessVolumes(log, { basePath: baseEsPath }); volumeCmdTest(volumeCmd); - await expect(Fsp.access(serverlessObjectStorePath)).resolves.not.toThrow(); + expect(existsSync(serverlessObjectStorePath)).toBe(true); }); test('should use an existing object store', async () => { @@ -450,9 +451,7 @@ describe('setupServerlessVolumes()', () => { const volumeCmd = await setupServerlessVolumes(log, { basePath: baseEsPath }); volumeCmdTest(volumeCmd); - await expect( - Fsp.access(`${serverlessObjectStorePath}/cluster_state/lease`) - ).resolves.not.toThrow(); + expect(existsSync(`${serverlessObjectStorePath}/cluster_state/lease`)).toBe(true); }); test('should remove an existing object store when clean is passed', async () => { @@ -461,9 +460,7 @@ describe('setupServerlessVolumes()', () => { const volumeCmd = await setupServerlessVolumes(log, { basePath: baseEsPath, clean: true }); volumeCmdTest(volumeCmd); - await expect( - Fsp.access(`${serverlessObjectStorePath}/cluster_state/lease`) - ).rejects.toThrowError(); + expect(existsSync(`${serverlessObjectStorePath}/cluster_state/lease`)).toBe(false); }); test('should add SSL volumes when ssl is passed', async () => { diff --git a/packages/kbn-es/src/utils/docker.ts b/packages/kbn-es/src/utils/docker.ts index 2981acb56d0f49..1c89339e1a5670 100644 --- a/packages/kbn-es/src/utils/docker.ts +++ b/packages/kbn-es/src/utils/docker.ts @@ -486,19 +486,12 @@ export async function setupServerlessVolumes(log: ToolingLog, options: Serverles log.info(chalk.bold(`Checking for local serverless ES object store at ${objectStorePath}`)); log.indent(4); - let exists = null; - try { - await Fsp.access(objectStorePath); - exists = true; - } catch (e) { - exists = false; - } - if (clean && exists) { + if (clean && fs.existsSync(objectStorePath)) { log.info('Cleaning existing object store.'); await Fsp.rm(objectStorePath, { recursive: true, force: true }); } - if (clean || !exists) { + if (clean || !fs.existsSync(objectStorePath)) { await Fsp.mkdir(objectStorePath, { recursive: true }).then(() => log.info('Created new object store.') ); diff --git a/src/dev/build/tasks/patch_native_modules_task.ts b/src/dev/build/tasks/patch_native_modules_task.ts index c928d8a5b4d062..24ab98d1322036 100644 --- a/src/dev/build/tasks/patch_native_modules_task.ts +++ b/src/dev/build/tasks/patch_native_modules_task.ts @@ -47,8 +47,8 @@ const packages: Package[] = [ extractMethod: 'gunzip', archives: { 'linux-x64': { - url: 'https://us-central1-elastic-kibana-184716.cloudfunctions.net/kibana-ci-proxy-cache/node-re2/uhop/node-re2/releases/download/1.20.1/linux-x64-115.gz', - sha256: '7a4821ef7e9ddbafe5bba6beb54b100c233242f0dbf5a7268f55beea5f845f97', + url: 'https://us-central1-elastic-kibana-184716.cloudfunctions.net/kibana-ci-proxy-cache/node-re2/uhop/node-re2/releases/download/1.20.1/linux-x64-108.gz', + sha256: 'e14f274f73ede22f170bfe9e57a0645ebf7ed320042a27361fa158bc239a5563', }, // Linux ARM builds are currently done manually as Github Actions used in upstream project // do not natively support an Linux ARM target. 
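The numeric suffix on these prebuilt re2 archives is the Node.js ABI version (NODE_MODULE_VERSION), which is why the revert swaps every -115 artifact and checksum for its -108 counterpart: ABI 115 belongs to Node.js 20.x and ABI 108 to Node.js 18.x. A minimal way to confirm which ABI a given runtime reports (an illustrative one-liner, not part of the build task):

// Prints the Node.js ABI version that appears in the archive names above:
// "108" on Node.js 18.18.2, "115" on Node.js 20.9.0.
console.log(process.versions.modules);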
@@ -63,20 +63,20 @@ const packages: Package[] = [ // * capture the sha256 with: `shasum -a 256 linux-arm64-*` // * upload the `linux-arm64-*.gz` artifact to the `yarn-prebuilt-artifacts` bucket in GCS using the correct version number 'linux-arm64': { - url: 'https://us-central1-elastic-kibana-184716.cloudfunctions.net/kibana-ci-proxy-cache/node-re2/uhop/node-re2/releases/download/1.20.1/linux-arm64-115.gz', - sha256: '8d753d6ac15d95d6d236dce2f986f4a6b2f9945ba0d927ab972eb82da68d14b1', + url: 'https://us-central1-elastic-kibana-184716.cloudfunctions.net/kibana-ci-proxy-cache/node-re2/uhop/node-re2/releases/download/1.20.1/linux-arm64-108.gz', + sha256: 'cbdf3f75a331c601ac0bd34715814d0a1fd17612c6d6b5269f176d46044defd5', }, 'darwin-x64': { - url: 'https://us-central1-elastic-kibana-184716.cloudfunctions.net/kibana-ci-proxy-cache/node-re2/uhop/node-re2/releases/download/1.20.1/darwin-x64-115.gz', - sha256: '91823077c510c6da9c428038bfd210846373bcd0ab6851f7408add67864785a9', + url: 'https://us-central1-elastic-kibana-184716.cloudfunctions.net/kibana-ci-proxy-cache/node-re2/uhop/node-re2/releases/download/1.20.1/darwin-x64-108.gz', + sha256: 'f88c09e98f152ac15c593b3b923b7fbe28d448cfde5986da40c34461bede5a09', }, 'darwin-arm64': { - url: 'https://us-central1-elastic-kibana-184716.cloudfunctions.net/kibana-ci-proxy-cache/node-re2/uhop/node-re2/releases/download/1.20.1/darwin-arm64-115.gz', - sha256: '935e2a5590e93e6f52f41d40ae4115fbd2f130a4d61afb0a6549ed17adb1dd84', + url: 'https://us-central1-elastic-kibana-184716.cloudfunctions.net/kibana-ci-proxy-cache/node-re2/uhop/node-re2/releases/download/1.20.1/darwin-arm64-108.gz', + sha256: '80700aecbe63052149aba721449a8ce30c24d884e414025124bb4602efe708be', }, 'win32-x64': { - url: 'https://us-central1-elastic-kibana-184716.cloudfunctions.net/kibana-ci-proxy-cache/node-re2/uhop/node-re2/releases/download/1.20.1/win32-x64-115.gz', - sha256: '120c2663bcab4803f8405d9aa2cb97fa181d90b4ff176827cc7295667aa2e9c2', + url: 'https://us-central1-elastic-kibana-184716.cloudfunctions.net/kibana-ci-proxy-cache/node-re2/uhop/node-re2/releases/download/1.20.1/win32-x64-108.gz', + sha256: 'cadc4713907f3ad1de45f470810ec8e13e08f32c1a1e45e5d5ab5e9d7fcb9763', }, }, }, diff --git a/src/plugins/bfetch/public/batching/create_streaming_batched_function.test.ts b/src/plugins/bfetch/public/batching/create_streaming_batched_function.test.ts index f4eeca4bc5ba63..fb9db4490144b0 100644 --- a/src/plugins/bfetch/public/batching/create_streaming_batched_function.test.ts +++ b/src/plugins/bfetch/public/batching/create_streaming_batched_function.test.ts @@ -744,7 +744,7 @@ describe('createStreamingBatchedFunction()', () => { const [, error1] = await promise1; const [result1] = await promise2; expect(error1).toMatchObject({ - message: `Unexpected token 'N', "Not a JSON\n" is not valid JSON`, + message: 'Unexpected token N in JSON at position 0', code: 'STREAM', }); expect(result1).toMatchObject({ diff --git a/src/plugins/data/public/search/expressions/esdsl.test.ts b/src/plugins/data/public/search/expressions/esdsl.test.ts index 603dbf1e8f3eb4..2bf2ef1148507d 100644 --- a/src/plugins/data/public/search/expressions/esdsl.test.ts +++ b/src/plugins/data/public/search/expressions/esdsl.test.ts @@ -63,7 +63,7 @@ describe('esdsl', () => { } catch (error) { errorMessage = error.message; } - expect(errorMessage).toEqual(`Unexpected token 'i', "invalid json" is not valid JSON`); + expect(errorMessage).toEqual('Unexpected token i in JSON at position 0'); }); test('adds filters', async () => { diff --git 
a/src/setup_node_env/exit_on_warning.js b/src/setup_node_env/exit_on_warning.js index dc6e3210742246..5e7bae8254c04e 100644 --- a/src/setup_node_env/exit_on_warning.js +++ b/src/setup_node_env/exit_on_warning.js @@ -46,13 +46,6 @@ var IGNORE_WARNINGS = [ // We need to discard that warning name: 'ProductNotSupportedSecurityError', }, - // https://github.com/browserify/browserify-rsa/pull/20 - { - name: 'DeprecationWarning', - code: 'DEP0170', - message: - 'The URL https://github.com:crypto-browserify/browserify-rsa.git is invalid. Future versions of Node.js will throw an error.', - }, ]; if (process.noProcessWarnings !== true) { diff --git a/src/setup_node_env/heap_snapshot.js b/src/setup_node_env/heap_snapshot.js new file mode 100644 index 00000000000000..94e4b35e2f8870 --- /dev/null +++ b/src/setup_node_env/heap_snapshot.js @@ -0,0 +1,46 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +var getopts = require('getopts'); +var path = require('path'); +var v8 = require('node:v8'); +var worker = require('node:worker_threads'); + +var execOpts = getopts(process.execArgv); +var envOpts = getopts(process.env.NODE_OPTIONS ? process.env.NODE_OPTIONS.split(/\s+/) : []); +var diagnosticDir = execOpts['diagnostic-dir'] || envOpts['diagnostic-dir']; +var heapSnapshotSignal = execOpts['heapsnapshot-signal'] || envOpts['heapsnapshot-signal']; +var heapSnapshotSerial = 0; + +function getHeapSnapshotPath() { + var now = new Date(); + + var year = now.getFullYear(); + var month = String(now.getMonth() + 1).padStart(2, '0'); + var day = String(now.getDate()).padStart(2, '0'); + var hours = String(now.getHours()).padStart(2, '0'); + var minutes = String(now.getMinutes()).padStart(2, '0'); + var seconds = String(now.getSeconds()).padStart(2, '0'); + + var date = `${year}${month}${day}`; + var time = `${hours}${minutes}${seconds}`; + var pid = process.pid; + var threadId = worker.threadId; + var serial = (++heapSnapshotSerial).toString().padStart(3, '0'); + + return path.join(diagnosticDir, `Heap.${date}.${time}.${pid}.${threadId}.${serial}.heapsnapshot`); +} + +if (diagnosticDir && heapSnapshotSignal) { + process.removeAllListeners(heapSnapshotSignal); + + process.on(heapSnapshotSignal, function () { + var heapSnapshotPath = getHeapSnapshotPath(); + v8.writeHeapSnapshot(heapSnapshotPath); + }); +} diff --git a/src/setup_node_env/setup_env.js b/src/setup_node_env/setup_env.js index 7b37d98011cfb8..d3076a2c3b9cf5 100644 --- a/src/setup_node_env/setup_env.js +++ b/src/setup_node_env/setup_env.js @@ -11,6 +11,8 @@ require('./exit_on_warning'); require('./harden'); // The following require statements MUST be executed before any others - END +// @todo Remove when migrated to Node 20 (#162696) +require('./heap_snapshot'); require('symbol-observable'); require('source-map-support').install(); require('./node_version_validator'); diff --git a/x-pack/plugins/cases/server/saved_object_types/migrations/user_actions/connector_id.test.ts b/x-pack/plugins/cases/server/saved_object_types/migrations/user_actions/connector_id.test.ts index 2ed37837f5100a..ed43cc40649c64 100644 --- a/x-pack/plugins/cases/server/saved_object_types/migrations/user_actions/connector_id.test.ts +++ 
b/x-pack/plugins/cases/server/saved_object_types/migrations/user_actions/connector_id.test.ts @@ -254,7 +254,7 @@ describe('user action migrations', () => { const log = context.log as jest.Mocked; expect(log.error.mock.calls[0]).toMatchInlineSnapshot(` Array [ - "Failed to migrate user action connector with doc id: 1 version: 8.0.0 error: Expected property name or '}' in JSON at position 1", + "Failed to migrate user action connector with doc id: 1 version: 8.0.0 error: Unexpected token a in JSON at position 1", Object { "migrations": Object { "userAction": Object { @@ -448,7 +448,7 @@ describe('user action migrations', () => { const log = context.log as jest.Mocked; expect(log.error.mock.calls[0]).toMatchInlineSnapshot(` Array [ - "Failed to migrate user action connector with doc id: 1 version: 8.0.0 error: Expected property name or '}' in JSON at position 1", + "Failed to migrate user action connector with doc id: 1 version: 8.0.0 error: Unexpected token b in JSON at position 1", Object { "migrations": Object { "userAction": Object { @@ -644,7 +644,7 @@ describe('user action migrations', () => { const log = context.log as jest.Mocked; expect(log.error.mock.calls[0]).toMatchInlineSnapshot(` Array [ - "Failed to migrate user action connector with doc id: 1 version: 8.0.0 error: Unexpected token 'e', \\"new json value\\" is not valid JSON", + "Failed to migrate user action connector with doc id: 1 version: 8.0.0 error: Unexpected token e in JSON at position 1", Object { "migrations": Object { "userAction": Object { diff --git a/x-pack/plugins/enterprise_search/public/applications/app_search/components/document_creation/document_creation_logic.test.ts b/x-pack/plugins/enterprise_search/public/applications/app_search/components/document_creation/document_creation_logic.test.ts index 925f5af35ff48c..3e39c50ff8de70 100644 --- a/x-pack/plugins/enterprise_search/public/applications/app_search/components/document_creation/document_creation_logic.test.ts +++ b/x-pack/plugins/enterprise_search/public/applications/app_search/components/document_creation/document_creation_logic.test.ts @@ -404,7 +404,7 @@ describe('DocumentCreationLogic', () => { DocumentCreationLogic.actions.onSubmitJson(); expect(DocumentCreationLogic.actions.setErrors).toHaveBeenCalledWith([ - `Unexpected token 'i', "invalid JSON" is not valid JSON`, + 'Unexpected token i in JSON at position 0', ]); expect(DocumentCreationLogic.actions.uploadDocuments).not.toHaveBeenCalled(); }); diff --git a/x-pack/plugins/screenshotting/server/browsers/download/fetch.test.ts b/x-pack/plugins/screenshotting/server/browsers/download/fetch.test.ts index b32f98aeffe206..cc22f152216afd 100644 --- a/x-pack/plugins/screenshotting/server/browsers/download/fetch.test.ts +++ b/x-pack/plugins/screenshotting/server/browsers/download/fetch.test.ts @@ -8,7 +8,7 @@ import mockFs from 'mock-fs'; import axios from 'axios'; import { createHash } from 'crypto'; -import { readFile } from 'fs/promises'; +import { readFileSync } from 'fs'; import { resolve as resolvePath } from 'path'; import { Readable } from 'stream'; import { fetch } from './fetch'; @@ -38,7 +38,7 @@ describe('fetch', () => { test('downloads the url to the path', async () => { await fetch('url', TEMP_FILE); - await expect(readFile(TEMP_FILE, 'utf8')).resolves.toBe('foobar'); + expect(readFileSync(TEMP_FILE, 'utf8')).toEqual('foobar'); }); test('returns the md5 hex hash of the http body', async () => { diff --git a/x-pack/plugins/screenshotting/server/browsers/download/fetch.ts 
b/x-pack/plugins/screenshotting/server/browsers/download/fetch.ts index e248aad94c14e6..ba3e083816937c 100644 --- a/x-pack/plugins/screenshotting/server/browsers/download/fetch.ts +++ b/x-pack/plugins/screenshotting/server/browsers/download/fetch.ts @@ -7,8 +7,7 @@ import Axios from 'axios'; import { createHash } from 'crypto'; -import { mkdir, open } from 'fs/promises'; -import { writeSync } from 'fs'; +import { closeSync, mkdirSync, openSync, writeSync } from 'fs'; import { dirname } from 'path'; import { finished, Readable } from 'stream'; import { promisify } from 'util'; @@ -22,8 +21,9 @@ export async function fetch(url: string, path: string, logger?: Logger): Promise const hash = createHash('md5'); - await mkdir(dirname(path), { recursive: true }); - const handle = await open(path, 'w'); + mkdirSync(dirname(path), { recursive: true }); + const handle = openSync(path, 'w'); + try { const response = await Axios.request({ url, @@ -32,7 +32,7 @@ export async function fetch(url: string, path: string, logger?: Logger): Promise }); response.data.on('data', (chunk: Buffer) => { - writeSync(handle.fd, chunk); + writeSync(handle, chunk); hash.update(chunk); }); @@ -43,7 +43,7 @@ export async function fetch(url: string, path: string, logger?: Logger): Promise throw new Error(`Unable to download ${url}: ${error}`); } finally { - await handle.close(); + closeSync(handle); } return hash.digest('hex'); diff --git a/x-pack/plugins/screenshotting/server/browsers/download/index.test.ts b/x-pack/plugins/screenshotting/server/browsers/download/index.test.ts index 8b8115c5a61642..887a631a2c1d71 100644 --- a/x-pack/plugins/screenshotting/server/browsers/download/index.test.ts +++ b/x-pack/plugins/screenshotting/server/browsers/download/index.test.ts @@ -7,7 +7,7 @@ import path from 'path'; import mockFs from 'mock-fs'; -import { access, readdir } from 'fs/promises'; +import { existsSync, readdirSync } from 'fs'; import { ChromiumArchivePaths, PackageInfo } from '../chromium'; import { fetch } from './fetch'; import { md5 } from './checksum'; @@ -55,8 +55,8 @@ describe('ensureDownloaded', () => { await download(paths, pkg); - await expect(access(unexpectedPath1)).rejects.toThrow(); - await expect(access(unexpectedPath2)).rejects.toThrow(); + expect(existsSync(unexpectedPath1)).toBe(false); + expect(existsSync(unexpectedPath2)).toBe(false); }); it('should reject when download fails', async () => { @@ -84,14 +84,14 @@ describe('ensureDownloaded', () => { await download(paths, pkg); expect(fetch).not.toHaveBeenCalled(); - await expect(readdir(path.resolve(`${paths.archivesPath}/x64`))).resolves.toEqual( + expect(readdirSync(path.resolve(`${paths.archivesPath}/x64`))).toEqual( expect.arrayContaining([ 'chrome-mac.zip', 'chrome-win.zip', expect.stringMatching(/^chromium-[0-9a-f]{7}-locales-linux_x64\.zip$/), ]) ); - await expect(readdir(path.resolve(`${paths.archivesPath}/arm64`))).resolves.toEqual( + expect(readdirSync(path.resolve(`${paths.archivesPath}/arm64`))).toEqual( expect.arrayContaining([ 'chrome-mac.zip', expect.stringMatching(/^chromium-[0-9a-f]{7}-locales-linux_arm64\.zip$/), diff --git a/x-pack/plugins/screenshotting/server/browsers/download/index.ts b/x-pack/plugins/screenshotting/server/browsers/download/index.ts index 61b46ac84c588c..ff8de66df743a2 100644 --- a/x-pack/plugins/screenshotting/server/browsers/download/index.ts +++ b/x-pack/plugins/screenshotting/server/browsers/download/index.ts @@ -5,7 +5,7 @@ * 2.0. 
*/ -import { access } from 'fs/promises'; +import { existsSync } from 'fs'; import del from 'del'; import type { Logger } from '@kbn/core/server'; import type { ChromiumArchivePaths, PackageInfo } from '../chromium'; @@ -42,13 +42,7 @@ export async function download( const resolvedPath = paths.resolvePath(pkg); const foundChecksum = await md5(resolvedPath).catch(() => 'MISSING'); - let pathExists = null; - try { - await access(resolvedPath); - pathExists = true; - } catch (e) { - pathExists = false; - } + const pathExists = existsSync(resolvedPath); if (pathExists && foundChecksum === archiveChecksum) { logger?.debug( `Browser archive for ${pkg.platform}/${pkg.architecture} already found in ${resolvedPath} with matching checksum.` diff --git a/x-pack/plugins/screenshotting/server/browsers/extract/unzip.test.ts b/x-pack/plugins/screenshotting/server/browsers/extract/unzip.test.ts index 08eac21450f262..4af457a0c3a6e3 100644 --- a/x-pack/plugins/screenshotting/server/browsers/extract/unzip.test.ts +++ b/x-pack/plugins/screenshotting/server/browsers/extract/unzip.test.ts @@ -6,7 +6,7 @@ */ import mockFs from 'mock-fs'; -import { readFile } from 'fs/promises'; +import { readFileSync } from 'fs'; import { ExtractError } from './extract_error'; import { unzip } from './unzip'; @@ -28,7 +28,7 @@ describe('unzip', () => { it('should extract zipped contents', async () => { await unzip('/test.zip', '/output'); - await expect(readFile('/output/test.txt', 'utf8')).resolves.toBe('test'); + expect(readFileSync('/output/test.txt').toString()).toEqual('test'); }); it('should reject on invalid archive', async () => { diff --git a/x-pack/plugins/searchprofiler/public/application/lib/check_for_json_errors.test.ts b/x-pack/plugins/searchprofiler/public/application/lib/check_for_json_errors.test.ts index 606b302fc85594..ccd8761c031b23 100644 --- a/x-pack/plugins/searchprofiler/public/application/lib/check_for_json_errors.test.ts +++ b/x-pack/plugins/searchprofiler/public/application/lib/check_for_json_errors.test.ts @@ -12,7 +12,7 @@ describe('checkForParseErrors', function () { it('returns error from bad JSON', function () { const json = '{"foo": {"bar": {"baz": "buzz}}}'; const result = checkForParseErrors(json); - expect(result.error.message).to.be(`Unterminated string in JSON at position 32`); + expect(result.error.message).to.be(`Unexpected end of JSON input`); }); it('returns parsed value from good JSON', function () { diff --git a/x-pack/plugins/security_solution/server/lib/detection_engine/rule_management/api/rules/import_rules/route.test.ts b/x-pack/plugins/security_solution/server/lib/detection_engine/rule_management/api/rules/import_rules/route.test.ts index da4f9e67d7b8f1..f383a9d11cc00a 100644 --- a/x-pack/plugins/security_solution/server/lib/detection_engine/rule_management/api/rules/import_rules/route.test.ts +++ b/x-pack/plugins/security_solution/server/lib/detection_engine/rule_management/api/rules/import_rules/route.test.ts @@ -167,7 +167,7 @@ describe('Import rules route', () => { errors: [ { error: { - message: `Unexpected token 'h', "this is not"... 
is not valid JSON`, + message: 'Unexpected token h in JSON at position 1', status_code: 400, }, rule_id: '(unknown id)', diff --git a/x-pack/plugins/security_solution/server/lib/detection_engine/rule_management/logic/import/create_rules_stream_from_ndjson.test.ts b/x-pack/plugins/security_solution/server/lib/detection_engine/rule_management/logic/import/create_rules_stream_from_ndjson.test.ts index 50b8d5e7ee6fc9..be9561598fd084 100644 --- a/x-pack/plugins/security_solution/server/lib/detection_engine/rule_management/logic/import/create_rules_stream_from_ndjson.test.ts +++ b/x-pack/plugins/security_solution/server/lib/detection_engine/rule_management/logic/import/create_rules_stream_from_ndjson.test.ts @@ -234,9 +234,7 @@ describe('create_rules_stream_from_ndjson', () => { type: 'query', immutable: false, }); - expect(resultOrError[1].message).toEqual( - `Expected property name or '}' in JSON at position 1` - ); + expect(resultOrError[1].message).toEqual('Unexpected token , in JSON at position 1'); expect(resultOrError[2]).toEqual({ rule_id: 'rule-2', output_index: '.siem-signals', diff --git a/x-pack/plugins/task_manager/server/saved_objects/migrations.test.ts b/x-pack/plugins/task_manager/server/saved_objects/migrations.test.ts index 794a9c466906ff..081918673d82af 100644 --- a/x-pack/plugins/task_manager/server/saved_objects/migrations.test.ts +++ b/x-pack/plugins/task_manager/server/saved_objects/migrations.test.ts @@ -282,7 +282,7 @@ describe('handles errors during migrations', () => { migration800(taskInstance, migrationContext); }).toThrowError(); expect(migrationContext.log.error).toHaveBeenCalledWith( - `savedObject 8.0.0 migration failed for task instance ${taskInstance.id} with error: Expected property name or '}' in JSON at position 2`, + `savedObject 8.0.0 migration failed for task instance ${taskInstance.id} with error: Unexpected token s in JSON at position 2`, { migrations: { taskInstanceDocument: { diff --git a/yarn.lock b/yarn.lock index 9c1d247a8f5fb1..8016568cd10f5a 100644 --- a/yarn.lock +++ b/yarn.lock @@ -9549,12 +9549,10 @@ dependencies: "@types/node" "*" -"@types/node@*", "@types/node@20.9.0", "@types/node@>= 8", "@types/node@>=12.12.47", "@types/node@>=13.7.0", "@types/node@>=8.9.0", "@types/node@^10.1.0", "@types/node@^14.0.10 || ^16.0.0", "@types/node@^14.14.20 || ^16.0.0", "@types/node@^18.11.18", "@types/node@^18.17.5": - version "20.9.0" - resolved "https://registry.yarnpkg.com/@types/node/-/node-20.9.0.tgz#bfcdc230583aeb891cf51e73cfdaacdd8deae298" - integrity sha512-nekiGu2NDb1BcVofVcEKMIwzlx4NjHlcjhoxxKBNLtz15Y1z7MYf549DFvkHSId02Ax6kGwWntIBPC3l/JZcmw== - dependencies: - undici-types "~5.26.4" +"@types/node@*", "@types/node@18.18.5", "@types/node@>= 8", "@types/node@>=12.12.47", "@types/node@>=13.7.0", "@types/node@>=8.9.0", "@types/node@^10.1.0", "@types/node@^14.0.10 || ^16.0.0", "@types/node@^14.14.20 || ^16.0.0", "@types/node@^18.11.18", "@types/node@^18.17.5": + version "18.18.5" + resolved "https://registry.yarnpkg.com/@types/node/-/node-18.18.5.tgz#afc0fd975df946d6e1add5bbf98264225b212244" + integrity sha512-4slmbtwV59ZxitY4ixUZdy1uRLf9eSIvBWPQxNjhHYWEtn0FryfKpyS2cvADYXTayWdKEIsJengncrVvkI4I6A== "@types/nodemailer@^6.4.0": version "6.4.0" @@ -29386,11 +29384,6 @@ unc-path-regex@^0.1.2: resolved "https://registry.yarnpkg.com/unc-path-regex/-/unc-path-regex-0.1.2.tgz#e73dd3d7b0d7c5ed86fbac6b0ae7d8c6a69d50fa" integrity sha1-5z3T17DXxe2G+6xrCufYxqadUPo= -undici-types@~5.26.4: - version "5.26.5" - resolved 
"https://registry.yarnpkg.com/undici-types/-/undici-types-5.26.5.tgz#bcd539893d00b56e964fd2657a4866b221a65617" - integrity sha512-JlCMO+ehdEIKqlFxk6IfVoAUVmgz7cU7zD/h9XZ0qzeosSHmUJVOzSQvvYSYWXkFXC+IfLKSIffhv0sVZup6pA== - undici@^5.21.2, undici@^5.22.1: version "5.26.3" resolved "https://registry.yarnpkg.com/undici/-/undici-5.26.3.tgz#ab3527b3d5bb25b12f898dfd22165d472dd71b79"