diff --git a/.changeset/eight-rice-jog.md b/.changeset/eight-rice-jog.md new file mode 100644 index 000000000..589313984 --- /dev/null +++ b/.changeset/eight-rice-jog.md @@ -0,0 +1,6 @@ +--- +"@replayio/cypress": patch +"@replayio/playwright": patch +--- + +Improved resiliency to GitHub API errors when auto-populating PR-related metadata diff --git a/packages/replay/package.json b/packages/replay/package.json index 0337acf7c..f73c8291b 100644 --- a/packages/replay/package.json +++ b/packages/replay/package.json @@ -74,8 +74,7 @@ "./src/bin.ts", "./src/main.ts", "./src/utils.ts", - "./src/metadata/*.ts", - "./src/metadata/test/index.ts" + "./src/metadata/*.ts" ] } } diff --git a/packages/replay/src/main.ts b/packages/replay/src/main.ts index f23799620..a847c742b 100644 --- a/packages/replay/src/main.ts +++ b/packages/replay/src/main.ts @@ -18,7 +18,10 @@ import { readToken } from "./auth"; import { ProtocolError } from "./client"; import { ensureBrowsersInstalled, getExecutablePath, updateBrowsers } from "./install"; import { getLaunchDarkly } from "./launchdarkly"; -import { add, sanitize, source as sourceMetadata, test as testMetadata } from "./metadata"; +export { sanitizeMetadata as sanitize } from "@replay-cli/shared/recording/metadata/sanitizeMetadata"; +import * as sourceMetadata from "@replay-cli/shared/recording/metadata/legacy/source"; +import * as testMetadata from "@replay-cli/shared/recording/metadata/legacy/test/index"; +import { addMetadata } from "@replay-cli/shared/recording/metadata/addMetadata"; import { addRecordingEvent, readRecordings, @@ -38,10 +41,11 @@ import { UploadAllOptions, UploadOptions, type ExternalRecordingEntry, - type UnstructuredMetadata, } from "./types"; +export type { UnstructuredMetadata } from "@replay-cli/shared/recording/types"; import { ReplayClient } from "./upload"; import { getDirectory, maybeLog, openExecutable } from "./utils"; +import { sanitizeMetadata } from "@replay-cli/shared/recording/metadata/sanitizeMetadata"; export type { BrowserName, RecordingEntry } from "./types"; export { updateStatus } from "./updateStatus"; @@ -686,7 +690,7 @@ function removeAllRecordings(opts: Options = {}) { } function addLocalRecordingMetadata(recordingId: string, metadata: Record<string, unknown>) { - add(recordingId, metadata); + addMetadata(recordingId, metadata); } async function updateMetadata({ @@ -727,7 +731,7 @@ async function updateMetadata({ }); const data = Object.assign(md, ...keyedObjects); - const sanitized = await sanitize(data); + const sanitized = await sanitizeMetadata(data); debug("Sanitized metadata: %O", sanitized); @@ -735,7 +739,7 @@ async function updateMetadata({ recordings.forEach(r => { maybeLog(verbose, `Setting metadata for ${r.id}`); - add(r.id, sanitized); + addMetadata(r.id, sanitized); }); } @@ -829,7 +833,6 @@ async function version() { export { ExternalRecordingEntry, - UnstructuredMetadata, addLocalRecordingMetadata, getDirectory, launchBrowser, diff --git a/packages/replay/src/metadata/env.test.ts b/packages/replay/src/metadata/env.test.ts deleted file mode 100644 index a68154aed..000000000 --- a/packages/replay/src/metadata/env.test.ts +++ /dev/null @@ -1,132 +0,0 @@ -import { firstEnvValueOf } from "./env"; - -describe("firstEnvValueOf", () => { - let env: NodeJS.ProcessEnv; - - beforeEach(() => { - env = process.env; - process.env = {}; - }); - - afterEach(() => { - process.env = env; - }); - - describe("using key names", () => { - it("returns first value", () => { - process.env = { - KEY_1: "key 1", - KEY_2: "key 2", -
}; - - const resolver = firstEnvValueOf("KEY_1", "KEY_2"); - const result = resolver(); - - expect(result).toBe("key 1"); - }); - - it("ignores empty strings", () => { - process.env = { - KEY_1: "", - KEY_2: "key 2", - }; - - const resolver = firstEnvValueOf("KEY_1", "KEY_2"); - const result = resolver(); - - expect(result).toBe("key 2"); - }); - - it("ignores undefined keys", () => { - process.env = { - KEY_1: undefined, - KEY_2: "key 2", - }; - - const resolver = firstEnvValueOf("KEY_1", "KEY_2"); - const result = resolver(); - - expect(result).toBe("key 2"); - }); - - it("returns undefined when no keys match", () => { - process.env = { - KEY_1: undefined, - KEY_2: "key 2", - }; - - const resolver = firstEnvValueOf("KEY_3"); - const result = resolver(); - - expect(result).toBe(undefined); - }); - }); - - describe("using callbacks", () => { - it("returns first value", () => { - process.env = { - KEY_1: "key 1", - KEY_1_VALUE: "value 1", - KEY_2: "key 2", - KEY_2_VALUE: "value 2", - }; - - const resolver = firstEnvValueOf( - env => env.KEY_1 && env.KEY_1_VALUE, - env => env.KEY_2 && env.KEY_2_VALUE - ); - const result = resolver(); - - expect(result).toBe("value 1"); - }); - - it("ignores empty strings", () => { - process.env = { - KEY_1: "key 1", - KEY_1_VALUE: "", - KEY_2: "key 2", - KEY_2_VALUE: "value 2", - }; - - const resolver = firstEnvValueOf( - env => env.KEY_1 && env.KEY_1_VALUE, - env => env.KEY_2 && env.KEY_2_VALUE - ); - const result = resolver(); - - expect(result).toBe("value 2"); - }); - - it("ignores undefined keys", () => { - process.env = { - KEY_1: "key 1", - KEY_1_VALUE: undefined, - KEY_2: "key 2", - KEY_2_VALUE: "value 2", - }; - - const resolver = firstEnvValueOf( - env => env.KEY_1 && env.KEY_1_VALUE, - env => env.KEY_2 && env.KEY_2_VALUE - ); - const result = resolver(); - - expect(result).toBe("value 2"); - }); - - it("returns undefined when no keys match", () => { - process.env = { - KEY_1: "key 1", - KEY_2: "key 2", - }; - - const resolver = firstEnvValueOf( - env => env.KEY_1 && env.KEY_1_VALUE, - env => env.KEY_2 && env.KEY_2_VALUE - ); - const result = resolver(); - - expect(result).toBe(undefined); - }); - }); -}); diff --git a/packages/replay/src/metadata/env.ts b/packages/replay/src/metadata/env.ts index 79cc22494..181296660 100644 --- a/packages/replay/src/metadata/env.ts +++ b/packages/replay/src/metadata/env.ts @@ -1,15 +1 @@ -import { defaulted, string } from "superstruct"; - -type Resolver = string | ((env: NodeJS.ProcessEnv) => string | undefined); - -const firstEnvValueOf = - (...envKeys: Resolver[]) => - () => - envKeys.reduce( - (a, k) => a || (typeof k === "function" ? 
k(process.env) : process.env[k]), - undefined - ); - -const envString = (...envKeys: Resolver[]) => defaulted(string(), firstEnvValueOf(...envKeys)); - -export { firstEnvValueOf, envString }; +export * from "@replay-cli/shared/recording/metadata/legacy/env"; diff --git a/packages/replay/src/metadata/index.ts b/packages/replay/src/metadata/index.ts index 1a7d5b858..7960e149f 100644 --- a/packages/replay/src/metadata/index.ts +++ b/packages/replay/src/metadata/index.ts @@ -1,82 +1,5 @@ -import { appendFileSync } from "fs"; -import path from "path"; - -import { Options, UnstructuredMetadata } from "../types"; -import { getDirectory, maybeLog } from "../utils"; - -import * as test from "./test"; +export { sanitizeMetadata as sanitize } from "@replay-cli/shared/recording/metadata/sanitizeMetadata"; +export { addMetadata as add } from "@replay-cli/shared/recording/metadata/addMetadata"; import * as source from "./source"; - -// Each known metadata block should have a sanitizer that will check the contents before the upload -const handlers = { - test: test.validate, - source: source.validate, -}; - -type AllowedKey = keyof typeof handlers; -const ALLOWED_KEYS = Object.keys(handlers); - -function isAllowedKey(key: string): key is AllowedKey { - return ALLOWED_KEYS.includes(key); -} - -// Sanitizing arbitrary recording metadata before uploading by removing any -// non-object values (allowing null) and limiting object values to known keys or -// userspace keys prefixed by `x-`. -async function sanitize(metadata: UnstructuredMetadata, opts: Options = {}) { - const updated: UnstructuredMetadata = {}; - for (const key of Object.keys(metadata)) { - const value = metadata[key]; - - if (typeof value !== "object") { - maybeLog( - opts.verbose, - `Ignoring metadata key "${key}". Expected an object but received ${typeof value}` - ); - - continue; - } - - if (value === null || key.startsWith("x-")) { - // passthrough null or userspace types - updated[key] = value; - } else if (isAllowedKey(key)) { - // validate known types - const validated = await handlers[key](metadata as any); - Object.assign(updated, validated); - } else { - // and warn when dropping all other types - maybeLog( - opts.verbose, - `Ignoring metadata key "${key}". Custom metadata blocks must be prefixed by "x-". Try "x-${key}" instead.` - ); - } - } - - return updated; -} - -/** - * Adds unstructured metadata to the local recordings database. - * - * New metadata will be merged with existing data. If the same key is used by - * multiple entries, the most recent entry's value will be used. - * - * Metadata is not validated until the recording is uploaded so arbitrary keys - * may be used here to manage recordings before upload. 
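The same sanitization contract carries over to the shared sanitizeMetadata replacement used above. A minimal sketch of the expected behavior (the metadata values here are hypothetical):

```ts
import { sanitizeMetadata } from "@replay-cli/shared/recording/metadata/sanitizeMetadata";

async function example() {
  const sanitized = await sanitizeMetadata({
    "x-build": { ci: "github" }, // userspace block prefixed with "x-": passed through as-is
    note: "plain string", // non-object value: logged and dropped
    build: { id: 123 }, // unknown block without an "x-" prefix: logged and dropped
  });
  console.log(sanitized); // { "x-build": { ci: "github" } }
}
```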
- * - * @param recordingId UUID of the recording - * @param metadata Recording metadata - */ -function add(recordingId: string, metadata: UnstructuredMetadata) { - const entry = { - id: recordingId, - kind: "addMetadata", - metadata, - timestamp: Date.now(), - }; - - appendFileSync(path.join(getDirectory(), "recordings.log"), `\n${JSON.stringify(entry)}\n`); -} - -export { add, sanitize, source, test }; +import * as test from "./test"; +export { source, test }; diff --git a/packages/replay/src/metadata/source.test.ts b/packages/replay/src/metadata/source.test.ts deleted file mode 100644 index 2cbca21c4..000000000 --- a/packages/replay/src/metadata/source.test.ts +++ /dev/null @@ -1,23 +0,0 @@ -import { init } from "./source"; - -describe("source", () => { - describe("init", () => { - describe("buildkite", () => { - it("omits merge.id when BUILDKITE_PULL_REQUEST is false", async () => { - process.env.BUILDKITE_COMMIT = "abc"; - process.env.BUILDKITE_PULL_REQUEST = "false"; - - const source = await init(); - expect(source).not.toHaveProperty("source.merge.id"); - }); - - it("includes merge.id when BUILDKITE_PULL_REQUEST is valued", async () => { - process.env.BUILDKITE_COMMIT = "abc"; - process.env.BUILDKITE_PULL_REQUEST = "123"; - - const source = await init(); - expect(source).toHaveProperty("source.merge.id", "123"); - }); - }); - }); -}); diff --git a/packages/replay/src/metadata/source.ts b/packages/replay/src/metadata/source.ts index 194a972d4..cad4bd8f5 100644 --- a/packages/replay/src/metadata/source.ts +++ b/packages/replay/src/metadata/source.ts @@ -1,378 +1,7 @@ -import { cachedFetch } from "@replay-cli/shared/cachedFetch"; -import dbg from "debug"; -import fs from "fs"; -import { create, defaulted, number, object, optional } from "superstruct"; -import { UnstructuredMetadata } from "../types"; -import { envString } from "./env"; +import { UnstructuredMetadata } from "@replay-cli/shared/recording/types"; +import * as source from "@replay-cli/shared/recording/metadata/legacy/source"; +export * from "@replay-cli/shared/recording/metadata/legacy/source"; -const defaultObject = (objStruct: any) => optional(defaulted(object(objStruct), {})); - -const debug = dbg("replay:cli:metadata:source"); -const VERSION = 1; - -class GitHubHttpError extends Error { - status: number; - statusText: string; - - constructor(status: number, statusText: string) { - super(); - this.status = status; - this.statusText = statusText; - } -} - -type CacheEntry = { json: any | null; status: number; statusText: string }; - -export const cache: Map = new Map(); - -export function resetCache(url?: string) { - if (url) { - cache.delete(url); - } else { - cache.clear(); - } -} - -function getCircleCISourceControlProvider(env: NodeJS.ProcessEnv) { - return env.CIRCLE_PULL_REQUEST?.startsWith("https://github.com") - ? "github" - : env.CIRCLE_PULL_REQUEST?.startsWith("https://bitbucket.com") - ? "bitbucket" - : undefined; -} - -function getCircleCIRepository(env: NodeJS.ProcessEnv) { - return env.CIRCLE_PROJECT_USERNAME && env.CIRCLE_PROJECT_REPONAME - ? 
`${env.CIRCLE_PROJECT_USERNAME}/${env.CIRCLE_PROJECT_REPONAME}` - : ""; -} - -function getCircleCIMergeId(env: NodeJS.ProcessEnv) { - if (env.CIRCLE_PULL_REQUEST) { - debug("Extracting merge id from %s", env.CIRCLE_PULL_REQUEST); - return env.CIRCLE_PULL_REQUEST.split("/").pop(); - } -} - -function getBuildkiteMessage(env: NodeJS.ProcessEnv) { - if (env.BUILDKITE_SOURCE === "webhook") { - return env.BUILDKITE_MESSAGE; - } -} - -function getBuildkiteRepository(env: NodeJS.ProcessEnv) { - return env.BUILDKITE_REPO?.match(/.*:(.*)\.git/)?.[1]; -} - -let gGitHubEvent: Record | null = null; - -function readGithubEvent(env: NodeJS.ProcessEnv) { - const { GITHUB_EVENT_PATH } = env; - if (!GITHUB_EVENT_PATH) { - debug("No github event file specified."); - return; - } - - if (!fs.existsSync(GITHUB_EVENT_PATH)) { - debug("Github event file does not exist at %s", GITHUB_EVENT_PATH); - return; - } - - try { - if (!gGitHubEvent) { - debug("Reading Github event file from %s", GITHUB_EVENT_PATH); - const contents = fs.readFileSync(GITHUB_EVENT_PATH, "utf8"); - gGitHubEvent = JSON.parse(contents); - } else { - debug("Using previously read Github event file"); - } - - return gGitHubEvent; - } catch (e) { - debug("Failed to read pull request number from event: %s", e); - } -} - -function getGitHubMergeId(env: NodeJS.ProcessEnv) { - const event = readGithubEvent(env); - if (event?.pull_request?.number) { - return String(event.pull_request.number); - } -} - -function getGitHubMergeSHA(env: NodeJS.ProcessEnv): string | undefined { - const event = readGithubEvent(env); - if (event?.pull_request?.head?.sha) { - return event.pull_request.head.sha; - } +export function validate(metadata: { source?: UnstructuredMetadata } = {}) { + return source.validate(metadata.source); } - -async function expandCommitMetadataFromGitHub(repo: string, sha?: string) { - const { - GITHUB_TOKEN, - RECORD_REPLAY_METADATA_SOURCE_COMMIT_TITLE, - RECORD_REPLAY_METADATA_SOURCE_COMMIT_URL, - RECORD_REPLAY_METADATA_SOURCE_COMMIT_USER, - } = process.env; - - if (!repo || !sha) return; - - const url = `https://api.github.com/repos/${repo}/commits/${sha}`; - - debug("Fetching commit metadata from %s with %d char token", url, GITHUB_TOKEN?.length || 0); - - const resp = await cachedFetch(url, { - headers: GITHUB_TOKEN - ? { - Authorization: `token ${GITHUB_TOKEN}`, - } - : undefined, - }); - - // override the SHA if passed because it might be the SHA from the github - // event rather than GITHUB_SHA. we update this regardless of our ability to - // fetch the details because that can fail due to a missing token. 
- process.env.RECORD_REPLAY_METADATA_SOURCE_COMMIT_ID = sha; - if (resp.status === 200) { - const json = resp.json; - process.env.RECORD_REPLAY_METADATA_SOURCE_COMMIT_TITLE = - RECORD_REPLAY_METADATA_SOURCE_COMMIT_TITLE || - json.commit.message.split("\n").shift().substring(0, 80); - process.env.RECORD_REPLAY_METADATA_SOURCE_COMMIT_URL = - RECORD_REPLAY_METADATA_SOURCE_COMMIT_URL || json.html_url; - process.env.RECORD_REPLAY_METADATA_SOURCE_COMMIT_USER = - RECORD_REPLAY_METADATA_SOURCE_COMMIT_USER || json.author?.login; - } else { - debug("Failed to fetch GitHub commit metadata: %s", resp.statusText); - throw new GitHubHttpError(resp.status, resp.statusText); - } -} - -async function expandMergeMetadataFromGitHub(repo: string, pr?: string) { - const { - GITHUB_TOKEN, - RECORD_REPLAY_METADATA_SOURCE_MERGE_ID, - RECORD_REPLAY_METADATA_SOURCE_MERGE_TITLE, - RECORD_REPLAY_METADATA_SOURCE_MERGE_URL, - RECORD_REPLAY_METADATA_SOURCE_MERGE_USER, - RECORD_REPLAY_METADATA_SOURCE_BRANCH, - } = process.env; - - if (!repo || !pr) { - debug("Unable to retrieve merge metadata: Repo and PR number missing"); - return; - } - - const url = `https://api.github.com/repos/${repo}/pulls/${pr}`; - - debug("Fetching merge metadata from %s with %d char token", url, GITHUB_TOKEN?.length || 0); - - const resp = await cachedFetch(url, { - headers: GITHUB_TOKEN - ? { - Authorization: `token ${GITHUB_TOKEN}`, - } - : undefined, - }); - - if (resp.status === 200) { - const json = await resp.json; - process.env.RECORD_REPLAY_METADATA_SOURCE_BRANCH = - RECORD_REPLAY_METADATA_SOURCE_BRANCH || json.head?.ref; - process.env.RECORD_REPLAY_METADATA_SOURCE_MERGE_ID = - RECORD_REPLAY_METADATA_SOURCE_MERGE_ID || pr; - process.env.RECORD_REPLAY_METADATA_SOURCE_MERGE_TITLE = - RECORD_REPLAY_METADATA_SOURCE_MERGE_TITLE || json.title; - process.env.RECORD_REPLAY_METADATA_SOURCE_MERGE_URL = - RECORD_REPLAY_METADATA_SOURCE_MERGE_URL || json.html_url; - process.env.RECORD_REPLAY_METADATA_SOURCE_MERGE_USER = - RECORD_REPLAY_METADATA_SOURCE_MERGE_USER || json.user?.login; - } else { - debug("Failed to fetch GitHub commit metadata: %o", resp); - throw new GitHubHttpError(resp.status, resp.statusText); - } -} - -function buildTestRunId(repository: string | undefined, runId: string | undefined) { - if (repository && runId) { - return `${repository}--${runId}`; - } -} - -function getTestRunIdFromEnvironment(env: NodeJS.ProcessEnv) { - const userTestRunId = - process.env.REPLAY_METADATA_TEST_RUN_ID || - process.env.RECORD_REPLAY_METADATA_TEST_RUN_ID || - process.env.RECORD_REPLAY_TEST_RUN_ID; - - let ciTestRunId = - buildTestRunId(process.env.GITHUB_REPOSITORY, process.env.GITHUB_RUN_ID) || - buildTestRunId(process.env.CIRCLE_PROJECT_REPONAME, process.env.CIRCLE_WORKFLOW_ID) || - buildTestRunId(getBuildkiteRepository(process.env), process.env.BUILDKITE_BUILD_ID) || - buildTestRunId(process.env.SEMAPHORE_GIT_REPO_SLUG, process.env.SEMAPHORE_WORKFLOW_ID); - - return userTestRunId || ciTestRunId; -} - -const versions = () => ({ - [1 as number]: object({ - branch: optional( - envString( - "RECORD_REPLAY_METADATA_SOURCE_BRANCH", - "GITHUB_REF_NAME", - "BUILDKITE_BRANCH", - "CIRCLE_BRANCH", - "SEMAPHORE_GIT_PR_BRANCH" - ) - ), - commit: defaultObject({ - id: envString( - "RECORD_REPLAY_METADATA_SOURCE_COMMIT_ID", - "GITHUB_SHA", - "BUILDKITE_COMMIT", - "CIRCLE_SHA1", - "SEMAPHORE_GIT_SHA" - ), - title: optional(envString("RECORD_REPLAY_METADATA_SOURCE_COMMIT_TITLE", getBuildkiteMessage)), - url: 
optional(envString("RECORD_REPLAY_METADATA_SOURCE_COMMIT_URL")), - user: optional(envString("RECORD_REPLAY_METADATA_SOURCE_COMMIT_USER")), - }), - trigger: defaultObject({ - user: optional( - envString( - "RECORD_REPLAY_METADATA_SOURCE_TRIGGER_USER", - "GITHUB_ACTOR", - "BUILDKITE_BUILD_CREATOR", - "BUILDKITE_BUILD_AUTHOR", - "CIRCLE_USERNAME", - "CIRCLE_PR_USERNAME" - ) - ), - name: optional(envString("RECORD_REPLAY_METADATA_SOURCE_TRIGGER_NAME")), - workflow: optional( - envString( - "RECORD_REPLAY_METADATA_SOURCE_TRIGGER_WORKFLOW", - "GITHUB_RUN_ID", - "BUILDKITE_BUILD_ID", - "CIRCLE_WORKFLOW_ID", - "SEMAPHORE_WORKFLOW_ID" - ) - ), - url: optional( - envString( - "RECORD_REPLAY_METADATA_SOURCE_TRIGGER_URL", - env => - env.GITHUB_WORKFLOW && - `${env.GITHUB_SERVER_URL ?? "https://github.com"}/${ - env.GITHUB_REPOSITORY - }/actions/runs/${env.GITHUB_RUN_ID}`, - "BUILDKITE_BUILD_URL", - "CIRCLE_BUILD_URL", - env => - env.SEMAPHORE_ORGANIZATION_URL && - env.SEMAPHORE_WORKFLOW_ID && - `${env.SEMAPHORE_ORGANIZATION_URL}/workflows/${env.SEMAPHORE_WORKFLOW_ID}` - ) - ), - }), - merge: defaultObject({ - id: optional( - envString( - "RECORD_REPLAY_METADATA_SOURCE_MERGE_ID", - env => - env.BUILDKITE_PULL_REQUEST && env.BUILDKITE_PULL_REQUEST !== "false" - ? env.BUILDKITE_PULL_REQUEST - : undefined, - getCircleCIMergeId, - "SEMAPHORE_GIT_PR_NUMBER" - ) - ), - title: optional( - envString("RECORD_REPLAY_METADATA_SOURCE_MERGE_TITLE", "SEMAPHORE_GIT_PR_NAME") - ), - url: optional(envString("RECORD_REPLAY_METADATA_SOURCE_MERGE_URL")), - user: optional(envString("RECORD_REPLAY_METADATA_SOURCE_MERGE_USER")), - }), - provider: optional( - envString( - "RECORD_REPLAY_METADATA_SOURCE_PROVIDER", - env => env.GITHUB_WORKFLOW && "github", - "BUILDKITE_PIPELINE_PROVIDER", - getCircleCISourceControlProvider, - "SEMAPHORE_GIT_PROVIDER" - ) - ), - repository: optional( - envString( - "RECORD_REPLAY_METADATA_SOURCE_REPOSITORY", - "GITHUB_REPOSITORY", - getBuildkiteRepository, - getCircleCIRepository, - "SEMAPHORE_GIT_REPO_SLUG" - ) - ), - version: defaulted(number(), () => 1), - }), -}); - -function validate(metadata: { source: UnstructuredMetadata }) { - if (!metadata || !metadata.source) { - throw new Error("Source metadata does not exist"); - } - - return init(metadata.source); -} - -async function expandEnvironment() { - const { CIRCLECI, CIRCLE_SHA1, GITHUB_SHA, GITHUB_REPOSITORY } = process.env; - - try { - if (GITHUB_SHA && GITHUB_REPOSITORY) { - const sha = getGitHubMergeSHA(process.env) ?? GITHUB_SHA; - const mergeId = getGitHubMergeId(process.env); - debug("GitHub context $0", { mergeId, sha }); - - await expandCommitMetadataFromGitHub(GITHUB_REPOSITORY, sha); - await expandMergeMetadataFromGitHub(GITHUB_REPOSITORY, mergeId); - } else if (CIRCLECI) { - const repo = getCircleCIRepository(process.env); - const provider = getCircleCISourceControlProvider(process.env); - - if (provider !== "github") { - debug("Unsupported source control provider: %s", process.env.CIRCLE_PULL_REQUEST); - return; - } - - await expandCommitMetadataFromGitHub(repo, CIRCLE_SHA1); - await expandMergeMetadataFromGitHub(repo, getCircleCIMergeId(process.env)); - } - } catch (e) { - if (e instanceof GitHubHttpError) { - console.warn(`Unable to fetch pull request from GitHub: ${e.statusText}`); - if (!process.env.GITHUB_TOKEN && e.status === 404) { - console.warn( - "If this is a private repo, you can set the GITHUB_TOKEN environment variable\nwith a personal access token to allow the Replay CLI to fetch this metadata." 
- ); - } - } - - console.warn("Failed to expand environment details", e); - } -} - -async function init(data: UnstructuredMetadata = {}) { - const version = typeof data.version === "number" ? data.version : VERSION; - - await expandEnvironment(); - const structs = versions(); - - if (structs[version]) { - return { - source: create(data, structs[version]), - }; - } else { - throw new Error(`Source metadata version ${data.version} not supported`); - } -} - -export { getTestRunIdFromEnvironment, init, validate }; diff --git a/packages/replay/src/metadata/test.ts b/packages/replay/src/metadata/test.ts new file mode 100644 index 000000000..c90d5df8e --- /dev/null +++ b/packages/replay/src/metadata/test.ts @@ -0,0 +1,7 @@ +import { UnstructuredMetadata } from "@replay-cli/shared/recording/types"; +import * as test from "@replay-cli/shared/recording/metadata/legacy/test/index"; +export * from "@replay-cli/shared/recording/metadata/legacy/test/index"; + +export function validate(metadata: { test?: UnstructuredMetadata } = {}) { + return test.validate(metadata.test); +} diff --git a/packages/replay/src/metadata/test/index.ts b/packages/replay/src/metadata/test/index.ts deleted file mode 100644 index 2f45eb23e..000000000 --- a/packages/replay/src/metadata/test/index.ts +++ /dev/null @@ -1,78 +0,0 @@ -import { create, Struct, any } from "superstruct"; - -import { UnstructuredMetadata } from "../../types"; - -import v1, { TestMetadataV1 } from "./v1"; -import v2, { TestMetadataV2 } from "./v2"; - -const VERSION = "2.1.0"; - -export type { TestMetadataV1, TestMetadataV2 }; -export type UserActionEvent = TestMetadataV1.UserActionEvent | TestMetadataV2.UserActionEvent; -export type Test = TestMetadataV1.Test | TestMetadataV2.Test; -export type TestResult = TestMetadataV1.TestResult | TestMetadataV2.TestResult; -export type TestRun = TestMetadataV1.TestRun | TestMetadataV2.TestRun; -export type TestError = TestMetadataV1.TestError | TestMetadataV2.TestError; - -const versions = { - ...v1, - ...v2, -}; - -function validate(metadata: { test: UnstructuredMetadata }) { - if (!metadata || !metadata.test) { - throw new Error("Test metadata does not exist"); - } - - return init(metadata.test); -} - -type Metadata = (typeof versions)[keyof typeof versions]; - -function getVersion(k: string): Struct { - const v: Struct | undefined = (versions as any)[k]; - if (!v) { - console.warn(`Unable to validate unknown version of test metadata:${k} `); - return any(); - } - - return v; -} - -function init(data: Metadata | UnstructuredMetadata = {}) { - let version = VERSION; - - if ("version" in data && typeof data.version === "number") { - // explicitly adapt the pre-semver scheme - version = "1.0.0"; - } else if ("schemaVersion" in data && typeof data.schemaVersion === "string") { - version = data.schemaVersion; - } - - let schema: Struct; - try { - schema = getVersion(version); - } catch { - console.warn( - `Unable to validate unknown version of test metadata: ${version || "Unspecified"}` - ); - - return { - test: data, - }; - } - - try { - return { - test: create(data, schema), - }; - } catch (e) { - console.error(e); - console.error("Metadata:"); - console.error(JSON.stringify(data, undefined, 2)); - - return {}; - } -} - -export { validate, init }; diff --git a/packages/replay/src/metadata/test/v1.ts b/packages/replay/src/metadata/test/v1.ts deleted file mode 100644 index 8a5c1409e..000000000 --- a/packages/replay/src/metadata/test/v1.ts +++ /dev/null @@ -1,94 +0,0 @@ -import { - array, - defaulted, - enums, - number, 
- object, - optional, - string, - define, - any, - Infer, -} from "superstruct"; -const isUuid = require("is-uuid"); - -import { envString, firstEnvValueOf } from "../env"; - -const testResult = enums(["passed", "failed", "timedOut", "skipped", "unknown"]); -const testError = object({ - message: string(), - line: optional(number()), - column: optional(number()), -}); - -const test = object({ - id: optional(string()), - parentId: optional(string()), - title: string(), - path: optional(array(string())), - relativePath: optional(string()), - result: testResult, - error: optional(testError), - relativeStartTime: optional(number()), - duration: optional(number()), - steps: optional(array(any())), -}); - -const v1_0_0 = object({ - suite: optional(envString("RECORD_REPLAY_METADATA_TEST_SUITE")), - file: optional(envString("RECORD_REPLAY_METADATA_TEST_FILE")), - title: envString("RECORD_REPLAY_METADATA_TEST_TITLE"), - path: optional(array(string())), - result: defaulted( - enums(["passed", "failed", "timedOut", "skipped", "unknown"]), - firstEnvValueOf("RECORD_REPLAY_METADATA_TEST_RESULT") - ), - // before/after all hooks - hooks: optional( - array( - object({ - title: string(), - path: array(string()), - steps: optional(array(any())), - }) - ) - ), - tests: optional(array(test)), - runner: optional( - defaulted( - object({ - name: optional(envString("RECORD_REPLAY_METADATA_TEST_RUNNER_NAME")), - version: optional(envString("RECORD_REPLAY_METADATA_TEST_RUNNER_VERSION")), - plugin: optional(envString("RECORD_REPLAY_METADATA_TEST_RUNNER_PLUGIN")), - }), - {} - ) - ), - run: optional( - defaulted( - object({ - id: defaulted( - define("uuid", (v: any) => isUuid.v4(v)), - firstEnvValueOf("RECORD_REPLAY_METADATA_TEST_RUN_ID", "RECORD_REPLAY_TEST_RUN_ID") - ), - title: optional(envString("RECORD_REPLAY_METADATA_TEST_RUN_TITLE")), - mode: optional(envString("RECORD_REPLAY_METADATA_TEST_RUN_MODE")), - }), - {} - ) - ), - reporterErrors: defaulted(array(any()), []), - version: defaulted(number(), () => 1), -}); - -export namespace TestMetadataV1 { - export type UserActionEvent = any; - export type Test = Infer; - export type TestResult = Infer; - export type TestRun = Infer; - export type TestError = Infer; -} - -export default { - "1.0.0": v1_0_0, -}; diff --git a/packages/replay/src/metadata/test/v2.ts b/packages/replay/src/metadata/test/v2.ts deleted file mode 100644 index 89ca3e7f8..000000000 --- a/packages/replay/src/metadata/test/v2.ts +++ /dev/null @@ -1,155 +0,0 @@ -import { - Infer, - array, - assign, - defaulted, - enums, - nullable, - number, - object, - optional, - record, - string, -} from "superstruct"; - -import { firstEnvValueOf } from "../env"; - -const testError = object({ - name: string(), - message: string(), - line: optional(number()), - column: optional(number()), -}); - -const userActionEvent = object({ - data: object({ - id: string(), - parentId: nullable(string()), - category: enums(["assertion", "command", "other"]), - command: object({ - arguments: array(string()), - name: string(), - }), - scope: nullable(array(string())), - error: nullable(testError), - }), -}); - -const testResult = enums(["failed", "passed", "skipped", "timedOut", "unknown"]); - -const test_v2_0_0 = object({ - events: object({ - afterAll: array(userActionEvent), - afterEach: array(userActionEvent), - beforeAll: array(userActionEvent), - beforeEach: array(userActionEvent), - main: array(userActionEvent), - }), - approximateDuration: number(), - result: testResult, - source: object({ - scope: array(string()), - 
title: string(), - }), - error: nullable(testError), -}); - -const v2_0_0 = object({ - approximateDuration: number(), - environment: object({ - errors: defaulted( - array( - object({ - code: number(), - detail: nullable(string()), - name: string(), - message: string(), - }) - ), - [] - ), - pluginVersion: string(), - testRunner: object({ - name: string(), - version: string(), - }), - }), - result: testResult, - resultCounts: record(testResult, number()), - schemaVersion: defaulted(string(), () => "2.0.0"), - source: object({ - path: string(), - title: string(), - }), - tests: array(test_v2_0_0), - run: defaulted( - object({ - id: defaulted( - string(), - firstEnvValueOf( - "REPLAY_METADATA_TEST_RUN_ID", - "RECORD_REPLAY_METADATA_TEST_RUN_ID", - "RECORD_REPLAY_TEST_RUN_ID" - ) - ), - title: optional( - defaulted( - string(), - firstEnvValueOf("REPLAY_METADATA_TEST_RUN_TITLE", "RECORD_REPLAY_METADATA_TEST_RUN_TITLE") - ) - ), - mode: optional( - defaulted( - string(), - firstEnvValueOf("REPLAY_METADATA_TEST_RUN_MODE", "RECORD_REPLAY_METADATA_TEST_RUN_MODE") - ) - ), - }), - {} - ), -}); - -const test_v2_1_0 = assign( - test_v2_0_0, - object({ - id: number(), - attempt: number(), - }) -); - -const test_v2_2_0 = assign( - test_v2_1_0, - object({ - executionId: string(), - executionGroupId: string(), - maxAttempts: number(), - }) -); - -const v2_1_0 = assign( - v2_0_0, - object({ - tests: array(test_v2_1_0), - }) -); - -const v2_2_0 = assign( - v2_1_0, - object({ - tests: array(test_v2_2_0), - }) -); - -export namespace TestMetadataV2 { - export type UserActionEvent = Infer; - export type Test = Infer; - export type TestResult = Infer; - export type TestRun = Infer; - export type TestError = Infer; -} - -export default { - "2.2.0": v2_2_0, - "2.1.0": v2_1_0, - "2.0.0": v2_0_0, -}; diff --git a/packages/replay/src/types.ts b/packages/replay/src/types.ts index d9f5380bd..2cc69e247 100644 --- a/packages/replay/src/types.ts +++ b/packages/replay/src/types.ts @@ -1,7 +1,6 @@ +import { UnstructuredMetadata } from "@replay-cli/shared/recording/types"; import type { AgentOptions } from "http"; -export type UnstructuredMetadata = Record; - export interface Options { /** * Alternate recording directory diff --git a/packages/replay/src/upload.ts b/packages/replay/src/upload.ts index ee0b91b83..c1254bd5a 100644 --- a/packages/replay/src/upload.ts +++ b/packages/replay/src/upload.ts @@ -8,7 +8,7 @@ import path from "path"; import { Worker } from "worker_threads"; import ProtocolClient from "./client"; import dbg, { logPath } from "./debug"; -import { sanitize as sanitizeMetadata } from "./metadata"; +import { sanitizeMetadata } from "@replay-cli/shared/recording/metadata/sanitizeMetadata"; import { Options, OriginalSourceEntry, RecordingMetadata, SourceMapEntry } from "./types"; import { defer, getUserAgent, isValidUUID, maybeLog } from "./utils"; @@ -173,7 +173,7 @@ class ReplayClient { body: file, }); - if (resp.status !== 200) { + if (!resp.ok) { debug(await resp.text()); throw new Error(`Failed to upload recording. Response was ${resp.status} ${resp.statusText}`); } diff --git a/packages/replay/src/uploadWorker.ts b/packages/replay/src/uploadWorker.ts index 26b31ebb3..2d925e345 100644 --- a/packages/replay/src/uploadWorker.ts +++ b/packages/replay/src/uploadWorker.ts @@ -51,7 +51,7 @@ parentPort.on( debug(`Fetch response received. 
Status: ${resp.status}, Status Text: ${resp.statusText}`); - if (resp.status !== 200) { + if (!resp.ok) { const respText = await resp.text(); debug(`Fetch response text: ${respText}`); throw new Error(`Failed to upload recording. Response was ${resp.status} ${resp.statusText}`); diff --git a/packages/replayio/src/utils/browser/reportBrowserCrash.ts b/packages/replayio/src/utils/browser/reportBrowserCrash.ts index 2b5ab7140..c7607f16e 100644 --- a/packages/replayio/src/utils/browser/reportBrowserCrash.ts +++ b/packages/replayio/src/utils/browser/reportBrowserCrash.ts @@ -55,7 +55,7 @@ export async function reportBrowserCrash(stderr: string) { }, body: formData, }); - if (response.status >= 200 && response.status < 300) { + if (response.ok) { return { errorLogPath, uploaded: true, diff --git a/packages/shared/src/cachedFetch.test.ts b/packages/shared/src/cachedFetch.test.ts index 8768f69d1..53a7b5ac8 100644 --- a/packages/shared/src/cachedFetch.test.ts +++ b/packages/shared/src/cachedFetch.test.ts @@ -4,24 +4,13 @@ import type { resetCache as resetCacheStatic, } from "./cachedFetch"; -class Response { - public status: number; - public statusText: string; +const { Response }: typeof import("undici") = jest.requireActual("undici"); - constructor(status: number, statusText: string) { - this.status = status; - this.statusText = statusText; - } +const createMockedResponse = (status: number, text: string) => + new Response(JSON.stringify({ text }), { status, statusText: text }); - async json() { - return { - text: this.statusText, - }; - } -} - -const failedResponse = new Response(500, "error"); -const successResponse = new Response(200, "ok"); +const createFailedResponse = () => createMockedResponse(500, "error"); +const createSuccessResponse = () => createMockedResponse(200, "ok"); describe("cachedFetch", () => { let cachedFetch: typeof cachedFetchStatic; @@ -32,7 +21,7 @@ describe("cachedFetch", () => { jest.mock("undici"); mockFetch = require("undici").fetch; - mockFetch.mockReturnValue(successResponse); + mockFetch.mockReturnValue(createSuccessResponse()); ({ cachedFetch, resetCache } = require("./cachedFetch")); }); @@ -52,6 +41,7 @@ describe("cachedFetch", () => { "json": Object { "text": "ok", }, + "ok": true, "status": 200, "statusText": "ok", } @@ -59,7 +49,7 @@ describe("cachedFetch", () => { }); it("should retry after a failed request", async () => { - mockFetch.mockReturnValueOnce(Promise.resolve(failedResponse)); + mockFetch.mockReturnValueOnce(Promise.resolve(createFailedResponse())); const response = await cachedFetch("https://www.test.com", undefined, { baseDelay: 10, @@ -72,6 +62,7 @@ describe("cachedFetch", () => { "json": Object { "text": "ok", }, + "ok": true, "status": 200, "statusText": "ok", } @@ -79,7 +70,7 @@ describe("cachedFetch", () => { }); it("should return a failed response after retries have been exhausted", async () => { - mockFetch.mockReturnValue(Promise.resolve(failedResponse)); + mockFetch.mockReturnValue(Promise.resolve(createFailedResponse())); const response = await cachedFetch("https://www.test.com", undefined, { baseDelay: 10, @@ -91,6 +82,7 @@ describe("cachedFetch", () => { expect(response).toMatchInlineSnapshot(` Object { "json": null, + "ok": false, "status": 500, "statusText": "error", } @@ -109,7 +101,7 @@ describe("cachedFetch", () => { }); it("should cache a failed response", async () => { - mockFetch.mockReturnValue(Promise.resolve(failedResponse)); + mockFetch.mockReturnValue(Promise.resolve(createFailedResponse())); const response = await 
cachedFetch("https://www.test.com", undefined, { baseDelay: 10, @@ -125,10 +117,10 @@ describe("cachedFetch", () => { }); it("should allow a single cached value to be evicted", async () => { - mockFetch.mockReturnValue(new Response(200, "A")); + mockFetch.mockReturnValue(createMockedResponse(200, "A")); const responseA = await cachedFetch("https://www.test.com/A"); - mockFetch.mockReturnValue(new Response(200, "B")); + mockFetch.mockReturnValue(createMockedResponse(200, "B")); const responseB = await cachedFetch("https://www.test.com/B"); expect(mockFetch).toHaveBeenCalledTimes(2); @@ -146,7 +138,7 @@ describe("cachedFetch", () => { }); it("should allow the caller to decide if a retry should be attempted", async () => { - mockFetch.mockReturnValue(Promise.resolve(failedResponse)); + mockFetch.mockReturnValue(Promise.resolve(createFailedResponse())); let retryCount = 0; @@ -164,6 +156,7 @@ describe("cachedFetch", () => { expect(response).toMatchInlineSnapshot(` Object { "json": null, + "ok": false, "status": 500, "statusText": "error", } @@ -171,7 +164,7 @@ describe("cachedFetch", () => { }); it("should still honor the maxAttempts setting even when shouldRetry is provided", async () => { - mockFetch.mockReturnValue(Promise.resolve(failedResponse)); + mockFetch.mockReturnValue(Promise.resolve(createFailedResponse())); const response = await cachedFetch("https://www.test.com", undefined, { maxAttempts: 2, @@ -182,6 +175,7 @@ describe("cachedFetch", () => { expect(response).toMatchInlineSnapshot(` Object { "json": null, + "ok": false, "status": 500, "statusText": "error", } diff --git a/packages/shared/src/cachedFetch.ts b/packages/shared/src/cachedFetch.ts index c99f4f3c4..a65c78829 100644 --- a/packages/shared/src/cachedFetch.ts +++ b/packages/shared/src/cachedFetch.ts @@ -1,62 +1,110 @@ import { fetch } from "undici"; +import { createDeferred, Deferred } from "./async/createDeferred"; +import { timeoutAfter } from "./async/timeoutAfter"; -type CacheEntry = { json: any | null; status: number; statusText: string }; +type CacheEntry = { + json: any | null; + ok: boolean; + status: number; + statusText: string; +}; -export const cache: Map = new Map(); +export const cache: Map< + string, + | { + deferred: Deferred; + entry: undefined; + } + | { deferred: undefined; entry: CacheEntry } +> = new Map(); + +type ShouldRetryValue = boolean | { after: number }; -// Note that this method should not be used for GraphQL queries because it caches responses by URL. -// TODO [PRO-676] Move this into the "shared" package +/** This method should not be used for GraphQL queries because it caches responses by URL. */ export async function cachedFetch( url: string, init?: RequestInit, options: { baseDelay?: number; maxAttempts?: number; - shouldRetry?: (response: Response) => Promise; + shouldRetry?: ( + response: Response, + json: unknown, + defaultDelay: number + ) => Promise | ShouldRetryValue; } = {} ): Promise { const { baseDelay = 1_000, maxAttempts = 3, shouldRetry: shouldRetryFn } = options; - let attempt = 0; + const cached = cache.get(url); - while (!cache.has(url)) { - attempt++; + if (cached) { + if (cached.entry) { + return cached.entry; + } + return cached.deferred.promise; + } + + const deferred = createDeferred(); + cache.set(url, { deferred, entry: undefined }); + + let attempt = 1; + + while (true) { + // Retry with exponential backoff (e.g. 1s, 2s, 4s, ...) 
by default + let retryAfter = Math.pow(2, attempt) * baseDelay; + + try { + const resp = await fetch(url, init); + const json = await resp.json().catch(() => null); + + if (resp.ok) { + return storeCachedEntry(url, deferred, { + json, + ok: true, + status: resp.status, + statusText: resp.statusText, + }); + } + + let shouldRetry = attempt < maxAttempts; + + if (shouldRetry && shouldRetryFn) { + const shouldRetryResult = await shouldRetryFn(resp, json, retryAfter); + + shouldRetry = !!shouldRetryResult; - const resp = await fetch(url, init); - if (resp.status === 200) { - const json = await resp.json(); - cache.set(url, { - json, - status: resp.status, - statusText: resp.statusText, - }); - } else if (attempt < maxAttempts) { - if (shouldRetryFn) { - const shouldRetry = await shouldRetryFn(resp); if (!shouldRetry) { - cache.set(url, { - json: null, + return storeCachedEntry(url, deferred, { + json, + ok: false, status: resp.status, statusText: resp.statusText, }); - break; + } + if (typeof shouldRetryResult === "object" && typeof shouldRetryResult.after === "number") { + retryAfter = shouldRetryResult.after; } } - - // Retry with exponential backoff (e.g. 1s, 2s, 4s, ...) - const delay = Math.pow(2, attempt) * baseDelay; - await new Promise(resolve => setTimeout(resolve, delay)); - } else { // If we've run out of retries, store and return the error - cache.set(url, { - json: null, - status: resp.status, - statusText: resp.statusText, - }); + if (!shouldRetry) { + return storeCachedEntry(url, deferred, { + json, + ok: false, + status: resp.status, + statusText: resp.statusText, + }); + } + } catch (error) { + // Most likely a network failure; there's no need to call shouldRetryFn, + // but we should only retry if we haven't reached maxAttempts yet + if (attempt === maxAttempts) { + throw error; + } } - } - return cache.get(url)!; + await timeoutAfter(retryAfter); + attempt++; + } } export function resetCache(url?: string) { @@ -66,3 +114,13 @@ cache.clear(); } } + +function storeCachedEntry( + url: string, + deferred: Deferred<CacheEntry>, + entry: CacheEntry +) { + deferred.resolve(entry); + cache.set(url, { deferred: undefined, entry }); + return entry; +} diff --git a/packages/shared/src/recording/metadata/addMetadata.ts b/packages/shared/src/recording/metadata/addMetadata.ts new file mode 100644 index 000000000..3fd5cf5d5 --- /dev/null +++ b/packages/shared/src/recording/metadata/addMetadata.ts @@ -0,0 +1,27 @@ +import path from "node:path"; +import { appendFileSync } from "node:fs"; +import { UnstructuredMetadata } from "../types"; +import { getReplayPath } from "../../getReplayPath"; + +/** + * Adds unstructured metadata to the local recordings database. + * + * New metadata will be merged with existing data. If the same key is used by + * multiple entries, the most recent entry's value will be used. + * + * Metadata is not validated until the recording is uploaded so arbitrary keys + * may be used here to manage recordings before upload.
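As an illustration of the merge-on-read behavior described above (the recording id and keys here are made up):

```ts
import { addMetadata } from "@replay-cli/shared/recording/metadata/addMetadata";

// Each call appends one JSON line to recordings.log; nothing is validated yet.
const recordingId = "4f1b9f6a-1234-4d2a-9c70-0a4b5c6d7e8f"; // hypothetical UUID
addMetadata(recordingId, { "x-ci": { job: "e2e" } });
// If the same key is written again, the most recent entry wins on read:
addMetadata(recordingId, { "x-ci": { job: "e2e-retry" } });
```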
+ * + * @param recordingId UUID of the recording + * @param metadata Recording metadata + */ +export function addMetadata(recordingId: string, metadata: UnstructuredMetadata) { + const entry = { + id: recordingId, + kind: "addMetadata", + metadata, + timestamp: Date.now(), + }; + + appendFileSync(path.join(getReplayPath(), "recordings.log"), `\n${JSON.stringify(entry)}\n`); +} diff --git a/packages/shared/src/recording/metadata/legacy/source.ts b/packages/shared/src/recording/metadata/legacy/source.ts index 190ea21d8..ed0da410d 100644 --- a/packages/shared/src/recording/metadata/legacy/source.ts +++ b/packages/shared/src/recording/metadata/legacy/source.ts @@ -1,5 +1,5 @@ import fs from "fs"; -import { create, defaulted, number, object, optional, Struct } from "superstruct"; +import { create, defaulted, number, object, optional } from "superstruct"; import { cachedFetch } from "../../../cachedFetch"; import { logger } from "../../../logger"; import { UnstructuredMetadata } from "../../types"; @@ -11,13 +11,22 @@ const VERSION = 1; class GitHubHttpError extends Error { status: number; - statusText: string; - - constructor(status: number, statusText: string) { - super(); + constructor(message: string, status: number) { + super(message); this.status = status; - this.statusText = statusText; + } +} + +type CacheEntry = { json: any | null; status: number; statusText: string }; + +export const cache: Map<string, CacheEntry> = new Map(); + +export function resetCache(url?: string) { + if (url) { + cache.delete(url); + } else { + cache.clear(); } } @@ -36,8 +45,9 @@ function getCircleCIRepository(env: NodeJS.ProcessEnv) { } function getCircleCIMergeId(env: NodeJS.ProcessEnv) { + logger.info("GetCircleCIMergeId:Started"); if (env.CIRCLE_PULL_REQUEST) { - logger.debug(`Extracting merge id from ${env.CIRCLE_PULL_REQUEST}`); + logger.info("GetCircleCIMergeId:WillExtract", { circlePullRequest: env.CIRCLE_PULL_REQUEST }); return env.CIRCLE_PULL_REQUEST.split("/").pop(); } } @@ -55,36 +65,71 @@ function getBuildkiteRepository(env: NodeJS.ProcessEnv) { } let gGitHubEvent: Record<string, any> | null = null; function readGithubEvent(env: NodeJS.ProcessEnv) { + logger.info("ReadGithubEvent:Started"); const { GITHUB_EVENT_PATH } = env; if (!GITHUB_EVENT_PATH) { - logger.debug("No github event file specified."); + logger.info("ReadGithubEvent:NoEventFileSpecified"); return; } if (!fs.existsSync(GITHUB_EVENT_PATH)) { - logger.debug(`Github event file does not exist at ${GITHUB_EVENT_PATH}`); + logger.info("ReadGithubEvent:EventFileNotFound", { githubEventPath: GITHUB_EVENT_PATH }); return; } try { if (!gGitHubEvent) { - logger.debug(`Reading Github event file from ${GITHUB_EVENT_PATH}`); + logger.info("ReadGithubEvent:WillReadFromFile", { githubEventPath: GITHUB_EVENT_PATH }); const contents = fs.readFileSync(GITHUB_EVENT_PATH, "utf8"); gGitHubEvent = JSON.parse(contents); } else { - logger.debug("Using previously read Github event file"); + logger.info("ReadGithubEvent:WillUseExistingFile"); } return gGitHubEvent; } catch (error) { - logger.debug("Failed to read pull request number from event", { error }); + logger.error("ReadGithubEvent:Failed", { error }); } } -function getGitHubMergeId(env: NodeJS.ProcessEnv) { - const event = readGithubEvent(env); - if (event?.pull_request?.number) { - return String(event.pull_request.number); +function expandGitHubEvent() { + const event = readGithubEvent(process.env); + + if (event?.pull_request) { + if (event.pull_request.number) { + process.env.RECORD_REPLAY_METADATA_SOURCE_MERGE_ID ||=
String(event.pull_request.number); + } + + if (event.pull_request.title) { + process.env.RECORD_REPLAY_METADATA_SOURCE_MERGE_TITLE ||= event.pull_request.title; + } + + if (event.pull_request.html_url) { + process.env.RECORD_REPLAY_METADATA_SOURCE_MERGE_URL ||= event.pull_request.html_url; + } + + if (event.pull_request.user?.login) { + process.env.RECORD_REPLAY_METADATA_SOURCE_MERGE_USER ||= event.pull_request.user.login; + } + + if (event.pull_request.head?.ref) { + process.env.RECORD_REPLAY_METADATA_SOURCE_BRANCH ||= event.pull_request.head.ref; + } + } + + if (event?.head_commit) { + if (event.head_commit.message) { + process.env.RECORD_REPLAY_METADATA_SOURCE_COMMIT_TITLE ||= event.head_commit.message; + } + + if (event.head_commit.url) { + process.env.RECORD_REPLAY_METADATA_SOURCE_COMMIT_URL ||= event.head_commit.url; + } + + if (event.head_commit.committer.username) { + process.env.RECORD_REPLAY_METADATA_SOURCE_COMMIT_USER ||= + event.head_commit.committer.username; + } } } @@ -95,6 +140,60 @@ function getGitHubMergeSHA(env: NodeJS.ProcessEnv): string | undefined { } } +function isSecondaryRateLimitError(json: unknown) { + return ( + !!json && + typeof json === "object" && + "message" in json && + typeof json.message === "string" && + // https://github.com/octokit/plugin-throttling.js/blob/2970c6fbc2e2ad4e749804b0708c1a863800a7e4/src/index.ts#L134 + /\bsecondary rate\b/i.test(json.message) + ); +} + +async function fetchGitHubUrl(url: string) { + const { GITHUB_TOKEN } = process.env; + + const resp = await cachedFetch( + url, + { + headers: GITHUB_TOKEN + ? { + Authorization: `token ${GITHUB_TOKEN}`, + } + : undefined, + }, + { + shouldRetry: async (response, json, retryAfter) => { + // secondary rate limit can be returned with 403 so we have to check this before checking status codes + // https://docs.github.com/en/rest/using-the-rest-api/troubleshooting-the-rest-api?apiVersion=2022-11-28#rate-limit-errors + if (isSecondaryRateLimitError(json)) { + // https://github.com/octokit/plugin-throttling.js/blob/32c82b80a29a7a48c1cdf100fe0a3fb01b24fb23/src/index.ts#L138-L142 + return { + // the Retry-After header is in seconds; convert to milliseconds (default: one minute) + after: (Number(response.headers.get("retry-after")) || 60) * 1000, + }; + } + if (response.headers.get("x-ratelimit-remaining") === "0") { + // https://github.com/octokit/plugin-throttling.js/blob/32c82b80a29a7a48c1cdf100fe0a3fb01b24fb23/src/index.ts#L163-171 + const rateLimitReset = new Date( + ~~(response.headers.get("x-ratelimit-reset") as string) * 1000 + ).getTime(); + return { + // seconds until the rate limit resets, converted to milliseconds + after: Math.max(Math.ceil((rateLimitReset - Date.now()) / 1000) + 1, 0) * 1000, + }; + } + // https://github.com/octokit/plugin-retry.js/blob/d3577fcc8e6f602af3a959dbd1d8e7479971d0d5/src/error-request.ts#L9-L10 + // https://github.com/octokit/plugin-retry.js/blob/d3577fcc8e6f602af3a959dbd1d8e7479971d0d5/src/index.ts#L14 + if ([400, 401, 403, 404, 422, 451].includes(response.status)) { + return false; + } + return { after: retryAfter }; + }, + } + ); + return resp; +} + async function expandCommitMetadataFromGitHub(repo: string, sha?: string) { const { GITHUB_TOKEN, @@ -103,36 +202,50 @@ async function expandCommitMetadataFromGitHub(repo: string, sha?: string) { RECORD_REPLAY_METADATA_SOURCE_COMMIT_USER, } = process.env; - if (!repo || !sha) return; + if ( + [ + RECORD_REPLAY_METADATA_SOURCE_COMMIT_TITLE, + RECORD_REPLAY_METADATA_SOURCE_COMMIT_URL, + RECORD_REPLAY_METADATA_SOURCE_COMMIT_USER, + ].every(Boolean) + ) { + return; + } - const url = `https://api.github.com/repos/${repo}/commits/${sha}`; + if (!repo || !sha) { +
logger.error("ExpandCommitMetadataFromGitHub:MissingInfo", { hasRepo: !!repo, hasSha: !!sha }); + return; + } - logger.debug(`Fetching commit metadata from ${url} with ${GITHUB_TOKEN?.length || 0} char token`); + const url = `https://api.github.com/repos/${repo}/commits/${sha}`; - const resp = await cachedFetch(url, { - headers: GITHUB_TOKEN - ? { - Authorization: `token ${GITHUB_TOKEN}`, - } - : undefined, + logger.info("ExpandCommitMetadataFromGitHub:Started", { + url, + tokenLength: GITHUB_TOKEN?.length || 0, }); + const resp = await fetchGitHubUrl(url); + // override the SHA if passed because it might be the SHA from the github // event rather than GITHUB_SHA. we update this regardless of our ability to // fetch the details because that can fail due to a missing token. process.env.RECORD_REPLAY_METADATA_SOURCE_COMMIT_ID = sha; - if (resp.status === 200) { + if (resp.ok) { const json = resp.json; - process.env.RECORD_REPLAY_METADATA_SOURCE_COMMIT_TITLE = - RECORD_REPLAY_METADATA_SOURCE_COMMIT_TITLE || - json.commit.message.split("\n").shift().substring(0, 80); - process.env.RECORD_REPLAY_METADATA_SOURCE_COMMIT_URL = - RECORD_REPLAY_METADATA_SOURCE_COMMIT_URL || json.html_url; - process.env.RECORD_REPLAY_METADATA_SOURCE_COMMIT_USER = - RECORD_REPLAY_METADATA_SOURCE_COMMIT_USER || json.author?.login; + process.env.RECORD_REPLAY_METADATA_SOURCE_COMMIT_TITLE ||= json.commit.message + .split("\n") + .shift() + .substring(0, 80); + process.env.RECORD_REPLAY_METADATA_SOURCE_COMMIT_URL ||= json.html_url; + process.env.RECORD_REPLAY_METADATA_SOURCE_COMMIT_USER ||= json.author?.login; } else { - logger.debug("Failed to fetch GitHub commit metadata", { response: resp }); - throw new GitHubHttpError(resp.status, resp.statusText); + const message = resp.json?.message ?? resp.statusText; + logger.error("ExpandCommitMetadataFromGitHub:Failed", { + message, + responseStatusText: resp.statusText, + responseStatus: resp.status, + }); + throw new GitHubHttpError(message, resp.status); } } @@ -146,38 +259,47 @@ async function expandMergeMetadataFromGitHub(repo: string, pr?: string) { RECORD_REPLAY_METADATA_SOURCE_BRANCH, } = process.env; + if ( + [ + RECORD_REPLAY_METADATA_SOURCE_MERGE_ID, + RECORD_REPLAY_METADATA_SOURCE_MERGE_TITLE, + RECORD_REPLAY_METADATA_SOURCE_MERGE_URL, + RECORD_REPLAY_METADATA_SOURCE_MERGE_USER, + RECORD_REPLAY_METADATA_SOURCE_BRANCH, + ].every(Boolean) + ) { + return; + } + if (!repo || !pr) { - logger.debug("Unable to retrieve merge metadata: Repo and PR number missing"); + logger.error("ExpandMergeMetadataFromGitHub:MissingInfo", { hasRepo: !!repo, hasPr: !!pr }); return; } const url = `https://api.github.com/repos/${repo}/pulls/${pr}`; - logger.debug(`Fetching merge metadata from ${url} with ${GITHUB_TOKEN?.length || 0} char token`); - - const resp = await cachedFetch(url, { - headers: GITHUB_TOKEN - ? 
{ - Authorization: `token ${GITHUB_TOKEN}`, - } - : undefined, + logger.info("ExpandMergeMetadataFromGitHub:WillFetch", { + url, + tokenLength: GITHUB_TOKEN?.length || 0, }); - if (resp.status === 200) { + const resp = await fetchGitHubUrl(url); + + if (resp.ok) { const json = await resp.json; - process.env.RECORD_REPLAY_METADATA_SOURCE_BRANCH = - RECORD_REPLAY_METADATA_SOURCE_BRANCH || json.head?.ref; - process.env.RECORD_REPLAY_METADATA_SOURCE_MERGE_ID = - RECORD_REPLAY_METADATA_SOURCE_MERGE_ID || pr; - process.env.RECORD_REPLAY_METADATA_SOURCE_MERGE_TITLE = - RECORD_REPLAY_METADATA_SOURCE_MERGE_TITLE || json.title; - process.env.RECORD_REPLAY_METADATA_SOURCE_MERGE_URL = - RECORD_REPLAY_METADATA_SOURCE_MERGE_URL || json.html_url; - process.env.RECORD_REPLAY_METADATA_SOURCE_MERGE_USER = - RECORD_REPLAY_METADATA_SOURCE_MERGE_USER || json.user?.login; + process.env.RECORD_REPLAY_METADATA_SOURCE_BRANCH ||= json.head?.ref; + process.env.RECORD_REPLAY_METADATA_SOURCE_MERGE_ID ||= pr; + process.env.RECORD_REPLAY_METADATA_SOURCE_MERGE_TITLE ||= json.title; + process.env.RECORD_REPLAY_METADATA_SOURCE_MERGE_URL ||= json.html_url; + process.env.RECORD_REPLAY_METADATA_SOURCE_MERGE_USER ||= json.user?.login; } else { - logger.debug("Failed to fetch GitHub commit metadata", { response: resp }); - throw new GitHubHttpError(resp.status, resp.statusText); + const message = resp.json?.message ?? resp.statusText; + logger.error("ExpandMergeMetadataFromGitHub:Failed", { + message, + responseStatus: resp.status, + responseStatusText: resp.statusText, + }); + throw new GitHubHttpError(message, resp.status); } } @@ -202,8 +324,8 @@ export function getTestRunIdFromEnvironment(env: NodeJS.ProcessEnv) { return userTestRunId || ciTestRunId; } -const versions: () => Record> = () => ({ - 1: object({ +const versions = () => ({ + [1 as number]: object({ branch: optional( envString( "RECORD_REPLAY_METADATA_SOURCE_BRANCH", @@ -303,7 +425,7 @@ const versions: () => Record> = () => ({ }), }); -export function validate(source: UnstructuredMetadata) { +export function validate(source?: UnstructuredMetadata) { if (!source) { throw new Error("Source metadata does not exist"); } @@ -316,9 +438,10 @@ async function expandEnvironment() { try { if (GITHUB_SHA && GITHUB_REPOSITORY) { + expandGitHubEvent(); const sha = getGitHubMergeSHA(process.env) ?? 
GITHUB_SHA; - const mergeId = getGitHubMergeId(process.env); - logger.debug("GitHub context $0", { mergeId, sha }); + const mergeId = process.env.RECORD_REPLAY_METADATA_SOURCE_MERGE_ID; + logger.info("ExpandEnvironment:GithubContext", { mergeId, sha }); await expandCommitMetadataFromGitHub(GITHUB_REPOSITORY, sha); await expandMergeMetadataFromGitHub(GITHUB_REPOSITORY, mergeId); @@ -327,7 +450,9 @@ async function expandEnvironment() { const provider = getCircleCISourceControlProvider(process.env); if (provider !== "github") { - logger.debug(`Unsupported source control provider: ${process.env.CIRCLE_PULL_REQUEST}`); + logger.error("ExpandEnvironment:UnsupportedSourceControlProvider", { + circlePullRequest: process.env.CIRCLE_PULL_REQUEST, + }); return; } @@ -336,12 +461,13 @@ async function expandEnvironment() { } } catch (e) { if (e instanceof GitHubHttpError) { - console.warn(`Unable to fetch pull request from GitHub: ${e.statusText}`); + console.warn(`Unable to fetch pull request from GitHub: ${e.message}`); if (!process.env.GITHUB_TOKEN && e.status === 404) { console.warn( "If this is a private repo, you can set the GITHUB_TOKEN environment variable\nwith a personal access token to allow the Replay CLI to fetch this metadata." ); } + return; } console.warn("Failed to expand environment details", e); diff --git a/packages/shared/src/recording/metadata/legacy/test/index.ts b/packages/shared/src/recording/metadata/legacy/test/index.ts index 4fa384130..25979f831 100644 --- a/packages/shared/src/recording/metadata/legacy/test/index.ts +++ b/packages/shared/src/recording/metadata/legacy/test/index.ts @@ -17,7 +17,7 @@ const versions = { ...v2, }; -export function validate(test: UnstructuredMetadata) { +export function validate(test?: UnstructuredMetadata) { if (!test) { throw new Error("Test metadata does not exist"); } diff --git a/packages/shared/src/recording/metadata/sanitizeMetadata.ts b/packages/shared/src/recording/metadata/sanitizeMetadata.ts index 1c945bb8a..433c1ab36 100644 --- a/packages/shared/src/recording/metadata/sanitizeMetadata.ts +++ b/packages/shared/src/recording/metadata/sanitizeMetadata.ts @@ -3,13 +3,20 @@ import { UnstructuredMetadata } from "../types"; import { validate as validateSource } from "./legacy/source"; import { validate as validateTest } from "./legacy/test"; -export async function sanitizeMetadata(metadata: UnstructuredMetadata) { +type Options = { + verbose?: boolean; +}; + +export async function sanitizeMetadata(metadata: UnstructuredMetadata, opts: Options = {}) { const updated: UnstructuredMetadata = {}; for (const [key, value] of Object.entries(metadata)) { if (typeof value !== "object") { - logger.debug( - `Ignoring metadata key "${key}". Expected an object but received ${typeof value}` - ); + if (opts.verbose) { + logger.log( + `Ignoring metadata key "${key}". 
Expected an object but received ${typeof value}` + ); + } + logger.info("SanitizeMetadata:UnexpectedKeyType", { key, keyType: typeof value }); continue; } @@ -19,7 +26,7 @@ export async function sanitizeMetadata(metadata: UnstructuredMetadata) { switch (key) { case "source": { try { - const validated = await validateSource(value as UnstructuredMetadata); + const validated = await validateSource(value as UnstructuredMetadata | undefined); Object.assign(updated, validated); } catch (error) { logger.debug("Source validation failed", { error }); @@ -28,7 +35,7 @@ export async function sanitizeMetadata(metadata: UnstructuredMetadata) { } case "test": { try { - const validated = await validateTest(value as UnstructuredMetadata); + const validated = await validateTest(value as UnstructuredMetadata | undefined); Object.assign(updated, validated); } catch (error) { logger.debug("Test validation failed", { error }); @@ -36,9 +43,12 @@ export async function sanitizeMetadata(metadata: UnstructuredMetadata) { break; } default: { - logger.debug( - `Ignoring metadata key "${key}". Custom metadata blocks must be prefixed by "x-". Try "x-${key}" instead.` - ); + if (opts.verbose) { + console.log( + `Ignoring metadata key "${key}". Custom metadata blocks must be prefixed by "x-". Try "x-${key}" instead.` + ); + } + logger.info("SanitizeMetadata:IgnoringKey", { key }); } } } diff --git a/packages/shared/src/recording/upload/uploadRecording.ts b/packages/shared/src/recording/upload/uploadRecording.ts index 1a48d89ac..b89ec568e 100644 --- a/packages/shared/src/recording/upload/uploadRecording.ts +++ b/packages/shared/src/recording/upload/uploadRecording.ts @@ -311,7 +311,7 @@ async function uploadRecordingReadStream( logger.debug("Fetch response received", { response }); - if (response.status !== 200) { + if (!response.ok) { const respText = await response.text(); logger.debug(`Fetch response text: ${respText}`); throw new Error( diff --git a/packages/sourcemap-upload/src/index.ts b/packages/sourcemap-upload/src/index.ts index 055a96cab..e3f3387af 100644 --- a/packages/sourcemap-upload/src/index.ts +++ b/packages/sourcemap-upload/src/index.ts @@ -395,7 +395,7 @@ async function uploadSourcemapToAPI( throw new Error("Unexpected error processing upload response"); } - if (response.status !== 200) { + if (!response.ok) { debug("Failure uploading sourcemap for %s, got %O", map.absPath, obj); throw new Error( typeof obj.error === "string" ? 
obj.error : "Unknown upload error" diff --git a/packages/test-utils/src/index.ts b/packages/test-utils/src/index.ts index 8ed87b00c..ba1ed2233 100644 --- a/packages/test-utils/src/index.ts +++ b/packages/test-utils/src/index.ts @@ -1,4 +1,5 @@ -export type { TestMetadataV1, TestMetadataV2 } from "./legacy-cli/metadata/test"; +export type { TestMetadataV1 } from "@replay-cli/shared/recording/metadata/legacy/test/v1"; +export type { TestMetadataV2 } from "@replay-cli/shared/recording/metadata/legacy/test/v2"; export { fetchWorkspaceConfig } from "./config"; export { getAccessToken } from "./getAccessToken"; export * from "./logging"; diff --git a/packages/test-utils/src/legacy-cli/metadata/env.test.ts b/packages/test-utils/src/legacy-cli/metadata/env.test.ts deleted file mode 100644 index a68154aed..000000000 --- a/packages/test-utils/src/legacy-cli/metadata/env.test.ts +++ /dev/null @@ -1,132 +0,0 @@ -import { firstEnvValueOf } from "./env"; - -describe("firstEnvValueOf", () => { - let env: NodeJS.ProcessEnv; - - beforeEach(() => { - env = process.env; - process.env = {}; - }); - - afterEach(() => { - process.env = env; - }); - - describe("using key names", () => { - it("returns first value", () => { - process.env = { - KEY_1: "key 1", - KEY_2: "key 2", - }; - - const resolver = firstEnvValueOf("KEY_1", "KEY_2"); - const result = resolver(); - - expect(result).toBe("key 1"); - }); - - it("ignores empty strings", () => { - process.env = { - KEY_1: "", - KEY_2: "key 2", - }; - - const resolver = firstEnvValueOf("KEY_1", "KEY_2"); - const result = resolver(); - - expect(result).toBe("key 2"); - }); - - it("ignores undefined keys", () => { - process.env = { - KEY_1: undefined, - KEY_2: "key 2", - }; - - const resolver = firstEnvValueOf("KEY_1", "KEY_2"); - const result = resolver(); - - expect(result).toBe("key 2"); - }); - - it("returns undefined when no keys match", () => { - process.env = { - KEY_1: undefined, - KEY_2: "key 2", - }; - - const resolver = firstEnvValueOf("KEY_3"); - const result = resolver(); - - expect(result).toBe(undefined); - }); - }); - - describe("using callbacks", () => { - it("returns first value", () => { - process.env = { - KEY_1: "key 1", - KEY_1_VALUE: "value 1", - KEY_2: "key 2", - KEY_2_VALUE: "value 2", - }; - - const resolver = firstEnvValueOf( - env => env.KEY_1 && env.KEY_1_VALUE, - env => env.KEY_2 && env.KEY_2_VALUE - ); - const result = resolver(); - - expect(result).toBe("value 1"); - }); - - it("ignores empty strings", () => { - process.env = { - KEY_1: "key 1", - KEY_1_VALUE: "", - KEY_2: "key 2", - KEY_2_VALUE: "value 2", - }; - - const resolver = firstEnvValueOf( - env => env.KEY_1 && env.KEY_1_VALUE, - env => env.KEY_2 && env.KEY_2_VALUE - ); - const result = resolver(); - - expect(result).toBe("value 2"); - }); - - it("ignores undefined keys", () => { - process.env = { - KEY_1: "key 1", - KEY_1_VALUE: undefined, - KEY_2: "key 2", - KEY_2_VALUE: "value 2", - }; - - const resolver = firstEnvValueOf( - env => env.KEY_1 && env.KEY_1_VALUE, - env => env.KEY_2 && env.KEY_2_VALUE - ); - const result = resolver(); - - expect(result).toBe("value 2"); - }); - - it("returns undefined when no keys match", () => { - process.env = { - KEY_1: "key 1", - KEY_2: "key 2", - }; - - const resolver = firstEnvValueOf( - env => env.KEY_1 && env.KEY_1_VALUE, - env => env.KEY_2 && env.KEY_2_VALUE - ); - const result = resolver(); - - expect(result).toBe(undefined); - }); - }); -}); diff --git a/packages/test-utils/src/legacy-cli/metadata/env.ts 
b/packages/test-utils/src/legacy-cli/metadata/env.ts deleted file mode 100644 index 79cc22494..000000000 --- a/packages/test-utils/src/legacy-cli/metadata/env.ts +++ /dev/null @@ -1,15 +0,0 @@ -import { defaulted, string } from "superstruct"; - -type Resolver = string | ((env: NodeJS.ProcessEnv) => string | undefined); - -const firstEnvValueOf = - (...envKeys: Resolver[]) => - () => - envKeys.reduce( - (a, k) => a || (typeof k === "function" ? k(process.env) : process.env[k]), - undefined - ); - -const envString = (...envKeys: Resolver[]) => defaulted(string(), firstEnvValueOf(...envKeys)); - -export { firstEnvValueOf, envString }; diff --git a/packages/test-utils/src/legacy-cli/metadata/index.ts b/packages/test-utils/src/legacy-cli/metadata/index.ts deleted file mode 100644 index de9139828..000000000 --- a/packages/test-utils/src/legacy-cli/metadata/index.ts +++ /dev/null @@ -1,87 +0,0 @@ -import { appendFileSync } from "fs"; - -import { Options, UnstructuredMetadata } from "../types"; -import { maybeLogToConsole } from "../utils"; - -import * as test from "./test"; -import * as source from "./source"; -import { logger } from "@replay-cli/shared/logger"; -import { recordingLogPath } from "@replay-cli/shared/recording/config"; - -// Each known metadata block should have a sanitizer that will check the contents before the upload -const handlers = { - test: test.validate, - source: source.validate, -}; - -type AllowedKey = keyof typeof handlers; -const ALLOWED_KEYS = Object.keys(handlers); - -function isAllowedKey(key: string): key is AllowedKey { - return ALLOWED_KEYS.includes(key); -} - -// Sanitizing arbitrary recording metadata before uploading by removing any -// non-object values (allowing null) and limiting object values to known keys or -// userspace keys prefixed by `x-`. -async function sanitize(metadata: UnstructuredMetadata, opts: Options = {}) { - const updated: UnstructuredMetadata = {}; - for (const key of Object.keys(metadata)) { - const value = metadata[key]; - - if (typeof value !== "object") { - maybeLogToConsole( - opts.verbose, - `Ignoring metadata key "${key}". Expected an object but received ${typeof value}` - ); - - logger.info("SanitizeMetadata:UnexpectedKeyType", { key, keyType: typeof value }); - - continue; - } - - if (value === null || key.startsWith("x-")) { - // passthrough null or userspace types - updated[key] = value; - } else if (isAllowedKey(key)) { - // validate known types - const validated = await handlers[key](metadata as any); - Object.assign(updated, validated); - } else { - // and warn when dropping all other types - maybeLogToConsole( - opts.verbose, - `Ignoring metadata key "${key}". Custom metadata blocks must be prefixed by "x-". Try "x-${key}" instead.` - ); - - logger.info("SanitizeMetadata:IgnoringKey", { key }); - } - } - - return updated; -} - -/** - * Adds unstructured metadata to the local recordings database. - * - * New metadata will be merged with existing data. If the same key is used by - * multiple entries, the most recent entry's value will be used. - * - * Metadata is not validated until the recording is uploaded so arbitrary keys - * may be used here to manage recordings before upload. 
- * - * @param recordingId UUID of the recording - * @param metadata Recording metadata - */ -function add(recordingId: string, metadata: UnstructuredMetadata) { - const entry = { - id: recordingId, - kind: "addMetadata", - metadata, - timestamp: Date.now(), - }; - - appendFileSync(recordingLogPath, `\n${JSON.stringify(entry)}\n`); -} - -export { add, sanitize, source, test }; diff --git a/packages/test-utils/src/legacy-cli/metadata/source.test.ts b/packages/test-utils/src/legacy-cli/metadata/source.test.ts deleted file mode 100644 index 2cbca21c4..000000000 --- a/packages/test-utils/src/legacy-cli/metadata/source.test.ts +++ /dev/null @@ -1,23 +0,0 @@ -import { init } from "./source"; - -describe("source", () => { - describe("init", () => { - describe("buildkite", () => { - it("omits merge.id when BUILDKITE_PULL_REQUEST is false", async () => { - process.env.BUILDKITE_COMMIT = "abc"; - process.env.BUILDKITE_PULL_REQUEST = "false"; - - const source = await init(); - expect(source).not.toHaveProperty("source.merge.id"); - }); - - it("includes merge.id when BUILDKITE_PULL_REQUEST is valued", async () => { - process.env.BUILDKITE_COMMIT = "abc"; - process.env.BUILDKITE_PULL_REQUEST = "123"; - - const source = await init(); - expect(source).toHaveProperty("source.merge.id", "123"); - }); - }); - }); -}); diff --git a/packages/test-utils/src/legacy-cli/metadata/source.ts b/packages/test-utils/src/legacy-cli/metadata/source.ts deleted file mode 100644 index 4089f3823..000000000 --- a/packages/test-utils/src/legacy-cli/metadata/source.ts +++ /dev/null @@ -1,395 +0,0 @@ -import { cachedFetch } from "@replay-cli/shared/cachedFetch"; -import fs from "fs"; -import { create, defaulted, number, object, optional } from "superstruct"; -import { UnstructuredMetadata } from "../types"; -import { envString } from "./env"; -import { logger } from "@replay-cli/shared/logger"; - -const defaultObject = (objStruct: any) => optional(defaulted(object(objStruct), {})); - -const VERSION = 1; - -class GitHubHttpError extends Error { - status: number; - statusText: string; - - constructor(status: number, statusText: string) { - super(); - this.status = status; - this.statusText = statusText; - } -} - -type CacheEntry = { json: any | null; status: number; statusText: string }; - -export const cache: Map = new Map(); - -export function resetCache(url?: string) { - if (url) { - cache.delete(url); - } else { - cache.clear(); - } -} - -function getCircleCISourceControlProvider(env: NodeJS.ProcessEnv) { - return env.CIRCLE_PULL_REQUEST?.startsWith("https://github.com") - ? "github" - : env.CIRCLE_PULL_REQUEST?.startsWith("https://bitbucket.com") - ? "bitbucket" - : undefined; -} - -function getCircleCIRepository(env: NodeJS.ProcessEnv) { - return env.CIRCLE_PROJECT_USERNAME && env.CIRCLE_PROJECT_REPONAME - ? 
`${env.CIRCLE_PROJECT_USERNAME}/${env.CIRCLE_PROJECT_REPONAME}` - : ""; -} - -function getCircleCIMergeId(env: NodeJS.ProcessEnv) { - logger.info("GetCircleCIMergeId:Started"); - - if (env.CIRCLE_PULL_REQUEST) { - logger.info("GetCircleCIMergeId:WillExtract", { circlePullRequest: env.CIRCLE_PULL_REQUEST }); - return env.CIRCLE_PULL_REQUEST.split("/").pop(); - } -} - -function getBuildkiteMessage(env: NodeJS.ProcessEnv) { - if (env.BUILDKITE_SOURCE === "webhook") { - return env.BUILDKITE_MESSAGE; - } -} - -function getBuildkiteRepository(env: NodeJS.ProcessEnv) { - return env.BUILDKITE_REPO?.match(/.*:(.*)\.git/)?.[1]; -} - -let gGitHubEvent: Record | null = null; - -function readGithubEvent(env: NodeJS.ProcessEnv) { - logger.info("ReadGithubEvent:Started"); - - const { GITHUB_EVENT_PATH } = env; - if (!GITHUB_EVENT_PATH) { - logger.info("ReadGithubEvent:NoEventFileSpecified"); - return; - } - - if (!fs.existsSync(GITHUB_EVENT_PATH)) { - logger.info("ReadGithubEvent:EventFileNotFound", { githubEventPath: GITHUB_EVENT_PATH }); - return; - } - - try { - if (!gGitHubEvent) { - logger.info("ReadGithubEvent:WillReadFromFile", { githubEventPath: GITHUB_EVENT_PATH }); - const contents = fs.readFileSync(GITHUB_EVENT_PATH, "utf8"); - gGitHubEvent = JSON.parse(contents); - } else { - logger.info("ReadGithubEvent:WillUseExistingFile"); - } - - return gGitHubEvent; - } catch (error) { - logger.error("ReadGithubEvent:Failed", { error }); - } -} - -function getGitHubMergeId(env: NodeJS.ProcessEnv) { - const event = readGithubEvent(env); - if (event?.pull_request?.number) { - return String(event.pull_request.number); - } -} - -function getGitHubMergeSHA(env: NodeJS.ProcessEnv): string | undefined { - const event = readGithubEvent(env); - if (event?.pull_request?.head?.sha) { - return event.pull_request.head.sha; - } -} - -async function expandCommitMetadataFromGitHub(repo: string, sha?: string) { - const { - GITHUB_TOKEN, - RECORD_REPLAY_METADATA_SOURCE_COMMIT_TITLE, - RECORD_REPLAY_METADATA_SOURCE_COMMIT_URL, - RECORD_REPLAY_METADATA_SOURCE_COMMIT_USER, - } = process.env; - - if (!repo || !sha) return; - - const url = `https://api.github.com/repos/${repo}/commits/${sha}`; - - logger.info("ExpandCommitMetadataFromGitHub:Started", { - url, - tokenLength: GITHUB_TOKEN?.length || 0, - }); - - const resp = await cachedFetch(url, { - headers: GITHUB_TOKEN - ? { - Authorization: `token ${GITHUB_TOKEN}`, - } - : undefined, - }); - - // override the SHA if passed because it might be the SHA from the github - // event rather than GITHUB_SHA. we update this regardless of our ability to - // fetch the details because that can fail due to a missing token. 
- process.env.RECORD_REPLAY_METADATA_SOURCE_COMMIT_ID = sha; - if (resp.status === 200) { - const json = resp.json; - process.env.RECORD_REPLAY_METADATA_SOURCE_COMMIT_TITLE = - RECORD_REPLAY_METADATA_SOURCE_COMMIT_TITLE || - json.commit.message.split("\n").shift().substring(0, 80); - process.env.RECORD_REPLAY_METADATA_SOURCE_COMMIT_URL = - RECORD_REPLAY_METADATA_SOURCE_COMMIT_URL || json.html_url; - process.env.RECORD_REPLAY_METADATA_SOURCE_COMMIT_USER = - RECORD_REPLAY_METADATA_SOURCE_COMMIT_USER || json.author?.login; - } else { - logger.error("expandCommitMetadataFromGitHub:Failed", { - responseStatusText: resp.statusText, - responseStatus: resp.status, - }); - throw new GitHubHttpError(resp.status, resp.statusText); - } -} - -async function expandMergeMetadataFromGitHub(repo: string, pr?: string) { - const { - GITHUB_TOKEN, - RECORD_REPLAY_METADATA_SOURCE_MERGE_ID, - RECORD_REPLAY_METADATA_SOURCE_MERGE_TITLE, - RECORD_REPLAY_METADATA_SOURCE_MERGE_URL, - RECORD_REPLAY_METADATA_SOURCE_MERGE_USER, - RECORD_REPLAY_METADATA_SOURCE_BRANCH, - } = process.env; - - if (!repo || !pr) { - logger.error("ExpandMergeMetadataFromGitHub:MissingInfo", { hasRepo: !!repo, hasPr: !!pr }); - return; - } - - const url = `https://api.github.com/repos/${repo}/pulls/${pr}`; - - logger.info("ExpandMergeMetadataFromGitHub:WillFetch", { - url, - tokenLength: GITHUB_TOKEN?.length || 0, - }); - - const resp = await cachedFetch(url, { - headers: GITHUB_TOKEN - ? { - Authorization: `token ${GITHUB_TOKEN}`, - } - : undefined, - }); - - if (resp.status === 200) { - const json = await resp.json; - process.env.RECORD_REPLAY_METADATA_SOURCE_BRANCH = - RECORD_REPLAY_METADATA_SOURCE_BRANCH || json.head?.ref; - process.env.RECORD_REPLAY_METADATA_SOURCE_MERGE_ID = - RECORD_REPLAY_METADATA_SOURCE_MERGE_ID || pr; - process.env.RECORD_REPLAY_METADATA_SOURCE_MERGE_TITLE = - RECORD_REPLAY_METADATA_SOURCE_MERGE_TITLE || json.title; - process.env.RECORD_REPLAY_METADATA_SOURCE_MERGE_URL = - RECORD_REPLAY_METADATA_SOURCE_MERGE_URL || json.html_url; - process.env.RECORD_REPLAY_METADATA_SOURCE_MERGE_USER = - RECORD_REPLAY_METADATA_SOURCE_MERGE_USER || json.user?.login; - } else { - logger.error("ExpandMergeMetadataFromGitHub:Failed", { - responseStatus: resp.status, - responseStatusText: resp.statusText, - }); - - throw new GitHubHttpError(resp.status, resp.statusText); - } -} - -function buildTestRunId(repository: string | undefined, runId: string | undefined) { - if (repository && runId) { - return `${repository}--${runId}`; - } -} - -function getTestRunIdFromEnvironment(env: NodeJS.ProcessEnv) { - const userTestRunId = - process.env.REPLAY_METADATA_TEST_RUN_ID || - process.env.RECORD_REPLAY_METADATA_TEST_RUN_ID || - process.env.RECORD_REPLAY_TEST_RUN_ID; - - let ciTestRunId = - buildTestRunId(process.env.GITHUB_REPOSITORY, process.env.GITHUB_RUN_ID) || - buildTestRunId(process.env.CIRCLE_PROJECT_REPONAME, process.env.CIRCLE_WORKFLOW_ID) || - buildTestRunId(getBuildkiteRepository(process.env), process.env.BUILDKITE_BUILD_ID) || - buildTestRunId(process.env.SEMAPHORE_GIT_REPO_SLUG, process.env.SEMAPHORE_WORKFLOW_ID); - - return userTestRunId || ciTestRunId; -} - -const versions = () => ({ - [1 as number]: object({ - branch: optional( - envString( - "RECORD_REPLAY_METADATA_SOURCE_BRANCH", - "GITHUB_REF_NAME", - "BUILDKITE_BRANCH", - "CIRCLE_BRANCH", - "SEMAPHORE_GIT_PR_BRANCH" - ) - ), - commit: defaultObject({ - id: envString( - "RECORD_REPLAY_METADATA_SOURCE_COMMIT_ID", - "GITHUB_SHA", - "BUILDKITE_COMMIT", - "CIRCLE_SHA1", - 
"SEMAPHORE_GIT_SHA" - ), - title: optional(envString("RECORD_REPLAY_METADATA_SOURCE_COMMIT_TITLE", getBuildkiteMessage)), - url: optional(envString("RECORD_REPLAY_METADATA_SOURCE_COMMIT_URL")), - user: optional(envString("RECORD_REPLAY_METADATA_SOURCE_COMMIT_USER")), - }), - trigger: defaultObject({ - user: optional( - envString( - "RECORD_REPLAY_METADATA_SOURCE_TRIGGER_USER", - "GITHUB_ACTOR", - "BUILDKITE_BUILD_CREATOR", - "BUILDKITE_BUILD_AUTHOR", - "CIRCLE_USERNAME", - "CIRCLE_PR_USERNAME" - ) - ), - name: optional(envString("RECORD_REPLAY_METADATA_SOURCE_TRIGGER_NAME")), - workflow: optional( - envString( - "RECORD_REPLAY_METADATA_SOURCE_TRIGGER_WORKFLOW", - "GITHUB_RUN_ID", - "BUILDKITE_BUILD_ID", - "CIRCLE_WORKFLOW_ID", - "SEMAPHORE_WORKFLOW_ID" - ) - ), - url: optional( - envString( - "RECORD_REPLAY_METADATA_SOURCE_TRIGGER_URL", - env => - env.GITHUB_WORKFLOW && - `${env.GITHUB_SERVER_URL ?? "https://github.com"}/${ - env.GITHUB_REPOSITORY - }/actions/runs/${env.GITHUB_RUN_ID}`, - "BUILDKITE_BUILD_URL", - "CIRCLE_BUILD_URL", - env => - env.SEMAPHORE_ORGANIZATION_URL && - env.SEMAPHORE_WORKFLOW_ID && - `${env.SEMAPHORE_ORGANIZATION_URL}/workflows/${env.SEMAPHORE_WORKFLOW_ID}` - ) - ), - }), - merge: defaultObject({ - id: optional( - envString( - "RECORD_REPLAY_METADATA_SOURCE_MERGE_ID", - env => - env.BUILDKITE_PULL_REQUEST && env.BUILDKITE_PULL_REQUEST !== "false" - ? env.BUILDKITE_PULL_REQUEST - : undefined, - getCircleCIMergeId, - "SEMAPHORE_GIT_PR_NUMBER" - ) - ), - title: optional( - envString("RECORD_REPLAY_METADATA_SOURCE_MERGE_TITLE", "SEMAPHORE_GIT_PR_NAME") - ), - url: optional(envString("RECORD_REPLAY_METADATA_SOURCE_MERGE_URL")), - user: optional(envString("RECORD_REPLAY_METADATA_SOURCE_MERGE_USER")), - }), - provider: optional( - envString( - "RECORD_REPLAY_METADATA_SOURCE_PROVIDER", - env => env.GITHUB_WORKFLOW && "github", - "BUILDKITE_PIPELINE_PROVIDER", - getCircleCISourceControlProvider, - "SEMAPHORE_GIT_PROVIDER" - ) - ), - repository: optional( - envString( - "RECORD_REPLAY_METADATA_SOURCE_REPOSITORY", - "GITHUB_REPOSITORY", - getBuildkiteRepository, - getCircleCIRepository, - "SEMAPHORE_GIT_REPO_SLUG" - ) - ), - version: defaulted(number(), () => 1), - }), -}); - -function validate(metadata: { source: UnstructuredMetadata }) { - if (!metadata || !metadata.source) { - throw new Error("Source metadata does not exist"); - } - - return init(metadata.source); -} - -async function expandEnvironment() { - const { CIRCLECI, CIRCLE_SHA1, GITHUB_SHA, GITHUB_REPOSITORY } = process.env; - - try { - if (GITHUB_SHA && GITHUB_REPOSITORY) { - const sha = getGitHubMergeSHA(process.env) ?? 
GITHUB_SHA; - const mergeId = getGitHubMergeId(process.env); - logger.info("ExpandEnvironment:GithubContext", { mergeId, sha }); - await expandCommitMetadataFromGitHub(GITHUB_REPOSITORY, sha); - await expandMergeMetadataFromGitHub(GITHUB_REPOSITORY, mergeId); - } else if (CIRCLECI) { - const repo = getCircleCIRepository(process.env); - const provider = getCircleCISourceControlProvider(process.env); - - if (provider !== "github") { - logger.error("ExpandEnvironment:UnsupportedSourceControlProvider", { - circlePullRequest: process.env.CIRCLE_PULL_REQUEST, - }); - return; - } - - await expandCommitMetadataFromGitHub(repo, CIRCLE_SHA1); - await expandMergeMetadataFromGitHub(repo, getCircleCIMergeId(process.env)); - } - } catch (e) { - if (e instanceof GitHubHttpError) { - console.warn(`Unable to fetch pull request from GitHub: ${e.statusText}`); - if (!process.env.GITHUB_TOKEN && e.status === 404) { - console.warn( - "If this is a private repo, you can set the GITHUB_TOKEN environment variable\nwith a personal access token to allow the Replay CLI to fetch this metadata." - ); - } - } - - console.warn("Failed to expand environment details", e); - } -} - -async function init(data: UnstructuredMetadata = {}) { - const version = typeof data.version === "number" ? data.version : VERSION; - - await expandEnvironment(); - const structs = versions(); - - if (structs[version]) { - return { - source: create(data, structs[version]), - }; - } else { - throw new Error(`Source metadata version ${data.version} not supported`); - } -} - -export { getTestRunIdFromEnvironment, init, validate }; diff --git a/packages/test-utils/src/legacy-cli/metadata/test/index.ts b/packages/test-utils/src/legacy-cli/metadata/test/index.ts deleted file mode 100644 index 2f45eb23e..000000000 --- a/packages/test-utils/src/legacy-cli/metadata/test/index.ts +++ /dev/null @@ -1,78 +0,0 @@ -import { create, Struct, any } from "superstruct"; - -import { UnstructuredMetadata } from "../../types"; - -import v1, { TestMetadataV1 } from "./v1"; -import v2, { TestMetadataV2 } from "./v2"; - -const VERSION = "2.1.0"; - -export type { TestMetadataV1, TestMetadataV2 }; -export type UserActionEvent = TestMetadataV1.UserActionEvent | TestMetadataV2.UserActionEvent; -export type Test = TestMetadataV1.Test | TestMetadataV2.Test; -export type TestResult = TestMetadataV1.TestResult | TestMetadataV2.TestResult; -export type TestRun = TestMetadataV1.TestRun | TestMetadataV2.TestRun; -export type TestError = TestMetadataV1.TestError | TestMetadataV2.TestError; - -const versions = { - ...v1, - ...v2, -}; - -function validate(metadata: { test: UnstructuredMetadata }) { - if (!metadata || !metadata.test) { - throw new Error("Test metadata does not exist"); - } - - return init(metadata.test); -} - -type Metadata = (typeof versions)[keyof typeof versions]; - -function getVersion(k: string): Struct { - const v: Struct | undefined = (versions as any)[k]; - if (!v) { - console.warn(`Unable to validate unknown version of test metadata:${k} `); - return any(); - } - - return v; -} - -function init(data: Metadata | UnstructuredMetadata = {}) { - let version = VERSION; - - if ("version" in data && typeof data.version === "number") { - // explicitly adapt the pre-semver scheme - version = "1.0.0"; - } else if ("schemaVersion" in data && typeof data.schemaVersion === "string") { - version = data.schemaVersion; - } - - let schema: Struct; - try { - schema = getVersion(version); - } catch { - console.warn( - `Unable to validate unknown version of test metadata: 
${version || "Unspecified"}` - ); - - return { - test: data, - }; - } - - try { - return { - test: create(data, schema), - }; - } catch (e) { - console.error(e); - console.error("Metadata:"); - console.error(JSON.stringify(data, undefined, 2)); - - return {}; - } -} - -export { validate, init }; diff --git a/packages/test-utils/src/legacy-cli/metadata/test/v1.ts b/packages/test-utils/src/legacy-cli/metadata/test/v1.ts deleted file mode 100644 index 8a5c1409e..000000000 --- a/packages/test-utils/src/legacy-cli/metadata/test/v1.ts +++ /dev/null @@ -1,94 +0,0 @@ -import { - array, - defaulted, - enums, - number, - object, - optional, - string, - define, - any, - Infer, -} from "superstruct"; -const isUuid = require("is-uuid"); - -import { envString, firstEnvValueOf } from "../env"; - -const testResult = enums(["passed", "failed", "timedOut", "skipped", "unknown"]); -const testError = object({ - message: string(), - line: optional(number()), - column: optional(number()), -}); - -const test = object({ - id: optional(string()), - parentId: optional(string()), - title: string(), - path: optional(array(string())), - relativePath: optional(string()), - result: testResult, - error: optional(testError), - relativeStartTime: optional(number()), - duration: optional(number()), - steps: optional(array(any())), -}); - -const v1_0_0 = object({ - suite: optional(envString("RECORD_REPLAY_METADATA_TEST_SUITE")), - file: optional(envString("RECORD_REPLAY_METADATA_TEST_FILE")), - title: envString("RECORD_REPLAY_METADATA_TEST_TITLE"), - path: optional(array(string())), - result: defaulted( - enums(["passed", "failed", "timedOut", "skipped", "unknown"]), - firstEnvValueOf("RECORD_REPLAY_METADATA_TEST_RESULT") - ), - // before/after all hooks - hooks: optional( - array( - object({ - title: string(), - path: array(string()), - steps: optional(array(any())), - }) - ) - ), - tests: optional(array(test)), - runner: optional( - defaulted( - object({ - name: optional(envString("RECORD_REPLAY_METADATA_TEST_RUNNER_NAME")), - version: optional(envString("RECORD_REPLAY_METADATA_TEST_RUNNER_VERSION")), - plugin: optional(envString("RECORD_REPLAY_METADATA_TEST_RUNNER_PLUGIN")), - }), - {} - ) - ), - run: optional( - defaulted( - object({ - id: defaulted( - define("uuid", (v: any) => isUuid.v4(v)), - firstEnvValueOf("RECORD_REPLAY_METADATA_TEST_RUN_ID", "RECORD_REPLAY_TEST_RUN_ID") - ), - title: optional(envString("RECORD_REPLAY_METADATA_TEST_RUN_TITLE")), - mode: optional(envString("RECORD_REPLAY_METADATA_TEST_RUN_MODE")), - }), - {} - ) - ), - reporterErrors: defaulted(array(any()), []), - version: defaulted(number(), () => 1), -}); - -export namespace TestMetadataV1 { - export type UserActionEvent = any; - export type Test = Infer; - export type TestResult = Infer; - export type TestRun = Infer; - export type TestError = Infer; -} - -export default { - "1.0.0": v1_0_0, -}; diff --git a/packages/test-utils/src/legacy-cli/metadata/test/v2.ts b/packages/test-utils/src/legacy-cli/metadata/test/v2.ts deleted file mode 100644 index 89ca3e7f8..000000000 --- a/packages/test-utils/src/legacy-cli/metadata/test/v2.ts +++ /dev/null @@ -1,155 +0,0 @@ -import { - Infer, - array, - assign, - defaulted, - enums, - nullable, - number, - object, - optional, - record, - string, -} from "superstruct"; - -import { firstEnvValueOf } from "../env"; - -const testError = object({ - name: string(), - message: string(), - line: optional(number()), - column: optional(number()), -}); - -const userActionEvent = object({ - data: object({ - id: string(), 
- parentId: nullable(string()), - category: enums(["assertion", "command", "other"]), - command: object({ - arguments: array(string()), - name: string(), - }), - scope: nullable(array(string())), - error: nullable(testError), - }), -}); - -const testResult = enums(["failed", "passed", "skipped", "timedOut", "unknown"]); - -const test_v2_0_0 = object({ - events: object({ - afterAll: array(userActionEvent), - afterEach: array(userActionEvent), - beforeAll: array(userActionEvent), - beforeEach: array(userActionEvent), - main: array(userActionEvent), - }), - approximateDuration: number(), - result: testResult, - source: object({ - scope: array(string()), - title: string(), - }), - error: nullable(testError), -}); - -const v2_0_0 = object({ - approximateDuration: number(), - environment: object({ - errors: defaulted( - array( - object({ - code: number(), - detail: nullable(string()), - name: string(), - message: string(), - }) - ), - [] - ), - pluginVersion: string(), - testRunner: object({ - name: string(), - version: string(), - }), - }), - result: testResult, - resultCounts: record(testResult, number()), - schemaVersion: defaulted(string(), () => "2.0.0"), - source: object({ - path: string(), - title: string(), - }), - tests: array(test_v2_0_0), - run: defaulted( - object({ - id: defaulted( - string(), - firstEnvValueOf( - "REPLAY_METADATA_TEST_RUN_ID", - "RECORD_REPLAY_METADATA_TEST_RUN_ID", - "RECORD_REPLAY_TEST_RUN_ID" - ) - ), - title: optional( - defaulted( - string(), - firstEnvValueOf("REPLAY_METADATA_TEST_RUN_TITLE", "RECORD_REPLAY_METADATA_TEST_RUN_TITLE") - ) - ), - mode: optional( - defaulted( - string(), - firstEnvValueOf("REPLAY_METADATA_TEST_RUN_MODE", "RECORD_REPLAY_METADATA_TEST_RUN_MODE") - ) - ), - }), - {} - ), -}); - -const test_v2_1_0 = assign( - test_v2_0_0, - object({ - id: number(), - attempt: number(), - }) -); - -const test_v2_2_0 = assign( - test_v2_1_0, - object({ - executionId: string(), - executionGroupId: string(), - maxAttempts: number(), - }) -); - -const v2_1_0 = assign( - v2_0_0, - object({ - tests: array(test_v2_1_0), - }) -); - -const v2_2_0 = assign( - v2_1_0, - object({ - tests: array(test_v2_2_0), - }) -); - -export namespace TestMetadataV2 { - export type UserActionEvent = Infer; - export type Test = Infer; - export type TestResult = Infer; - export type TestRun = Infer; - export type TestError = Infer; -} - -export default { - "2.2.0": v2_2_0, - "2.1.0": v2_1_0, - "2.0.0": v2_0_0, -}; diff --git a/packages/test-utils/src/legacy-cli/upload.ts b/packages/test-utils/src/legacy-cli/upload.ts index f482c039c..8915fe01a 100644 --- a/packages/test-utils/src/legacy-cli/upload.ts +++ b/packages/test-utils/src/legacy-cli/upload.ts @@ -7,7 +7,7 @@ import pMap from "p-map"; import path from "path"; import { Worker } from "worker_threads"; import ProtocolClient from "./client"; -import { sanitize as sanitizeMetadata } from "./metadata"; +import { sanitizeMetadata } from "@replay-cli/shared/recording/metadata/sanitizeMetadata"; import { Options, OriginalSourceEntry, RecordingMetadata, SourceMapEntry } from "./types"; import { defer, isValidUUID, maybeLogToConsole } from "./utils"; import { getUserAgent } from "@replay-cli/shared/userAgent"; @@ -175,7 +175,7 @@ class ReplayClient { body: file, }); - if (resp.status !== 200) { + if (!resp.ok) { logger.error("ReplayClientUploadRecording:Failed", { responseText: await resp.text(), responseStatus: resp.status, diff --git a/packages/test-utils/src/metrics.ts b/packages/test-utils/src/metrics.ts index 
96e4afce7..29dfc2d22 100644
--- a/packages/test-utils/src/metrics.ts
+++ b/packages/test-utils/src/metrics.ts
@@ -1,6 +1,6 @@
 import fetch from "node-fetch";
-import os from "os";
-import { TestMetadataV2 } from "./legacy-cli/metadata/test/v2";
+import os from "node:os";
+import { TestMetadataV2 } from "@replay-cli/shared/recording/metadata/legacy/test/v2";
 import { logger } from "@replay-cli/shared/logger";
 
 function shouldReportTestMetrics() {
diff --git a/packages/test-utils/src/reporter.ts b/packages/test-utils/src/reporter.ts
index edd068318..c69b6ce42 100644
--- a/packages/test-utils/src/reporter.ts
+++ b/packages/test-utils/src/reporter.ts
@@ -11,8 +11,10 @@ import { dirname } from "path";
 import { v4 as uuid } from "uuid";
 import { getAccessToken } from "./getAccessToken";
 import { listAllRecordings, removeRecording, uploadRecording } from "./legacy-cli";
-import { add, source as sourceMetadata, test as testMetadata } from "./legacy-cli/metadata";
-import type { TestMetadataV2 } from "./legacy-cli/metadata/test";
+import { addMetadata } from "@replay-cli/shared/recording/metadata/addMetadata";
+import * as sourceMetadata from "@replay-cli/shared/recording/metadata/legacy/source";
+import * as testMetadata from "@replay-cli/shared/recording/metadata/legacy/test/index";
+import type { TestMetadataV2 } from "@replay-cli/shared/recording/metadata/legacy/test/v2";
 import { log } from "./logging";
 import { getMetadataFilePath } from "./metadata";
 import { pingTestMetrics } from "./metrics";
@@ -879,7 +881,7 @@ export default class ReplayReporter<
       });
     }
 
-    recordings.forEach(rec => add(rec.id, mergedMetadata));
+    recordings.forEach(rec => addMetadata(rec.id, mergedMetadata));
 
     // Re-fetch recordings so we have the most recent metadata
     const allRecordings = listAllRecordings({ all: true }) as RecordingEntry[];
diff --git a/scripts/pkg-build/src/plugins/bundledDependencies.ts b/scripts/pkg-build/src/plugins/bundledDependencies.ts
index 56bbacb6b..6b41b946d 100644
--- a/scripts/pkg-build/src/plugins/bundledDependencies.ts
+++ b/scripts/pkg-build/src/plugins/bundledDependencies.ts
@@ -16,10 +16,25 @@ export function transformImportSources(
     isBundledDependency: PackagePredicate;
   }
 ) {
-  // TODO: handle dynamic imports
   return code.replace(
-    /((?:import|export)\s+(?:{[\w\s,]*}\s+from\s+)?)["'](.+)["']/g,
-    (match, statementSlice, importedId) => {
+    // this regex matches:
+    //
+    // import "shared"
+    // import("shared")
+    // from "shared"
+    //
+    // it doesn't strictly verify that each match sits at a valid import position,
+    // but that should be good enough: false positives are unlikely in practice
+    //
+    // note that it's important that we handle cases like:
+    //
+    // import def from "shared"
+    // import * as ns from "shared"
+    // import def, { named } from "shared"
+    // import { named } from "shared"
+    // export { named } from "shared"
+    /((?:import\s*\(\s*|import\s+|(?:\s|})from\s+))["'](.+)["']/g,
+    (match, precedingSlice, importedId) => {
       if (!isBundledDependency(importedId)) {
         return match;
       }
@@ -27,7 +42,7 @@ export function transformImportSources(
       if (!bundledPath.startsWith(".")) {
         bundledPath = `./${bundledPath}`;
       }
-      return statementSlice + `"${bundledPath}"`;
+      return precedingSlice + `"${bundledPath}"`;
     }
   );
 }
@@ -125,6 +140,7 @@ export function bundledDependencies({
       if (!/\.(mts|cts|ts|tsx)$/.test(id)) {
         return null;
       }
+
       const code = transformImportSources(
         await fs.readFile(resolvedBundledIds.get(id) ?? id, "utf8"),
         {
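
As a sanity check on the rewritten import-matching regex above, here is a minimal standalone sketch of its behavior. The pattern is copied verbatim from transformImportSources; the rewrite helper, the "./" prefix, and the "shared" specifier are illustrative stand-ins for the plugin's actual isBundledDependency/bundledPath resolution, not part of the patch:

import { strict as assert } from "node:assert";

// Same pattern as in transformImportSources: group 1 captures the slice that
// precedes the specifier (`import "..."`, `import("...")`, or `... from "..."`),
// group 2 captures the specifier itself.
const importSource = /((?:import\s*\(\s*|import\s+|(?:\s|})from\s+))["'](.+)["']/g;

// Toy rewrite: prefix every matched specifier with "./". The real plugin
// instead checks isBundledDependency(importedId) and substitutes a bundled path.
function rewrite(code: string): string {
  return code.replace(
    importSource,
    (_match: string, precedingSlice: string, importedId: string) =>
      `${precedingSlice}"./${importedId}"`
  );
}

// The forms called out in the comment block all round-trip as expected,
// including the dynamic-import case the old regex missed.
assert.equal(rewrite(`import "shared";`), `import "./shared";`);
assert.equal(rewrite(`import * as ns from "shared";`), `import * as ns from "./shared";`);
assert.equal(rewrite(`import def, { named } from "shared";`), `import def, { named } from "./shared";`);
assert.equal(rewrite(`export { named } from "shared";`), `export { named } from "./shared";`);
assert.equal(rewrite(`const mod = import("shared");`), `const mod = import("./shared");`);

Because the pattern only anchors on the text immediately around the specifier rather than parsing full statements, it trades strict syntactic validation for simplicity, which is the trade-off the new comment in the plugin acknowledges.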