From 555a30da2656b4a7cf47b107800bef097723363e Mon Sep 17 00:00:00 2001
From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com>
Date: Sun, 7 May 2023 15:27:10 +0800
Subject: [PATCH] Bump yaml from 2.2.1 to 2.2.2 (#84)

* Bump yaml from 2.2.1 to 2.2.2

Bumps [yaml](https://github.com/eemeli/yaml) from 2.2.1 to 2.2.2.
- [Release notes](https://github.com/eemeli/yaml/releases)
- [Commits](https://github.com/eemeli/yaml/compare/v2.2.1...v2.2.2)

---
updated-dependencies:
- dependency-name: yaml
  dependency-type: direct:production
...

Signed-off-by: dependabot[bot]

* Run prepare

---------

Signed-off-by: dependabot[bot]
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
Co-authored-by: Zennon Gosalvez <1798166+zgosalvez@users.noreply.github.com>
---
 dist/index.js     | 16 ++++++++--------
 dist/index.js.map |  2 +-
 package-lock.json | 14 +++++++-------
 package.json      |  2 +-
 4 files changed, 17 insertions(+), 17 deletions(-)

diff --git a/dist/index.js b/dist/index.js
index 058a04b..47f475f 100644
--- a/dist/index.js
+++ b/dist/index.js
@@ -7735,7 +7735,7 @@ const prettifyError = (src, lc) => (error) => {
         let count = 1;
         const end = error.linePos[1];
         if (end && end.line === line && end.col > col) {
-            count = Math.min(end.col - col, 80 - ci);
+            count = Math.max(1, Math.min(end.col - col, 80 - ci));
         }
         const pointer = ' '.repeat(ci) + '^'.repeat(count);
         error.message += `:\n\n${lineStr}\n${pointer}\n`;
@@ -12644,7 +12644,7 @@ function stringifyFlowCollection({ comment, items }, ctx, { flowChars, itemInden
         }
     }
     if (comment) {
-        str += stringifyComment.lineComment(str, commentString(comment), indent);
+        str += stringifyComment.lineComment(str, indent, commentString(comment));
         if (onComment)
             onComment();
     }
@@ -12994,8 +12994,8 @@ exports.stringifyPair = stringifyPair;
 var Scalar = __nccwpck_require__(9338);
 var foldFlowLines = __nccwpck_require__(2889);
 
-const getFoldOptions = (ctx) => ({
-    indentAtStart: ctx.indentAtStart,
+const getFoldOptions = (ctx, isBlock) => ({
+    indentAtStart: isBlock ? ctx.indent.length : ctx.indentAtStart,
     lineWidth: ctx.options.lineWidth,
     minContentWidth: ctx.options.minContentWidth
 });
@@ -13108,7 +13108,7 @@ function doubleQuotedString(value, ctx) {
     str = start ? str + json.slice(start) : json;
     return implicitKey
         ? str
-        : foldFlowLines.foldFlowLines(str, indent, foldFlowLines.FOLD_QUOTED, getFoldOptions(ctx));
+        : foldFlowLines.foldFlowLines(str, indent, foldFlowLines.FOLD_QUOTED, getFoldOptions(ctx, false));
 }
 function singleQuotedString(value, ctx) {
     if (ctx.options.singleQuote === false ||
@@ -13120,7 +13120,7 @@ function singleQuotedString(value, ctx) {
     const res = "'" + value.replace(/'/g, "''").replace(/\n+/g, `$&\n${indent}`) + "'";
     return ctx.implicitKey
         ? res
-        : foldFlowLines.foldFlowLines(res, indent, foldFlowLines.FOLD_FLOW, getFoldOptions(ctx));
+        : foldFlowLines.foldFlowLines(res, indent, foldFlowLines.FOLD_FLOW, getFoldOptions(ctx, false));
 }
 function quotedString(value, ctx) {
     const { singleQuote } = ctx.options;
@@ -13218,7 +13218,7 @@ function blockString({ comment, type, value }, ctx, onComment, onChompKeep) {
             .replace(/(?:^|\n)([\t ].*)(?:([\n\t ]*)\n(?![\n\t ]))?/g, '$1$2') // more-indented lines aren't folded
             //                ^ more-ind.      ^ empty   ^ capture next empty lines only at end of indent
             .replace(/\n+/g, `$&${indent}`);
-    const body = foldFlowLines.foldFlowLines(`${start}${value}${end}`, indent, foldFlowLines.FOLD_BLOCK, getFoldOptions(ctx));
+    const body = foldFlowLines.foldFlowLines(`${start}${value}${end}`, indent, foldFlowLines.FOLD_BLOCK, getFoldOptions(ctx, true));
     return `${header}\n${indent}${body}`;
 }
 function plainString(item, ctx, onComment, onChompKeep) {
@@ -13268,7 +13268,7 @@ function plainString(item, ctx, onComment, onChompKeep) {
     }
     return implicitKey
         ? str
-        : foldFlowLines.foldFlowLines(str, indent, foldFlowLines.FOLD_FLOW, getFoldOptions(ctx));
+        : foldFlowLines.foldFlowLines(str, indent, foldFlowLines.FOLD_FLOW, getFoldOptions(ctx, false));
 }
 function stringifyString(item, ctx, onComment, onChompKeep) {
     const { implicitKey, inFlow } = ctx;
diff --git a/dist/index.js.map b/dist/index.js.map
index af3baa8..89e1e2b 100644
--- a/dist/index.js.map
+++ b/dist/index.js.map
@@ -1 +1 @@
[single-line regenerated source map ("mappings" string) omitted]
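
The functional changes in this bump all sit in the bundled yaml code above: prettifyError now clamps the error pointer to at least one caret, stringifyComment.lineComment receives its arguments in the corrected order, and getFoldOptions gains an isBlock flag so block scalars fold against the enclosing indent. A minimal sketch of exercising that folding path through the package's public API follows; the sample document, lineWidth, and blockQuote values are illustrative only and are not part of this patch.

// sketch.js -- hypothetical check, not included in this repository.
// Assumes yaml >= 2.2.2 is installed (npm install yaml).
const YAML = require('yaml');

// A multi-line string takes the block-scalar path that getFoldOptions(ctx, true) now serves.
const doc = {
  note: 'first line of a fairly long note\nsecond line that should be folded once it runs past the configured width',
};

// blockQuote: 'folded' requests a `>` block scalar; lineWidth controls where folding occurs.
const out = YAML.stringify(doc, { blockQuote: 'folded', lineWidth: 40 });
console.log(out);

// Folding must be reversible: parsing the output should return the original value.
console.log(YAML.parse(out).note === doc.note);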
ACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACxJA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACvBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACtFA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACzBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACvJA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;A
ACA;AACA;;;;;;;;;AChUA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;AC3OA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;AC7BA;AACA;;;;;;ACDA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA","sources":["../webpack://github-actions-ensure-sha-pinned-actions/./node_modules/@actions/core/lib/command.js","../webpack://github-actions-ensure-sha-pinned-actions/./node_modules/@actions/core/lib/core.js","../webpack://github-actions-ensure-sha-pinned-actions/./node_modules/@actions/core/lib/file-command.js","../webpack://github-actions-ensure-sha-pinned-actions/./node_modules/@actions/core/lib/oidc-utils.js","../webpack://github-actions-ensure-sha-pinned-actions/./node_modules/@actions/core/lib/path-utils.js","../webpack://github-actions-ensure-sha-pinned-actions/./node_modules/@actions/core/lib/summary.js","../webpack://github-actions-ensure-sha-pinned-actions/./node_modules/@actions/core/lib/utils.js","../webpack://github-actions-ensure-sha-pinned-actions/./node_modules/@actions/glob/lib/glob.js","../webpack://github-actions-ensure-sha-pinned-actions/./node_modules/@actions/glob/lib/internal-glob-options-helper.js","../webpack://github-actions-ensure-sha-pinned-actions/./node_modules/@actions/glob/lib/internal-globber.js","../webpack://github-actions-ensure-sha-pinned-actions/./node_modules/@actions/glob/lib/internal-hash-files.js","../webpack://github-actions-ensure-sha-pinned-actions/./node_modules/@actions/glob/lib/internal-match-kind.js","../webpack://github-actions-ensure-sha-pinned-actions/./node_modules/@actions/glob/lib/internal-path-helper.js","../webpack://github-actions-ensure-sha-pinned-actions/./node_modules/@actions/glob/lib/internal-path.js","../webpack://github-actions-ensure-sha-pinned-actions/./node_modules/@actions/glob/lib/internal-pattern-helper.js","../webpack://github-actions-ensure-sh
a-pinned-actions/./node_modules/@actions/glob/lib/internal-pattern.js","../webpack://github-actions-ensure-sha-pinned-actions/./node_modules/@actions/glob/lib/internal-search-state.js","../webpack://github-actions-ensure-sha-pinned-actions/./node_modules/@actions/http-client/lib/auth.js","../webpack://github-actions-ensure-sha-pinned-actions/./node_modules/@actions/http-client/lib/index.js","../webpack://github-actions-ensure-sha-pinned-actions/./node_modules/@actions/http-client/lib/proxy.js","../webpack://github-actions-ensure-sha-pinned-actions/./node_modules/balanced-match/index.js","../webpack://github-actions-ensure-sha-pinned-actions/./node_modules/brace-expansion/index.js","../webpack://github-actions-ensure-sha-pinned-actions/./node_modules/concat-map/index.js","../webpack://github-actions-ensure-sha-pinned-actions/./node_modules/minimatch/minimatch.js","../webpack://github-actions-ensure-sha-pinned-actions/./node_modules/tunnel/index.js","../webpack://github-actions-ensure-sha-pinned-actions/./node_modules/tunnel/lib/tunnel.js","../webpack://github-actions-ensure-sha-pinned-actions/./node_modules/uuid/dist/index.js","../webpack://github-actions-ensure-sha-pinned-actions/./node_modules/uuid/dist/md5.js","../webpack://github-actions-ensure-sha-pinned-actions/./node_modules/uuid/dist/nil.js","../webpack://github-actions-ensure-sha-pinned-actions/./node_modules/uuid/dist/parse.js","../webpack://github-actions-ensure-sha-pinned-actions/./node_modules/uuid/dist/regex.js","../webpack://github-actions-ensure-sha-pinned-actions/./node_modules/uuid/dist/rng.js","../webpack://github-actions-ensure-sha-pinned-actions/./node_modules/uuid/dist/sha1.js","../webpack://github-actions-ensure-sha-pinned-actions/./node_modules/uuid/dist/stringify.js","../webpack://github-actions-ensure-sha-pinned-actions/./node_modules/uuid/dist/v1.js","../webpack://github-actions-ensure-sha-pinned-actions/./node_modules/uuid/dist/v3.js","../webpack://github-actions-ensure-sha-pinned-actions/./node_modules/uuid/dist/v35.js","../webpack://github-actions-ensure-sha-pinned-actions/./node_modules/uuid/dist/v4.js","../webpack://github-actions-ensure-sha-pinned-actions/./node_modules/uuid/dist/v5.js","../webpack://github-actions-ensure-sha-pinned-actions/./node_modules/uuid/dist/validate.js","../webpack://github-actions-ensure-sha-pinned-actions/./node_modules/uuid/dist/version.js","../webpack://github-actions-ensure-sha-pinned-actions/external node-commonjs \"assert\"","../webpack://github-actions-ensure-sha-pinned-actions/external node-commonjs \"crypto\"","../webpack://github-actions-ensure-sha-pinned-actions/external node-commonjs \"events\"","../webpack://github-actions-ensure-sha-pinned-actions/external node-commonjs \"fs\"","../webpack://github-actions-ensure-sha-pinned-actions/external node-commonjs \"http\"","../webpack://github-actions-ensure-sha-pinned-actions/external node-commonjs \"https\"","../webpack://github-actions-ensure-sha-pinned-actions/external node-commonjs \"net\"","../webpack://github-actions-ensure-sha-pinned-actions/external node-commonjs \"os\"","../webpack://github-actions-ensure-sha-pinned-actions/external node-commonjs \"path\"","../webpack://github-actions-ensure-sha-pinned-actions/external node-commonjs \"stream\"","../webpack://github-actions-ensure-sha-pinned-actions/external node-commonjs \"tls\"","../webpack://github-actions-ensure-sha-pinned-actions/external node-commonjs 
\"util\"","../webpack://github-actions-ensure-sha-pinned-actions/./node_modules/yaml/dist/compose/compose-collection.js","../webpack://github-actions-ensure-sha-pinned-actions/./node_modules/yaml/dist/compose/compose-doc.js","../webpack://github-actions-ensure-sha-pinned-actions/./node_modules/yaml/dist/compose/compose-node.js","../webpack://github-actions-ensure-sha-pinned-actions/./node_modules/yaml/dist/compose/compose-scalar.js","../webpack://github-actions-ensure-sha-pinned-actions/./node_modules/yaml/dist/compose/composer.js","../webpack://github-actions-ensure-sha-pinned-actions/./node_modules/yaml/dist/compose/resolve-block-map.js","../webpack://github-actions-ensure-sha-pinned-actions/./node_modules/yaml/dist/compose/resolve-block-scalar.js","../webpack://github-actions-ensure-sha-pinned-actions/./node_modules/yaml/dist/compose/resolve-block-seq.js","../webpack://github-actions-ensure-sha-pinned-actions/./node_modules/yaml/dist/compose/resolve-end.js","../webpack://github-actions-ensure-sha-pinned-actions/./node_modules/yaml/dist/compose/resolve-flow-collection.js","../webpack://github-actions-ensure-sha-pinned-actions/./node_modules/yaml/dist/compose/resolve-flow-scalar.js","../webpack://github-actions-ensure-sha-pinned-actions/./node_modules/yaml/dist/compose/resolve-props.js","../webpack://github-actions-ensure-sha-pinned-actions/./node_modules/yaml/dist/compose/util-contains-newline.js","../webpack://github-actions-ensure-sha-pinned-actions/./node_modules/yaml/dist/compose/util-empty-scalar-position.js","../webpack://github-actions-ensure-sha-pinned-actions/./node_modules/yaml/dist/compose/util-flow-indent-check.js","../webpack://github-actions-ensure-sha-pinned-actions/./node_modules/yaml/dist/compose/util-map-includes.js","../webpack://github-actions-ensure-sha-pinned-actions/./node_modules/yaml/dist/doc/Document.js","../webpack://github-actions-ensure-sha-pinned-actions/./node_modules/yaml/dist/doc/anchors.js","../webpack://github-actions-ensure-sha-pinned-actions/./node_modules/yaml/dist/doc/applyReviver.js","../webpack://github-actions-ensure-sha-pinned-actions/./node_modules/yaml/dist/doc/createNode.js","../webpack://github-actions-ensure-sha-pinned-actions/./node_modules/yaml/dist/doc/directives.js","../webpack://github-actions-ensure-sha-pinned-actions/./node_modules/yaml/dist/errors.js","../webpack://github-actions-ensure-sha-pinned-actions/./node_modules/yaml/dist/index.js","../webpack://github-actions-ensure-sha-pinned-actions/./node_modules/yaml/dist/log.js","../webpack://github-actions-ensure-sha-pinned-actions/./node_modules/yaml/dist/nodes/Alias.js","../webpack://github-actions-ensure-sha-pinned-actions/./node_modules/yaml/dist/nodes/Collection.js","../webpack://github-actions-ensure-sha-pinned-actions/./node_modules/yaml/dist/nodes/Node.js","../webpack://github-actions-ensure-sha-pinned-actions/./node_modules/yaml/dist/nodes/Pair.js","../webpack://github-actions-ensure-sha-pinned-actions/./node_modules/yaml/dist/nodes/Scalar.js","../webpack://github-actions-ensure-sha-pinned-actions/./node_modules/yaml/dist/nodes/YAMLMap.js","../webpack://github-actions-ensure-sha-pinned-actions/./node_modules/yaml/dist/nodes/YAMLSeq.js","../webpack://github-actions-ensure-sha-pinned-actions/./node_modules/yaml/dist/nodes/addPairToJSMap.js","../webpack://github-actions-ensure-sha-pinned-actions/./node_modules/yaml/dist/nodes/toJS.js","../webpack://github-actions-ensure-sha-pinned-actions/./node_modules/yaml/dist/parse/cst-scalar.js","../webpack://github-actions-ensure-sha-pinned
-actions/./node_modules/yaml/dist/parse/cst-stringify.js","../webpack://github-actions-ensure-sha-pinned-actions/./node_modules/yaml/dist/parse/cst-visit.js","../webpack://github-actions-ensure-sha-pinned-actions/./node_modules/yaml/dist/parse/cst.js","../webpack://github-actions-ensure-sha-pinned-actions/./node_modules/yaml/dist/parse/lexer.js","../webpack://github-actions-ensure-sha-pinned-actions/./node_modules/yaml/dist/parse/line-counter.js","../webpack://github-actions-ensure-sha-pinned-actions/./node_modules/yaml/dist/parse/parser.js","../webpack://github-actions-ensure-sha-pinned-actions/./node_modules/yaml/dist/public-api.js","../webpack://github-actions-ensure-sha-pinned-actions/./node_modules/yaml/dist/schema/Schema.js","../webpack://github-actions-ensure-sha-pinned-actions/./node_modules/yaml/dist/schema/common/map.js","../webpack://github-actions-ensure-sha-pinned-actions/./node_modules/yaml/dist/schema/common/null.js","../webpack://github-actions-ensure-sha-pinned-actions/./node_modules/yaml/dist/schema/common/seq.js","../webpack://github-actions-ensure-sha-pinned-actions/./node_modules/yaml/dist/schema/common/string.js","../webpack://github-actions-ensure-sha-pinned-actions/./node_modules/yaml/dist/schema/core/bool.js","../webpack://github-actions-ensure-sha-pinned-actions/./node_modules/yaml/dist/schema/core/float.js","../webpack://github-actions-ensure-sha-pinned-actions/./node_modules/yaml/dist/schema/core/int.js","../webpack://github-actions-ensure-sha-pinned-actions/./node_modules/yaml/dist/schema/core/schema.js","../webpack://github-actions-ensure-sha-pinned-actions/./node_modules/yaml/dist/schema/json/schema.js","../webpack://github-actions-ensure-sha-pinned-actions/./node_modules/yaml/dist/schema/tags.js","../webpack://github-actions-ensure-sha-pinned-actions/./node_modules/yaml/dist/schema/yaml-1.1/binary.js","../webpack://github-actions-ensure-sha-pinned-actions/./node_modules/yaml/dist/schema/yaml-1.1/bool.js","../webpack://github-actions-ensure-sha-pinned-actions/./node_modules/yaml/dist/schema/yaml-1.1/float.js","../webpack://github-actions-ensure-sha-pinned-actions/./node_modules/yaml/dist/schema/yaml-1.1/int.js","../webpack://github-actions-ensure-sha-pinned-actions/./node_modules/yaml/dist/schema/yaml-1.1/omap.js","../webpack://github-actions-ensure-sha-pinned-actions/./node_modules/yaml/dist/schema/yaml-1.1/pairs.js","../webpack://github-actions-ensure-sha-pinned-actions/./node_modules/yaml/dist/schema/yaml-1.1/schema.js","../webpack://github-actions-ensure-sha-pinned-actions/./node_modules/yaml/dist/schema/yaml-1.1/set.js","../webpack://github-actions-ensure-sha-pinned-actions/./node_modules/yaml/dist/schema/yaml-1.1/timestamp.js","../webpack://github-actions-ensure-sha-pinned-actions/./node_modules/yaml/dist/stringify/foldFlowLines.js","../webpack://github-actions-ensure-sha-pinned-actions/./node_modules/yaml/dist/stringify/stringify.js","../webpack://github-actions-ensure-sha-pinned-actions/./node_modules/yaml/dist/stringify/stringifyCollection.js","../webpack://github-actions-ensure-sha-pinned-actions/./node_modules/yaml/dist/stringify/stringifyComment.js","../webpack://github-actions-ensure-sha-pinned-actions/./node_modules/yaml/dist/stringify/stringifyDocument.js","../webpack://github-actions-ensure-sha-pinned-actions/./node_modules/yaml/dist/stringify/stringifyNumber.js","../webpack://github-actions-ensure-sha-pinned-actions/./node_modules/yaml/dist/stringify/stringifyPair.js","../webpack://github-actions-ensure-sha-pinned-actions/./node_modules/yaml/d
ist/stringify/stringifyString.js","../webpack://github-actions-ensure-sha-pinned-actions/./node_modules/yaml/dist/visit.js","../webpack://github-actions-ensure-sha-pinned-actions/webpack/bootstrap","../webpack://github-actions-ensure-sha-pinned-actions/webpack/runtime/compat","../webpack://github-actions-ensure-sha-pinned-actions/./src/index.js"],"sourcesContent":["\"use strict\";\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n __setModuleDefault(result, mod);\n return result;\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.issue = exports.issueCommand = void 0;\nconst os = __importStar(require(\"os\"));\nconst utils_1 = require(\"./utils\");\n/**\n * Commands\n *\n * Command Format:\n * ::name key=value,key=value::message\n *\n * Examples:\n * ::warning::This is the message\n * ::set-env name=MY_VAR::some value\n */\nfunction issueCommand(command, properties, message) {\n const cmd = new Command(command, properties, message);\n process.stdout.write(cmd.toString() + os.EOL);\n}\nexports.issueCommand = issueCommand;\nfunction issue(name, message = '') {\n issueCommand(name, {}, message);\n}\nexports.issue = issue;\nconst CMD_STRING = '::';\nclass Command {\n constructor(command, properties, message) {\n if (!command) {\n command = 'missing.command';\n }\n this.command = command;\n this.properties = properties;\n this.message = message;\n }\n toString() {\n let cmdStr = CMD_STRING + this.command;\n if (this.properties && Object.keys(this.properties).length > 0) {\n cmdStr += ' ';\n let first = true;\n for (const key in this.properties) {\n if (this.properties.hasOwnProperty(key)) {\n const val = this.properties[key];\n if (val) {\n if (first) {\n first = false;\n }\n else {\n cmdStr += ',';\n }\n cmdStr += `${key}=${escapeProperty(val)}`;\n }\n }\n }\n }\n cmdStr += `${CMD_STRING}${escapeData(this.message)}`;\n return cmdStr;\n }\n}\nfunction escapeData(s) {\n return utils_1.toCommandValue(s)\n .replace(/%/g, '%25')\n .replace(/\\r/g, '%0D')\n .replace(/\\n/g, '%0A');\n}\nfunction escapeProperty(s) {\n return utils_1.toCommandValue(s)\n .replace(/%/g, '%25')\n .replace(/\\r/g, '%0D')\n .replace(/\\n/g, '%0A')\n .replace(/:/g, '%3A')\n .replace(/,/g, '%2C');\n}\n//# sourceMappingURL=command.js.map","\"use strict\";\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n __setModuleDefault(result, mod);\n return result;\n};\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.getIDToken = exports.getState = exports.saveState = exports.group = exports.endGroup = exports.startGroup = exports.info = exports.notice = exports.warning = exports.error = exports.debug = exports.isDebug = exports.setFailed = exports.setCommandEcho = exports.setOutput = exports.getBooleanInput = exports.getMultilineInput = exports.getInput = exports.addPath = exports.setSecret = exports.exportVariable = exports.ExitCode = void 0;\nconst command_1 = require(\"./command\");\nconst file_command_1 = require(\"./file-command\");\nconst utils_1 = require(\"./utils\");\nconst os = __importStar(require(\"os\"));\nconst path = __importStar(require(\"path\"));\nconst oidc_utils_1 = require(\"./oidc-utils\");\n/**\n * The code to exit an action\n */\nvar ExitCode;\n(function (ExitCode) {\n /**\n * A code indicating that the action was successful\n */\n ExitCode[ExitCode[\"Success\"] = 0] = \"Success\";\n /**\n * A code indicating that the action was a failure\n */\n ExitCode[ExitCode[\"Failure\"] = 1] = \"Failure\";\n})(ExitCode = exports.ExitCode || (exports.ExitCode = {}));\n//-----------------------------------------------------------------------\n// Variables\n//-----------------------------------------------------------------------\n/**\n * Sets env variable for this action and future actions in the job\n * @param name the name of the variable to set\n * @param val the value of the variable. 
Non-string values will be converted to a string via JSON.stringify\n */\n// eslint-disable-next-line @typescript-eslint/no-explicit-any\nfunction exportVariable(name, val) {\n const convertedVal = utils_1.toCommandValue(val);\n process.env[name] = convertedVal;\n const filePath = process.env['GITHUB_ENV'] || '';\n if (filePath) {\n return file_command_1.issueFileCommand('ENV', file_command_1.prepareKeyValueMessage(name, val));\n }\n command_1.issueCommand('set-env', { name }, convertedVal);\n}\nexports.exportVariable = exportVariable;\n/**\n * Registers a secret which will get masked from logs\n * @param secret value of the secret\n */\nfunction setSecret(secret) {\n command_1.issueCommand('add-mask', {}, secret);\n}\nexports.setSecret = setSecret;\n/**\n * Prepends inputPath to the PATH (for this action and future actions)\n * @param inputPath\n */\nfunction addPath(inputPath) {\n const filePath = process.env['GITHUB_PATH'] || '';\n if (filePath) {\n file_command_1.issueFileCommand('PATH', inputPath);\n }\n else {\n command_1.issueCommand('add-path', {}, inputPath);\n }\n process.env['PATH'] = `${inputPath}${path.delimiter}${process.env['PATH']}`;\n}\nexports.addPath = addPath;\n/**\n * Gets the value of an input.\n * Unless trimWhitespace is set to false in InputOptions, the value is also trimmed.\n * Returns an empty string if the value is not defined.\n *\n * @param name name of the input to get\n * @param options optional. See InputOptions.\n * @returns string\n */\nfunction getInput(name, options) {\n const val = process.env[`INPUT_${name.replace(/ /g, '_').toUpperCase()}`] || '';\n if (options && options.required && !val) {\n throw new Error(`Input required and not supplied: ${name}`);\n }\n if (options && options.trimWhitespace === false) {\n return val;\n }\n return val.trim();\n}\nexports.getInput = getInput;\n/**\n * Gets the values of an multiline input. Each value is also trimmed.\n *\n * @param name name of the input to get\n * @param options optional. See InputOptions.\n * @returns string[]\n *\n */\nfunction getMultilineInput(name, options) {\n const inputs = getInput(name, options)\n .split('\\n')\n .filter(x => x !== '');\n if (options && options.trimWhitespace === false) {\n return inputs;\n }\n return inputs.map(input => input.trim());\n}\nexports.getMultilineInput = getMultilineInput;\n/**\n * Gets the input value of the boolean type in the YAML 1.2 \"core schema\" specification.\n * Support boolean input list: `true | True | TRUE | false | False | FALSE` .\n * The return value is also in boolean type.\n * ref: https://yaml.org/spec/1.2/spec.html#id2804923\n *\n * @param name name of the input to get\n * @param options optional. See InputOptions.\n * @returns boolean\n */\nfunction getBooleanInput(name, options) {\n const trueValue = ['true', 'True', 'TRUE'];\n const falseValue = ['false', 'False', 'FALSE'];\n const val = getInput(name, options);\n if (trueValue.includes(val))\n return true;\n if (falseValue.includes(val))\n return false;\n throw new TypeError(`Input does not meet YAML 1.2 \"Core Schema\" specification: ${name}\\n` +\n `Support boolean input list: \\`true | True | TRUE | false | False | FALSE\\``);\n}\nexports.getBooleanInput = getBooleanInput;\n/**\n * Sets the value of an output.\n *\n * @param name name of the output to set\n * @param value value to store. 
Non-string values will be converted to a string via JSON.stringify\n */\n// eslint-disable-next-line @typescript-eslint/no-explicit-any\nfunction setOutput(name, value) {\n const filePath = process.env['GITHUB_OUTPUT'] || '';\n if (filePath) {\n return file_command_1.issueFileCommand('OUTPUT', file_command_1.prepareKeyValueMessage(name, value));\n }\n process.stdout.write(os.EOL);\n command_1.issueCommand('set-output', { name }, utils_1.toCommandValue(value));\n}\nexports.setOutput = setOutput;\n/**\n * Enables or disables the echoing of commands into stdout for the rest of the step.\n * Echoing is disabled by default if ACTIONS_STEP_DEBUG is not set.\n *\n */\nfunction setCommandEcho(enabled) {\n command_1.issue('echo', enabled ? 'on' : 'off');\n}\nexports.setCommandEcho = setCommandEcho;\n//-----------------------------------------------------------------------\n// Results\n//-----------------------------------------------------------------------\n/**\n * Sets the action status to failed.\n * When the action exits it will be with an exit code of 1\n * @param message add error issue message\n */\nfunction setFailed(message) {\n process.exitCode = ExitCode.Failure;\n error(message);\n}\nexports.setFailed = setFailed;\n//-----------------------------------------------------------------------\n// Logging Commands\n//-----------------------------------------------------------------------\n/**\n * Gets whether Actions Step Debug is on or not\n */\nfunction isDebug() {\n return process.env['RUNNER_DEBUG'] === '1';\n}\nexports.isDebug = isDebug;\n/**\n * Writes debug message to user log\n * @param message debug message\n */\nfunction debug(message) {\n command_1.issueCommand('debug', {}, message);\n}\nexports.debug = debug;\n/**\n * Adds an error issue\n * @param message error issue message. Errors will be converted to string via toString()\n * @param properties optional properties to add to the annotation.\n */\nfunction error(message, properties = {}) {\n command_1.issueCommand('error', utils_1.toCommandProperties(properties), message instanceof Error ? message.toString() : message);\n}\nexports.error = error;\n/**\n * Adds a warning issue\n * @param message warning issue message. Errors will be converted to string via toString()\n * @param properties optional properties to add to the annotation.\n */\nfunction warning(message, properties = {}) {\n command_1.issueCommand('warning', utils_1.toCommandProperties(properties), message instanceof Error ? message.toString() : message);\n}\nexports.warning = warning;\n/**\n * Adds a notice issue\n * @param message notice issue message. Errors will be converted to string via toString()\n * @param properties optional properties to add to the annotation.\n */\nfunction notice(message, properties = {}) {\n command_1.issueCommand('notice', utils_1.toCommandProperties(properties), message instanceof Error ? 
message.toString() : message);\n}\nexports.notice = notice;\n/**\n * Writes info to log with console.log.\n * @param message info message\n */\nfunction info(message) {\n process.stdout.write(message + os.EOL);\n}\nexports.info = info;\n/**\n * Begin an output group.\n *\n * Output until the next `groupEnd` will be foldable in this group\n *\n * @param name The name of the output group\n */\nfunction startGroup(name) {\n command_1.issue('group', name);\n}\nexports.startGroup = startGroup;\n/**\n * End an output group.\n */\nfunction endGroup() {\n command_1.issue('endgroup');\n}\nexports.endGroup = endGroup;\n/**\n * Wrap an asynchronous function call in a group.\n *\n * Returns the same type as the function itself.\n *\n * @param name The name of the group\n * @param fn The function to wrap in the group\n */\nfunction group(name, fn) {\n return __awaiter(this, void 0, void 0, function* () {\n startGroup(name);\n let result;\n try {\n result = yield fn();\n }\n finally {\n endGroup();\n }\n return result;\n });\n}\nexports.group = group;\n//-----------------------------------------------------------------------\n// Wrapper action state\n//-----------------------------------------------------------------------\n/**\n * Saves state for current action, the state can only be retrieved by this action's post job execution.\n *\n * @param name name of the state to store\n * @param value value to store. Non-string values will be converted to a string via JSON.stringify\n */\n// eslint-disable-next-line @typescript-eslint/no-explicit-any\nfunction saveState(name, value) {\n const filePath = process.env['GITHUB_STATE'] || '';\n if (filePath) {\n return file_command_1.issueFileCommand('STATE', file_command_1.prepareKeyValueMessage(name, value));\n }\n command_1.issueCommand('save-state', { name }, utils_1.toCommandValue(value));\n}\nexports.saveState = saveState;\n/**\n * Gets the value of an state set by this action's main execution.\n *\n * @param name name of the state to get\n * @returns string\n */\nfunction getState(name) {\n return process.env[`STATE_${name}`] || '';\n}\nexports.getState = getState;\nfunction getIDToken(aud) {\n return __awaiter(this, void 0, void 0, function* () {\n return yield oidc_utils_1.OidcClient.getIDToken(aud);\n });\n}\nexports.getIDToken = getIDToken;\n/**\n * Summary exports\n */\nvar summary_1 = require(\"./summary\");\nObject.defineProperty(exports, \"summary\", { enumerable: true, get: function () { return summary_1.summary; } });\n/**\n * @deprecated use core.summary\n */\nvar summary_2 = require(\"./summary\");\nObject.defineProperty(exports, \"markdownSummary\", { enumerable: true, get: function () { return summary_2.markdownSummary; } });\n/**\n * Path exports\n */\nvar path_utils_1 = require(\"./path-utils\");\nObject.defineProperty(exports, \"toPosixPath\", { enumerable: true, get: function () { return path_utils_1.toPosixPath; } });\nObject.defineProperty(exports, \"toWin32Path\", { enumerable: true, get: function () { return path_utils_1.toWin32Path; } });\nObject.defineProperty(exports, \"toPlatformPath\", { enumerable: true, get: function () { return path_utils_1.toPlatformPath; } });\n//# sourceMappingURL=core.js.map","\"use strict\";\n// For internal use, subject to change.\nvar __createBinding = (this && this.__createBinding) || (Object.create ? 
(function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n __setModuleDefault(result, mod);\n return result;\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.prepareKeyValueMessage = exports.issueFileCommand = void 0;\n// We use any as a valid input type\n/* eslint-disable @typescript-eslint/no-explicit-any */\nconst fs = __importStar(require(\"fs\"));\nconst os = __importStar(require(\"os\"));\nconst uuid_1 = require(\"uuid\");\nconst utils_1 = require(\"./utils\");\nfunction issueFileCommand(command, message) {\n const filePath = process.env[`GITHUB_${command}`];\n if (!filePath) {\n throw new Error(`Unable to find environment variable for file command ${command}`);\n }\n if (!fs.existsSync(filePath)) {\n throw new Error(`Missing file at path: ${filePath}`);\n }\n fs.appendFileSync(filePath, `${utils_1.toCommandValue(message)}${os.EOL}`, {\n encoding: 'utf8'\n });\n}\nexports.issueFileCommand = issueFileCommand;\nfunction prepareKeyValueMessage(key, value) {\n const delimiter = `ghadelimiter_${uuid_1.v4()}`;\n const convertedValue = utils_1.toCommandValue(value);\n // These should realistically never happen, but just in case someone finds a\n // way to exploit uuid generation let's not allow keys or values that contain\n // the delimiter.\n if (key.includes(delimiter)) {\n throw new Error(`Unexpected input: name should not contain the delimiter \"${delimiter}\"`);\n }\n if (convertedValue.includes(delimiter)) {\n throw new Error(`Unexpected input: value should not contain the delimiter \"${delimiter}\"`);\n }\n return `${key}<<${delimiter}${os.EOL}${convertedValue}${os.EOL}${delimiter}`;\n}\nexports.prepareKeyValueMessage = prepareKeyValueMessage;\n//# sourceMappingURL=file-command.js.map","\"use strict\";\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.OidcClient = void 0;\nconst http_client_1 = require(\"@actions/http-client\");\nconst auth_1 = require(\"@actions/http-client/lib/auth\");\nconst core_1 = require(\"./core\");\nclass OidcClient {\n static createHttpClient(allowRetry = true, maxRetry = 10) {\n const requestOptions = {\n allowRetries: allowRetry,\n maxRetries: maxRetry\n };\n return new http_client_1.HttpClient('actions/oidc-client', [new auth_1.BearerCredentialHandler(OidcClient.getRequestToken())], requestOptions);\n }\n static getRequestToken() {\n const token = process.env['ACTIONS_ID_TOKEN_REQUEST_TOKEN'];\n if (!token) {\n throw new Error('Unable to get ACTIONS_ID_TOKEN_REQUEST_TOKEN env variable');\n }\n return token;\n }\n static getIDTokenUrl() {\n const runtimeUrl = process.env['ACTIONS_ID_TOKEN_REQUEST_URL'];\n if (!runtimeUrl) {\n throw new Error('Unable to get ACTIONS_ID_TOKEN_REQUEST_URL env variable');\n }\n return runtimeUrl;\n }\n static getCall(id_token_url) {\n var _a;\n return __awaiter(this, void 0, void 0, function* () {\n const httpclient = OidcClient.createHttpClient();\n const res = yield httpclient\n .getJson(id_token_url)\n .catch(error => {\n throw new Error(`Failed to get ID Token. \\n \n Error Code : ${error.statusCode}\\n \n Error Message: ${error.result.message}`);\n });\n const id_token = (_a = res.result) === null || _a === void 0 ? void 0 : _a.value;\n if (!id_token) {\n throw new Error('Response json body do not have ID Token field');\n }\n return id_token;\n });\n }\n static getIDToken(audience) {\n return __awaiter(this, void 0, void 0, function* () {\n try {\n // New ID Token is requested from action service\n let id_token_url = OidcClient.getIDTokenUrl();\n if (audience) {\n const encodedAudience = encodeURIComponent(audience);\n id_token_url = `${id_token_url}&audience=${encodedAudience}`;\n }\n core_1.debug(`ID token url is ${id_token_url}`);\n const id_token = yield OidcClient.getCall(id_token_url);\n core_1.setSecret(id_token);\n return id_token;\n }\n catch (error) {\n throw new Error(`Error message: ${error.message}`);\n }\n });\n }\n}\nexports.OidcClient = OidcClient;\n//# sourceMappingURL=oidc-utils.js.map","\"use strict\";\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n __setModuleDefault(result, mod);\n return result;\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.toPlatformPath = exports.toWin32Path = exports.toPosixPath = void 0;\nconst path = __importStar(require(\"path\"));\n/**\n * toPosixPath converts the given path to the posix form. 
On Windows, \\\\ will be\n * replaced with /.\n *\n * @param pth. Path to transform.\n * @return string Posix path.\n */\nfunction toPosixPath(pth) {\n return pth.replace(/[\\\\]/g, '/');\n}\nexports.toPosixPath = toPosixPath;\n/**\n * toWin32Path converts the given path to the win32 form. On Linux, / will be\n * replaced with \\\\.\n *\n * @param pth. Path to transform.\n * @return string Win32 path.\n */\nfunction toWin32Path(pth) {\n return pth.replace(/[/]/g, '\\\\');\n}\nexports.toWin32Path = toWin32Path;\n/**\n * toPlatformPath converts the given path to a platform-specific path. It does\n * this by replacing instances of / and \\ with the platform-specific path\n * separator.\n *\n * @param pth The path to platformize.\n * @return string The platform-specific path.\n */\nfunction toPlatformPath(pth) {\n return pth.replace(/[/\\\\]/g, path.sep);\n}\nexports.toPlatformPath = toPlatformPath;\n//# sourceMappingURL=path-utils.js.map","\"use strict\";\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.summary = exports.markdownSummary = exports.SUMMARY_DOCS_URL = exports.SUMMARY_ENV_VAR = void 0;\nconst os_1 = require(\"os\");\nconst fs_1 = require(\"fs\");\nconst { access, appendFile, writeFile } = fs_1.promises;\nexports.SUMMARY_ENV_VAR = 'GITHUB_STEP_SUMMARY';\nexports.SUMMARY_DOCS_URL = 'https://docs.github.com/actions/using-workflows/workflow-commands-for-github-actions#adding-a-job-summary';\nclass Summary {\n constructor() {\n this._buffer = '';\n }\n /**\n * Finds the summary file path from the environment, rejects if env var is not found or file does not exist\n * Also checks r/w permissions.\n *\n * @returns step summary file path\n */\n filePath() {\n return __awaiter(this, void 0, void 0, function* () {\n if (this._filePath) {\n return this._filePath;\n }\n const pathFromEnv = process.env[exports.SUMMARY_ENV_VAR];\n if (!pathFromEnv) {\n throw new Error(`Unable to find environment variable for $${exports.SUMMARY_ENV_VAR}. Check if your runtime environment supports job summaries.`);\n }\n try {\n yield access(pathFromEnv, fs_1.constants.R_OK | fs_1.constants.W_OK);\n }\n catch (_a) {\n throw new Error(`Unable to access summary file: '${pathFromEnv}'. 
Check if the file has correct read/write permissions.`);\n }\n this._filePath = pathFromEnv;\n return this._filePath;\n });\n }\n /**\n * Wraps content in an HTML tag, adding any HTML attributes\n *\n * @param {string} tag HTML tag to wrap\n * @param {string | null} content content within the tag\n * @param {[attribute: string]: string} attrs key-value list of HTML attributes to add\n *\n * @returns {string} content wrapped in HTML element\n */\n wrap(tag, content, attrs = {}) {\n const htmlAttrs = Object.entries(attrs)\n .map(([key, value]) => ` ${key}=\"${value}\"`)\n .join('');\n if (!content) {\n return `<${tag}${htmlAttrs}>`;\n }\n return `<${tag}${htmlAttrs}>${content}`;\n }\n /**\n * Writes text in the buffer to the summary buffer file and empties buffer. Will append by default.\n *\n * @param {SummaryWriteOptions} [options] (optional) options for write operation\n *\n * @returns {Promise} summary instance\n */\n write(options) {\n return __awaiter(this, void 0, void 0, function* () {\n const overwrite = !!(options === null || options === void 0 ? void 0 : options.overwrite);\n const filePath = yield this.filePath();\n const writeFunc = overwrite ? writeFile : appendFile;\n yield writeFunc(filePath, this._buffer, { encoding: 'utf8' });\n return this.emptyBuffer();\n });\n }\n /**\n * Clears the summary buffer and wipes the summary file\n *\n * @returns {Summary} summary instance\n */\n clear() {\n return __awaiter(this, void 0, void 0, function* () {\n return this.emptyBuffer().write({ overwrite: true });\n });\n }\n /**\n * Returns the current summary buffer as a string\n *\n * @returns {string} string of summary buffer\n */\n stringify() {\n return this._buffer;\n }\n /**\n * If the summary buffer is empty\n *\n * @returns {boolen} true if the buffer is empty\n */\n isEmptyBuffer() {\n return this._buffer.length === 0;\n }\n /**\n * Resets the summary buffer without writing to summary file\n *\n * @returns {Summary} summary instance\n */\n emptyBuffer() {\n this._buffer = '';\n return this;\n }\n /**\n * Adds raw text to the summary buffer\n *\n * @param {string} text content to add\n * @param {boolean} [addEOL=false] (optional) append an EOL to the raw text (default: false)\n *\n * @returns {Summary} summary instance\n */\n addRaw(text, addEOL = false) {\n this._buffer += text;\n return addEOL ? this.addEOL() : this;\n }\n /**\n * Adds the operating system-specific end-of-line marker to the buffer\n *\n * @returns {Summary} summary instance\n */\n addEOL() {\n return this.addRaw(os_1.EOL);\n }\n /**\n * Adds an HTML codeblock to the summary buffer\n *\n * @param {string} code content to render within fenced code block\n * @param {string} lang (optional) language to syntax highlight code\n *\n * @returns {Summary} summary instance\n */\n addCodeBlock(code, lang) {\n const attrs = Object.assign({}, (lang && { lang }));\n const element = this.wrap('pre', this.wrap('code', code), attrs);\n return this.addRaw(element).addEOL();\n }\n /**\n * Adds an HTML list to the summary buffer\n *\n * @param {string[]} items list of items to render\n * @param {boolean} [ordered=false] (optional) if the rendered list should be ordered or not (default: false)\n *\n * @returns {Summary} summary instance\n */\n addList(items, ordered = false) {\n const tag = ordered ? 
'ol' : 'ul';\n const listItems = items.map(item => this.wrap('li', item)).join('');\n const element = this.wrap(tag, listItems);\n return this.addRaw(element).addEOL();\n }\n /**\n * Adds an HTML table to the summary buffer\n *\n * @param {SummaryTableCell[]} rows table rows\n *\n * @returns {Summary} summary instance\n */\n addTable(rows) {\n const tableBody = rows\n .map(row => {\n const cells = row\n .map(cell => {\n if (typeof cell === 'string') {\n return this.wrap('td', cell);\n }\n const { header, data, colspan, rowspan } = cell;\n const tag = header ? 'th' : 'td';\n const attrs = Object.assign(Object.assign({}, (colspan && { colspan })), (rowspan && { rowspan }));\n return this.wrap(tag, data, attrs);\n })\n .join('');\n return this.wrap('tr', cells);\n })\n .join('');\n const element = this.wrap('table', tableBody);\n return this.addRaw(element).addEOL();\n }\n /**\n * Adds a collapsable HTML details element to the summary buffer\n *\n * @param {string} label text for the closed state\n * @param {string} content collapsable content\n *\n * @returns {Summary} summary instance\n */\n addDetails(label, content) {\n const element = this.wrap('details', this.wrap('summary', label) + content);\n return this.addRaw(element).addEOL();\n }\n /**\n * Adds an HTML image tag to the summary buffer\n *\n * @param {string} src path to the image you to embed\n * @param {string} alt text description of the image\n * @param {SummaryImageOptions} options (optional) addition image attributes\n *\n * @returns {Summary} summary instance\n */\n addImage(src, alt, options) {\n const { width, height } = options || {};\n const attrs = Object.assign(Object.assign({}, (width && { width })), (height && { height }));\n const element = this.wrap('img', null, Object.assign({ src, alt }, attrs));\n return this.addRaw(element).addEOL();\n }\n /**\n * Adds an HTML section heading element\n *\n * @param {string} text heading text\n * @param {number | string} [level=1] (optional) the heading level, default: 1\n *\n * @returns {Summary} summary instance\n */\n addHeading(text, level) {\n const tag = `h${level}`;\n const allowedTag = ['h1', 'h2', 'h3', 'h4', 'h5', 'h6'].includes(tag)\n ? tag\n : 'h1';\n const element = this.wrap(allowedTag, text);\n return this.addRaw(element).addEOL();\n }\n /**\n * Adds an HTML thematic break (
<hr>) to the summary buffer\n *\n * @returns {Summary} summary instance\n */\n addSeparator() {\n const element = this.wrap('hr', null);\n return this.addRaw(element).addEOL();\n }\n /**\n * Adds an HTML line break (<br>
) to the summary buffer\n *\n * @returns {Summary} summary instance\n */\n addBreak() {\n const element = this.wrap('br', null);\n return this.addRaw(element).addEOL();\n }\n /**\n * Adds an HTML blockquote to the summary buffer\n *\n * @param {string} text quote text\n * @param {string} cite (optional) citation url\n *\n * @returns {Summary} summary instance\n */\n addQuote(text, cite) {\n const attrs = Object.assign({}, (cite && { cite }));\n const element = this.wrap('blockquote', text, attrs);\n return this.addRaw(element).addEOL();\n }\n /**\n * Adds an HTML anchor tag to the summary buffer\n *\n * @param {string} text link text/content\n * @param {string} href hyperlink\n *\n * @returns {Summary} summary instance\n */\n addLink(text, href) {\n const element = this.wrap('a', text, { href });\n return this.addRaw(element).addEOL();\n }\n}\nconst _summary = new Summary();\n/**\n * @deprecated use `core.summary`\n */\nexports.markdownSummary = _summary;\nexports.summary = _summary;\n//# sourceMappingURL=summary.js.map","\"use strict\";\n// We use any as a valid input type\n/* eslint-disable @typescript-eslint/no-explicit-any */\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.toCommandProperties = exports.toCommandValue = void 0;\n/**\n * Sanitizes an input into a string so it can be passed into issueCommand safely\n * @param input input to sanitize into a string\n */\nfunction toCommandValue(input) {\n if (input === null || input === undefined) {\n return '';\n }\n else if (typeof input === 'string' || input instanceof String) {\n return input;\n }\n return JSON.stringify(input);\n}\nexports.toCommandValue = toCommandValue;\n/**\n *\n * @param annotationProperties\n * @returns The command properties to send with the actual annotation command\n * See IssueCommandProperties: https://github.com/actions/runner/blob/main/src/Runner.Worker/ActionCommandManager.cs#L646\n */\nfunction toCommandProperties(annotationProperties) {\n if (!Object.keys(annotationProperties).length) {\n return {};\n }\n return {\n title: annotationProperties.title,\n file: annotationProperties.file,\n line: annotationProperties.startLine,\n endLine: annotationProperties.endLine,\n col: annotationProperties.startColumn,\n endColumn: annotationProperties.endColumn\n };\n}\nexports.toCommandProperties = toCommandProperties;\n//# sourceMappingURL=utils.js.map","\"use strict\";\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.hashFiles = exports.create = void 0;\nconst internal_globber_1 = require(\"./internal-globber\");\nconst internal_hash_files_1 = require(\"./internal-hash-files\");\n/**\n * Constructs a globber\n *\n * @param patterns Patterns separated by newlines\n * @param options Glob options\n */\nfunction create(patterns, options) {\n return __awaiter(this, void 0, void 0, function* () {\n return yield internal_globber_1.DefaultGlobber.create(patterns, options);\n });\n}\nexports.create = create;\n/**\n * Computes the sha256 hash of a glob\n *\n * @param patterns Patterns separated by newlines\n * @param currentWorkspace Workspace used when matching files\n * @param options Glob options\n * @param verbose Enables verbose logging\n */\nfunction hashFiles(patterns, currentWorkspace = '', options, verbose = false) {\n return __awaiter(this, void 0, void 0, function* () {\n let followSymbolicLinks = true;\n if (options && typeof options.followSymbolicLinks === 'boolean') {\n followSymbolicLinks = options.followSymbolicLinks;\n }\n const globber = yield create(patterns, { followSymbolicLinks });\n return internal_hash_files_1.hashFiles(globber, currentWorkspace, verbose);\n });\n}\nexports.hashFiles = hashFiles;\n//# sourceMappingURL=glob.js.map","\"use strict\";\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n __setModuleDefault(result, mod);\n return result;\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.getOptions = void 0;\nconst core = __importStar(require(\"@actions/core\"));\n/**\n * Returns a copy with defaults filled in.\n */\nfunction getOptions(copy) {\n const result = {\n followSymbolicLinks: true,\n implicitDescendants: true,\n matchDirectories: true,\n omitBrokenSymbolicLinks: true\n };\n if (copy) {\n if (typeof copy.followSymbolicLinks === 'boolean') {\n result.followSymbolicLinks = copy.followSymbolicLinks;\n core.debug(`followSymbolicLinks '${result.followSymbolicLinks}'`);\n }\n if (typeof copy.implicitDescendants === 'boolean') {\n result.implicitDescendants = copy.implicitDescendants;\n core.debug(`implicitDescendants '${result.implicitDescendants}'`);\n }\n if (typeof copy.matchDirectories === 'boolean') {\n result.matchDirectories = copy.matchDirectories;\n core.debug(`matchDirectories '${result.matchDirectories}'`);\n }\n if (typeof copy.omitBrokenSymbolicLinks === 'boolean') {\n result.omitBrokenSymbolicLinks = copy.omitBrokenSymbolicLinks;\n core.debug(`omitBrokenSymbolicLinks '${result.omitBrokenSymbolicLinks}'`);\n }\n }\n return result;\n}\nexports.getOptions = getOptions;\n//# sourceMappingURL=internal-glob-options-helper.js.map","\"use strict\";\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n __setModuleDefault(result, mod);\n return result;\n};\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nvar __asyncValues = (this && this.__asyncValues) || function (o) {\n if (!Symbol.asyncIterator) throw new TypeError(\"Symbol.asyncIterator is not defined.\");\n var m = o[Symbol.asyncIterator], i;\n return m ? m.call(o) : (o = typeof __values === \"function\" ? 
__values(o) : o[Symbol.iterator](), i = {}, verb(\"next\"), verb(\"throw\"), verb(\"return\"), i[Symbol.asyncIterator] = function () { return this; }, i);\n function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; }\n function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); }\n};\nvar __await = (this && this.__await) || function (v) { return this instanceof __await ? (this.v = v, this) : new __await(v); }\nvar __asyncGenerator = (this && this.__asyncGenerator) || function (thisArg, _arguments, generator) {\n if (!Symbol.asyncIterator) throw new TypeError(\"Symbol.asyncIterator is not defined.\");\n var g = generator.apply(thisArg, _arguments || []), i, q = [];\n return i = {}, verb(\"next\"), verb(\"throw\"), verb(\"return\"), i[Symbol.asyncIterator] = function () { return this; }, i;\n function verb(n) { if (g[n]) i[n] = function (v) { return new Promise(function (a, b) { q.push([n, v, a, b]) > 1 || resume(n, v); }); }; }\n function resume(n, v) { try { step(g[n](v)); } catch (e) { settle(q[0][3], e); } }\n function step(r) { r.value instanceof __await ? Promise.resolve(r.value.v).then(fulfill, reject) : settle(q[0][2], r); }\n function fulfill(value) { resume(\"next\", value); }\n function reject(value) { resume(\"throw\", value); }\n function settle(f, v) { if (f(v), q.shift(), q.length) resume(q[0][0], q[0][1]); }\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.DefaultGlobber = void 0;\nconst core = __importStar(require(\"@actions/core\"));\nconst fs = __importStar(require(\"fs\"));\nconst globOptionsHelper = __importStar(require(\"./internal-glob-options-helper\"));\nconst path = __importStar(require(\"path\"));\nconst patternHelper = __importStar(require(\"./internal-pattern-helper\"));\nconst internal_match_kind_1 = require(\"./internal-match-kind\");\nconst internal_pattern_1 = require(\"./internal-pattern\");\nconst internal_search_state_1 = require(\"./internal-search-state\");\nconst IS_WINDOWS = process.platform === 'win32';\nclass DefaultGlobber {\n constructor(options) {\n this.patterns = [];\n this.searchPaths = [];\n this.options = globOptionsHelper.getOptions(options);\n }\n getSearchPaths() {\n // Return a copy\n return this.searchPaths.slice();\n }\n glob() {\n var e_1, _a;\n return __awaiter(this, void 0, void 0, function* () {\n const result = [];\n try {\n for (var _b = __asyncValues(this.globGenerator()), _c; _c = yield _b.next(), !_c.done;) {\n const itemPath = _c.value;\n result.push(itemPath);\n }\n }\n catch (e_1_1) { e_1 = { error: e_1_1 }; }\n finally {\n try {\n if (_c && !_c.done && (_a = _b.return)) yield _a.call(_b);\n }\n finally { if (e_1) throw e_1.error; }\n }\n return result;\n });\n }\n globGenerator() {\n return __asyncGenerator(this, arguments, function* globGenerator_1() {\n // Fill in defaults options\n const options = globOptionsHelper.getOptions(this.options);\n // Implicit descendants?\n const patterns = [];\n for (const pattern of this.patterns) {\n patterns.push(pattern);\n if (options.implicitDescendants &&\n (pattern.trailingSeparator ||\n pattern.segments[pattern.segments.length - 1] !== '**')) {\n patterns.push(new internal_pattern_1.Pattern(pattern.negate, true, pattern.segments.concat('**')));\n }\n }\n // Push the search paths\n const stack = [];\n for (const searchPath of patternHelper.getSearchPaths(patterns)) {\n core.debug(`Search path 
'${searchPath}'`);\n // Exists?\n try {\n // Intentionally using lstat. Detection for broken symlink\n // will be performed later (if following symlinks).\n yield __await(fs.promises.lstat(searchPath));\n }\n catch (err) {\n if (err.code === 'ENOENT') {\n continue;\n }\n throw err;\n }\n stack.unshift(new internal_search_state_1.SearchState(searchPath, 1));\n }\n // Search\n const traversalChain = []; // used to detect cycles\n while (stack.length) {\n // Pop\n const item = stack.pop();\n // Match?\n const match = patternHelper.match(patterns, item.path);\n const partialMatch = !!match || patternHelper.partialMatch(patterns, item.path);\n if (!match && !partialMatch) {\n continue;\n }\n // Stat\n const stats = yield __await(DefaultGlobber.stat(item, options, traversalChain)\n // Broken symlink, or symlink cycle detected, or no longer exists\n );\n // Broken symlink, or symlink cycle detected, or no longer exists\n if (!stats) {\n continue;\n }\n // Directory\n if (stats.isDirectory()) {\n // Matched\n if (match & internal_match_kind_1.MatchKind.Directory && options.matchDirectories) {\n yield yield __await(item.path);\n }\n // Descend?\n else if (!partialMatch) {\n continue;\n }\n // Push the child items in reverse\n const childLevel = item.level + 1;\n const childItems = (yield __await(fs.promises.readdir(item.path))).map(x => new internal_search_state_1.SearchState(path.join(item.path, x), childLevel));\n stack.push(...childItems.reverse());\n }\n // File\n else if (match & internal_match_kind_1.MatchKind.File) {\n yield yield __await(item.path);\n }\n }\n });\n }\n /**\n * Constructs a DefaultGlobber\n */\n static create(patterns, options) {\n return __awaiter(this, void 0, void 0, function* () {\n const result = new DefaultGlobber(options);\n if (IS_WINDOWS) {\n patterns = patterns.replace(/\\r\\n/g, '\\n');\n patterns = patterns.replace(/\\r/g, '\\n');\n }\n const lines = patterns.split('\\n').map(x => x.trim());\n for (const line of lines) {\n // Empty or comment\n if (!line || line.startsWith('#')) {\n continue;\n }\n // Pattern\n else {\n result.patterns.push(new internal_pattern_1.Pattern(line));\n }\n }\n result.searchPaths.push(...patternHelper.getSearchPaths(result.patterns));\n return result;\n });\n }\n static stat(item, options, traversalChain) {\n return __awaiter(this, void 0, void 0, function* () {\n // Note:\n // `stat` returns info about the target of a symlink (or symlink chain)\n // `lstat` returns info about a symlink itself\n let stats;\n if (options.followSymbolicLinks) {\n try {\n // Use `stat` (following symlinks)\n stats = yield fs.promises.stat(item.path);\n }\n catch (err) {\n if (err.code === 'ENOENT') {\n if (options.omitBrokenSymbolicLinks) {\n core.debug(`Broken symlink '${item.path}'`);\n return undefined;\n }\n throw new Error(`No information found for the path '${item.path}'. 
This may indicate a broken symbolic link.`);\n }\n throw err;\n }\n }\n else {\n // Use `lstat` (not following symlinks)\n stats = yield fs.promises.lstat(item.path);\n }\n // Note, isDirectory() returns false for the lstat of a symlink\n if (stats.isDirectory() && options.followSymbolicLinks) {\n // Get the realpath\n const realPath = yield fs.promises.realpath(item.path);\n // Fixup the traversal chain to match the item level\n while (traversalChain.length >= item.level) {\n traversalChain.pop();\n }\n // Test for a cycle\n if (traversalChain.some((x) => x === realPath)) {\n core.debug(`Symlink cycle detected for path '${item.path}' and realpath '${realPath}'`);\n return undefined;\n }\n // Update the traversal chain\n traversalChain.push(realPath);\n }\n return stats;\n });\n }\n}\nexports.DefaultGlobber = DefaultGlobber;\n//# sourceMappingURL=internal-globber.js.map","\"use strict\";\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n __setModuleDefault(result, mod);\n return result;\n};\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nvar __asyncValues = (this && this.__asyncValues) || function (o) {\n if (!Symbol.asyncIterator) throw new TypeError(\"Symbol.asyncIterator is not defined.\");\n var m = o[Symbol.asyncIterator], i;\n return m ? m.call(o) : (o = typeof __values === \"function\" ? 
__values(o) : o[Symbol.iterator](), i = {}, verb(\"next\"), verb(\"throw\"), verb(\"return\"), i[Symbol.asyncIterator] = function () { return this; }, i);\n function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; }\n function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); }\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.hashFiles = void 0;\nconst crypto = __importStar(require(\"crypto\"));\nconst core = __importStar(require(\"@actions/core\"));\nconst fs = __importStar(require(\"fs\"));\nconst stream = __importStar(require(\"stream\"));\nconst util = __importStar(require(\"util\"));\nconst path = __importStar(require(\"path\"));\nfunction hashFiles(globber, currentWorkspace, verbose = false) {\n var e_1, _a;\n var _b;\n return __awaiter(this, void 0, void 0, function* () {\n const writeDelegate = verbose ? core.info : core.debug;\n let hasMatch = false;\n const githubWorkspace = currentWorkspace\n ? currentWorkspace\n : (_b = process.env['GITHUB_WORKSPACE']) !== null && _b !== void 0 ? _b : process.cwd();\n const result = crypto.createHash('sha256');\n let count = 0;\n try {\n for (var _c = __asyncValues(globber.globGenerator()), _d; _d = yield _c.next(), !_d.done;) {\n const file = _d.value;\n writeDelegate(file);\n if (!file.startsWith(`${githubWorkspace}${path.sep}`)) {\n writeDelegate(`Ignore '${file}' since it is not under GITHUB_WORKSPACE.`);\n continue;\n }\n if (fs.statSync(file).isDirectory()) {\n writeDelegate(`Skip directory '${file}'.`);\n continue;\n }\n const hash = crypto.createHash('sha256');\n const pipeline = util.promisify(stream.pipeline);\n yield pipeline(fs.createReadStream(file), hash);\n result.write(hash.digest());\n count++;\n if (!hasMatch) {\n hasMatch = true;\n }\n }\n }\n catch (e_1_1) { e_1 = { error: e_1_1 }; }\n finally {\n try {\n if (_d && !_d.done && (_a = _c.return)) yield _a.call(_c);\n }\n finally { if (e_1) throw e_1.error; }\n }\n result.end();\n if (hasMatch) {\n writeDelegate(`Found ${count} files to hash.`);\n return result.digest('hex');\n }\n else {\n writeDelegate(`No matches found for glob`);\n return '';\n }\n });\n}\nexports.hashFiles = hashFiles;\n//# sourceMappingURL=internal-hash-files.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.MatchKind = void 0;\n/**\n * Indicates whether a pattern matches a path\n */\nvar MatchKind;\n(function (MatchKind) {\n /** Not matched */\n MatchKind[MatchKind[\"None\"] = 0] = \"None\";\n /** Matched if the path is a directory */\n MatchKind[MatchKind[\"Directory\"] = 1] = \"Directory\";\n /** Matched if the path is a regular file */\n MatchKind[MatchKind[\"File\"] = 2] = \"File\";\n /** Matched */\n MatchKind[MatchKind[\"All\"] = 3] = \"All\";\n})(MatchKind = exports.MatchKind || (exports.MatchKind = {}));\n//# sourceMappingURL=internal-match-kind.js.map","\"use strict\";\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n __setModuleDefault(result, mod);\n return result;\n};\nvar __importDefault = (this && this.__importDefault) || function (mod) {\n return (mod && mod.__esModule) ? mod : { \"default\": mod };\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.safeTrimTrailingSeparator = exports.normalizeSeparators = exports.hasRoot = exports.hasAbsoluteRoot = exports.ensureAbsoluteRoot = exports.dirname = void 0;\nconst path = __importStar(require(\"path\"));\nconst assert_1 = __importDefault(require(\"assert\"));\nconst IS_WINDOWS = process.platform === 'win32';\n/**\n * Similar to path.dirname except normalizes the path separators and slightly better handling for Windows UNC paths.\n *\n * For example, on Linux/macOS:\n * - `/ => /`\n * - `/hello => /`\n *\n * For example, on Windows:\n * - `C:\\ => C:\\`\n * - `C:\\hello => C:\\`\n * - `C: => C:`\n * - `C:hello => C:`\n * - `\\ => \\`\n * - `\\hello => \\`\n * - `\\\\hello => \\\\hello`\n * - `\\\\hello\\world => \\\\hello\\world`\n */\nfunction dirname(p) {\n // Normalize slashes and trim unnecessary trailing slash\n p = safeTrimTrailingSeparator(p);\n // Windows UNC root, e.g. \\\\hello or \\\\hello\\world\n if (IS_WINDOWS && /^\\\\\\\\[^\\\\]+(\\\\[^\\\\]+)?$/.test(p)) {\n return p;\n }\n // Get dirname\n let result = path.dirname(p);\n // Trim trailing slash for Windows UNC root, e.g. \\\\hello\\world\\\n if (IS_WINDOWS && /^\\\\\\\\[^\\\\]+\\\\[^\\\\]+\\\\$/.test(result)) {\n result = safeTrimTrailingSeparator(result);\n }\n return result;\n}\nexports.dirname = dirname;\n/**\n * Roots the path if not already rooted. On Windows, relative roots like `\\`\n * or `C:` are expanded based on the current working directory.\n */\nfunction ensureAbsoluteRoot(root, itemPath) {\n assert_1.default(root, `ensureAbsoluteRoot parameter 'root' must not be empty`);\n assert_1.default(itemPath, `ensureAbsoluteRoot parameter 'itemPath' must not be empty`);\n // Already rooted\n if (hasAbsoluteRoot(itemPath)) {\n return itemPath;\n }\n // Windows\n if (IS_WINDOWS) {\n // Check for itemPath like C: or C:foo\n if (itemPath.match(/^[A-Z]:[^\\\\/]|^[A-Z]:$/i)) {\n let cwd = process.cwd();\n assert_1.default(cwd.match(/^[A-Z]:\\\\/i), `Expected current directory to start with an absolute drive root. Actual '${cwd}'`);\n // Drive letter matches cwd? Expand to cwd\n if (itemPath[0].toUpperCase() === cwd[0].toUpperCase()) {\n // Drive only, e.g. C:\n if (itemPath.length === 2) {\n // Preserve specified drive letter case (upper or lower)\n return `${itemPath[0]}:\\\\${cwd.substr(3)}`;\n }\n // Drive + path, e.g. C:foo\n else {\n if (!cwd.endsWith('\\\\')) {\n cwd += '\\\\';\n }\n // Preserve specified drive letter case (upper or lower)\n return `${itemPath[0]}:\\\\${cwd.substr(3)}${itemPath.substr(2)}`;\n }\n }\n // Different drive\n else {\n return `${itemPath[0]}:\\\\${itemPath.substr(2)}`;\n }\n }\n // Check for itemPath like \\ or \\foo\n else if (normalizeSeparators(itemPath).match(/^\\\\$|^\\\\[^\\\\]/)) {\n const cwd = process.cwd();\n assert_1.default(cwd.match(/^[A-Z]:\\\\/i), `Expected current directory to start with an absolute drive root. 
Actual '${cwd}'`);\n return `${cwd[0]}:\\\\${itemPath.substr(1)}`;\n }\n }\n assert_1.default(hasAbsoluteRoot(root), `ensureAbsoluteRoot parameter 'root' must have an absolute root`);\n // Otherwise ensure root ends with a separator\n if (root.endsWith('/') || (IS_WINDOWS && root.endsWith('\\\\'))) {\n // Intentionally empty\n }\n else {\n // Append separator\n root += path.sep;\n }\n return root + itemPath;\n}\nexports.ensureAbsoluteRoot = ensureAbsoluteRoot;\n/**\n * On Linux/macOS, true if path starts with `/`. On Windows, true for paths like:\n * `\\\\hello\\share` and `C:\\hello` (and using alternate separator).\n */\nfunction hasAbsoluteRoot(itemPath) {\n assert_1.default(itemPath, `hasAbsoluteRoot parameter 'itemPath' must not be empty`);\n // Normalize separators\n itemPath = normalizeSeparators(itemPath);\n // Windows\n if (IS_WINDOWS) {\n // E.g. \\\\hello\\share or C:\\hello\n return itemPath.startsWith('\\\\\\\\') || /^[A-Z]:\\\\/i.test(itemPath);\n }\n // E.g. /hello\n return itemPath.startsWith('/');\n}\nexports.hasAbsoluteRoot = hasAbsoluteRoot;\n/**\n * On Linux/macOS, true if path starts with `/`. On Windows, true for paths like:\n * `\\`, `\\hello`, `\\\\hello\\share`, `C:`, and `C:\\hello` (and using alternate separator).\n */\nfunction hasRoot(itemPath) {\n assert_1.default(itemPath, `isRooted parameter 'itemPath' must not be empty`);\n // Normalize separators\n itemPath = normalizeSeparators(itemPath);\n // Windows\n if (IS_WINDOWS) {\n // E.g. \\ or \\hello or \\\\hello\n // E.g. C: or C:\\hello\n return itemPath.startsWith('\\\\') || /^[A-Z]:/i.test(itemPath);\n }\n // E.g. /hello\n return itemPath.startsWith('/');\n}\nexports.hasRoot = hasRoot;\n/**\n * Removes redundant slashes and converts `/` to `\\` on Windows\n */\nfunction normalizeSeparators(p) {\n p = p || '';\n // Windows\n if (IS_WINDOWS) {\n // Convert slashes on Windows\n p = p.replace(/\\//g, '\\\\');\n // Remove redundant slashes\n const isUnc = /^\\\\\\\\+[^\\\\]/.test(p); // e.g. \\\\hello\n return (isUnc ? '\\\\' : '') + p.replace(/\\\\\\\\+/g, '\\\\'); // preserve leading \\\\ for UNC\n }\n // Remove redundant slashes\n return p.replace(/\\/\\/+/g, '/');\n}\nexports.normalizeSeparators = normalizeSeparators;\n/**\n * Normalizes the path separators and trims the trailing separator (when safe).\n * For example, `/foo/ => /foo` but `/ => /`\n */\nfunction safeTrimTrailingSeparator(p) {\n // Short-circuit if empty\n if (!p) {\n return '';\n }\n // Normalize separators\n p = normalizeSeparators(p);\n // No trailing slash\n if (!p.endsWith(path.sep)) {\n return p;\n }\n // Check '/' on Linux/macOS and '\\' on Windows\n if (p === path.sep) {\n return p;\n }\n // On Windows check if drive root. E.g. C:\\\n if (IS_WINDOWS && /^[A-Z]:\\\\$/i.test(p)) {\n return p;\n }\n // Otherwise trim trailing slash\n return p.substr(0, p.length - 1);\n}\nexports.safeTrimTrailingSeparator = safeTrimTrailingSeparator;\n//# sourceMappingURL=internal-path-helper.js.map","\"use strict\";\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n __setModuleDefault(result, mod);\n return result;\n};\nvar __importDefault = (this && this.__importDefault) || function (mod) {\n return (mod && mod.__esModule) ? mod : { \"default\": mod };\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.Path = void 0;\nconst path = __importStar(require(\"path\"));\nconst pathHelper = __importStar(require(\"./internal-path-helper\"));\nconst assert_1 = __importDefault(require(\"assert\"));\nconst IS_WINDOWS = process.platform === 'win32';\n/**\n * Helper class for parsing paths into segments\n */\nclass Path {\n /**\n * Constructs a Path\n * @param itemPath Path or array of segments\n */\n constructor(itemPath) {\n this.segments = [];\n // String\n if (typeof itemPath === 'string') {\n assert_1.default(itemPath, `Parameter 'itemPath' must not be empty`);\n // Normalize slashes and trim unnecessary trailing slash\n itemPath = pathHelper.safeTrimTrailingSeparator(itemPath);\n // Not rooted\n if (!pathHelper.hasRoot(itemPath)) {\n this.segments = itemPath.split(path.sep);\n }\n // Rooted\n else {\n // Add all segments, while not at the root\n let remaining = itemPath;\n let dir = pathHelper.dirname(remaining);\n while (dir !== remaining) {\n // Add the segment\n const basename = path.basename(remaining);\n this.segments.unshift(basename);\n // Truncate the last segment\n remaining = dir;\n dir = pathHelper.dirname(remaining);\n }\n // Remainder is the root\n this.segments.unshift(remaining);\n }\n }\n // Array\n else {\n // Must not be empty\n assert_1.default(itemPath.length > 0, `Parameter 'itemPath' must not be an empty array`);\n // Each segment\n for (let i = 0; i < itemPath.length; i++) {\n let segment = itemPath[i];\n // Must not be empty\n assert_1.default(segment, `Parameter 'itemPath' must not contain any empty segments`);\n // Normalize slashes\n segment = pathHelper.normalizeSeparators(itemPath[i]);\n // Root segment\n if (i === 0 && pathHelper.hasRoot(segment)) {\n segment = pathHelper.safeTrimTrailingSeparator(segment);\n assert_1.default(segment === pathHelper.dirname(segment), `Parameter 'itemPath' root segment contains information for multiple segments`);\n this.segments.push(segment);\n }\n // All other segments\n else {\n // Must not contain slash\n assert_1.default(!segment.includes(path.sep), `Parameter 'itemPath' contains unexpected path separators`);\n this.segments.push(segment);\n }\n }\n }\n }\n /**\n * Converts the path to it's string representation\n */\n toString() {\n // First segment\n let result = this.segments[0];\n // All others\n let skipSlash = result.endsWith(path.sep) || (IS_WINDOWS && /^[A-Z]:$/i.test(result));\n for (let i = 1; i < this.segments.length; i++) {\n if (skipSlash) {\n skipSlash = false;\n }\n else {\n result += path.sep;\n }\n result += this.segments[i];\n }\n return result;\n }\n}\nexports.Path = Path;\n//# sourceMappingURL=internal-path.js.map","\"use strict\";\nvar __createBinding = (this && this.__createBinding) || (Object.create ? 
(function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n __setModuleDefault(result, mod);\n return result;\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.partialMatch = exports.match = exports.getSearchPaths = void 0;\nconst pathHelper = __importStar(require(\"./internal-path-helper\"));\nconst internal_match_kind_1 = require(\"./internal-match-kind\");\nconst IS_WINDOWS = process.platform === 'win32';\n/**\n * Given an array of patterns, returns an array of paths to search.\n * Duplicates and paths under other included paths are filtered out.\n */\nfunction getSearchPaths(patterns) {\n // Ignore negate patterns\n patterns = patterns.filter(x => !x.negate);\n // Create a map of all search paths\n const searchPathMap = {};\n for (const pattern of patterns) {\n const key = IS_WINDOWS\n ? pattern.searchPath.toUpperCase()\n : pattern.searchPath;\n searchPathMap[key] = 'candidate';\n }\n const result = [];\n for (const pattern of patterns) {\n // Check if already included\n const key = IS_WINDOWS\n ? pattern.searchPath.toUpperCase()\n : pattern.searchPath;\n if (searchPathMap[key] === 'included') {\n continue;\n }\n // Check for an ancestor search path\n let foundAncestor = false;\n let tempKey = key;\n let parent = pathHelper.dirname(tempKey);\n while (parent !== tempKey) {\n if (searchPathMap[parent]) {\n foundAncestor = true;\n break;\n }\n tempKey = parent;\n parent = pathHelper.dirname(tempKey);\n }\n // Include the search pattern in the result\n if (!foundAncestor) {\n result.push(pattern.searchPath);\n searchPathMap[key] = 'included';\n }\n }\n return result;\n}\nexports.getSearchPaths = getSearchPaths;\n/**\n * Matches the patterns against the path\n */\nfunction match(patterns, itemPath) {\n let result = internal_match_kind_1.MatchKind.None;\n for (const pattern of patterns) {\n if (pattern.negate) {\n result &= ~pattern.match(itemPath);\n }\n else {\n result |= pattern.match(itemPath);\n }\n }\n return result;\n}\nexports.match = match;\n/**\n * Checks whether to descend further into the directory\n */\nfunction partialMatch(patterns, itemPath) {\n return patterns.some(x => !x.negate && x.partialMatch(itemPath));\n}\nexports.partialMatch = partialMatch;\n//# sourceMappingURL=internal-pattern-helper.js.map","\"use strict\";\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n __setModuleDefault(result, mod);\n return result;\n};\nvar __importDefault = (this && this.__importDefault) || function (mod) {\n return (mod && mod.__esModule) ? mod : { \"default\": mod };\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.Pattern = void 0;\nconst os = __importStar(require(\"os\"));\nconst path = __importStar(require(\"path\"));\nconst pathHelper = __importStar(require(\"./internal-path-helper\"));\nconst assert_1 = __importDefault(require(\"assert\"));\nconst minimatch_1 = require(\"minimatch\");\nconst internal_match_kind_1 = require(\"./internal-match-kind\");\nconst internal_path_1 = require(\"./internal-path\");\nconst IS_WINDOWS = process.platform === 'win32';\nclass Pattern {\n constructor(patternOrNegate, isImplicitPattern = false, segments, homedir) {\n /**\n * Indicates whether matches should be excluded from the result set\n */\n this.negate = false;\n // Pattern overload\n let pattern;\n if (typeof patternOrNegate === 'string') {\n pattern = patternOrNegate.trim();\n }\n // Segments overload\n else {\n // Convert to pattern\n segments = segments || [];\n assert_1.default(segments.length, `Parameter 'segments' must not empty`);\n const root = Pattern.getLiteral(segments[0]);\n assert_1.default(root && pathHelper.hasAbsoluteRoot(root), `Parameter 'segments' first element must be a root path`);\n pattern = new internal_path_1.Path(segments).toString().trim();\n if (patternOrNegate) {\n pattern = `!${pattern}`;\n }\n }\n // Negate\n while (pattern.startsWith('!')) {\n this.negate = !this.negate;\n pattern = pattern.substr(1).trim();\n }\n // Normalize slashes and ensures absolute root\n pattern = Pattern.fixupPattern(pattern, homedir);\n // Segments\n this.segments = new internal_path_1.Path(pattern).segments;\n // Trailing slash indicates the pattern should only match directories, not regular files\n this.trailingSeparator = pathHelper\n .normalizeSeparators(pattern)\n .endsWith(path.sep);\n pattern = pathHelper.safeTrimTrailingSeparator(pattern);\n // Search path (literal path prior to the first glob segment)\n let foundGlob = false;\n const searchSegments = this.segments\n .map(x => Pattern.getLiteral(x))\n .filter(x => !foundGlob && !(foundGlob = x === ''));\n this.searchPath = new internal_path_1.Path(searchSegments).toString();\n // Root RegExp (required when determining partial match)\n this.rootRegExp = new RegExp(Pattern.regExpEscape(searchSegments[0]), IS_WINDOWS ? 'i' : '');\n this.isImplicitPattern = isImplicitPattern;\n // Create minimatch\n const minimatchOptions = {\n dot: true,\n nobrace: true,\n nocase: IS_WINDOWS,\n nocomment: true,\n noext: true,\n nonegate: true\n };\n pattern = IS_WINDOWS ? pattern.replace(/\\\\/g, '/') : pattern;\n this.minimatch = new minimatch_1.Minimatch(pattern, minimatchOptions);\n }\n /**\n * Matches the pattern against the specified path\n */\n match(itemPath) {\n // Last segment is globstar?\n if (this.segments[this.segments.length - 1] === '**') {\n // Normalize slashes\n itemPath = pathHelper.normalizeSeparators(itemPath);\n // Append a trailing slash. 
Otherwise Minimatch will not match the directory immediately\n // preceding the globstar. For example, given the pattern `/foo/**`, Minimatch returns\n // false for `/foo` but returns true for `/foo/`. Append a trailing slash to handle that quirk.\n if (!itemPath.endsWith(path.sep) && this.isImplicitPattern === false) {\n // Note, this is safe because the constructor ensures the pattern has an absolute root.\n // For example, formats like C: and C:foo on Windows are resolved to an absolute root.\n itemPath = `${itemPath}${path.sep}`;\n }\n }\n else {\n // Normalize slashes and trim unnecessary trailing slash\n itemPath = pathHelper.safeTrimTrailingSeparator(itemPath);\n }\n // Match\n if (this.minimatch.match(itemPath)) {\n return this.trailingSeparator ? internal_match_kind_1.MatchKind.Directory : internal_match_kind_1.MatchKind.All;\n }\n return internal_match_kind_1.MatchKind.None;\n }\n /**\n * Indicates whether the pattern may match descendants of the specified path\n */\n partialMatch(itemPath) {\n // Normalize slashes and trim unnecessary trailing slash\n itemPath = pathHelper.safeTrimTrailingSeparator(itemPath);\n // matchOne does not handle root path correctly\n if (pathHelper.dirname(itemPath) === itemPath) {\n return this.rootRegExp.test(itemPath);\n }\n return this.minimatch.matchOne(itemPath.split(IS_WINDOWS ? /\\\\+/ : /\\/+/), this.minimatch.set[0], true);\n }\n /**\n * Escapes glob patterns within a path\n */\n static globEscape(s) {\n return (IS_WINDOWS ? s : s.replace(/\\\\/g, '\\\\\\\\')) // escape '\\' on Linux/macOS\n .replace(/(\\[)(?=[^/]+\\])/g, '[[]') // escape '[' when ']' follows within the path segment\n .replace(/\\?/g, '[?]') // escape '?'\n .replace(/\\*/g, '[*]'); // escape '*'\n }\n /**\n * Normalizes slashes and ensures absolute root\n */\n static fixupPattern(pattern, homedir) {\n // Empty\n assert_1.default(pattern, 'pattern cannot be empty');\n // Must not contain `.` segment, unless first segment\n // Must not contain `..` segment\n const literalSegments = new internal_path_1.Path(pattern).segments.map(x => Pattern.getLiteral(x));\n assert_1.default(literalSegments.every((x, i) => (x !== '.' || i === 0) && x !== '..'), `Invalid pattern '${pattern}'. Relative pathing '.' and '..' is not allowed.`);\n // Must not contain globs in root, e.g. Windows UNC path \\\\foo\\b*r\n assert_1.default(!pathHelper.hasRoot(pattern) || literalSegments[0], `Invalid pattern '${pattern}'. Root segment must not contain globs.`);\n // Normalize slashes\n pattern = pathHelper.normalizeSeparators(pattern);\n // Replace leading `.` segment\n if (pattern === '.' || pattern.startsWith(`.${path.sep}`)) {\n pattern = Pattern.globEscape(process.cwd()) + pattern.substr(1);\n }\n // Replace leading `~` segment\n else if (pattern === '~' || pattern.startsWith(`~${path.sep}`)) {\n homedir = homedir || os.homedir();\n assert_1.default(homedir, 'Unable to determine HOME directory');\n assert_1.default(pathHelper.hasAbsoluteRoot(homedir), `Expected HOME directory to be a rooted path. Actual '${homedir}'`);\n pattern = Pattern.globEscape(homedir) + pattern.substr(1);\n }\n // Replace relative drive root, e.g. pattern is C: or C:foo\n else if (IS_WINDOWS &&\n (pattern.match(/^[A-Z]:$/i) || pattern.match(/^[A-Z]:[^\\\\]/i))) {\n let root = pathHelper.ensureAbsoluteRoot('C:\\\\dummy-root', pattern.substr(0, 2));\n if (pattern.length > 2 && !root.endsWith('\\\\')) {\n root += '\\\\';\n }\n pattern = Pattern.globEscape(root) + pattern.substr(2);\n }\n // Replace relative root, e.g. 
pattern is \\ or \\foo\n else if (IS_WINDOWS && (pattern === '\\\\' || pattern.match(/^\\\\[^\\\\]/))) {\n let root = pathHelper.ensureAbsoluteRoot('C:\\\\dummy-root', '\\\\');\n if (!root.endsWith('\\\\')) {\n root += '\\\\';\n }\n pattern = Pattern.globEscape(root) + pattern.substr(1);\n }\n // Otherwise ensure absolute root\n else {\n pattern = pathHelper.ensureAbsoluteRoot(Pattern.globEscape(process.cwd()), pattern);\n }\n return pathHelper.normalizeSeparators(pattern);\n }\n /**\n * Attempts to unescape a pattern segment to create a literal path segment.\n * Otherwise returns empty string.\n */\n static getLiteral(segment) {\n let literal = '';\n for (let i = 0; i < segment.length; i++) {\n const c = segment[i];\n // Escape\n if (c === '\\\\' && !IS_WINDOWS && i + 1 < segment.length) {\n literal += segment[++i];\n continue;\n }\n // Wildcard\n else if (c === '*' || c === '?') {\n return '';\n }\n // Character set\n else if (c === '[' && i + 1 < segment.length) {\n let set = '';\n let closed = -1;\n for (let i2 = i + 1; i2 < segment.length; i2++) {\n const c2 = segment[i2];\n // Escape\n if (c2 === '\\\\' && !IS_WINDOWS && i2 + 1 < segment.length) {\n set += segment[++i2];\n continue;\n }\n // Closed\n else if (c2 === ']') {\n closed = i2;\n break;\n }\n // Otherwise\n else {\n set += c2;\n }\n }\n // Closed?\n if (closed >= 0) {\n // Cannot convert\n if (set.length > 1) {\n return '';\n }\n // Convert to literal\n if (set) {\n literal += set;\n i = closed;\n continue;\n }\n }\n // Otherwise fall thru\n }\n // Append\n literal += c;\n }\n return literal;\n }\n /**\n * Escapes regexp special characters\n * https://javascript.info/regexp-escaping\n */\n static regExpEscape(s) {\n return s.replace(/[[\\\\^$.|?*+()]/g, '\\\\$&');\n }\n}\nexports.Pattern = Pattern;\n//# sourceMappingURL=internal-pattern.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.SearchState = void 0;\nclass SearchState {\n constructor(path, level) {\n this.path = path;\n this.level = level;\n }\n}\nexports.SearchState = SearchState;\n//# sourceMappingURL=internal-search-state.js.map","\"use strict\";\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.PersonalAccessTokenCredentialHandler = exports.BearerCredentialHandler = exports.BasicCredentialHandler = void 0;\nclass BasicCredentialHandler {\n constructor(username, password) {\n this.username = username;\n this.password = password;\n }\n prepareRequest(options) {\n if (!options.headers) {\n throw Error('The request has no headers');\n }\n options.headers['Authorization'] = `Basic ${Buffer.from(`${this.username}:${this.password}`).toString('base64')}`;\n }\n // This handler cannot handle 401\n canHandleAuthentication() {\n return false;\n }\n handleAuthentication() {\n return __awaiter(this, void 0, void 0, function* () {\n throw new Error('not implemented');\n });\n }\n}\nexports.BasicCredentialHandler = BasicCredentialHandler;\nclass BearerCredentialHandler {\n constructor(token) {\n this.token = token;\n }\n // currently implements pre-authorization\n // TODO: support preAuth = false where it hooks on 401\n prepareRequest(options) {\n if (!options.headers) {\n throw Error('The request has no headers');\n }\n options.headers['Authorization'] = `Bearer ${this.token}`;\n }\n // This handler cannot handle 401\n canHandleAuthentication() {\n return false;\n }\n handleAuthentication() {\n return __awaiter(this, void 0, void 0, function* () {\n throw new Error('not implemented');\n });\n }\n}\nexports.BearerCredentialHandler = BearerCredentialHandler;\nclass PersonalAccessTokenCredentialHandler {\n constructor(token) {\n this.token = token;\n }\n // currently implements pre-authorization\n // TODO: support preAuth = false where it hooks on 401\n prepareRequest(options) {\n if (!options.headers) {\n throw Error('The request has no headers');\n }\n options.headers['Authorization'] = `Basic ${Buffer.from(`PAT:${this.token}`).toString('base64')}`;\n }\n // This handler cannot handle 401\n canHandleAuthentication() {\n return false;\n }\n handleAuthentication() {\n return __awaiter(this, void 0, void 0, function* () {\n throw new Error('not implemented');\n });\n }\n}\nexports.PersonalAccessTokenCredentialHandler = PersonalAccessTokenCredentialHandler;\n//# sourceMappingURL=auth.js.map","\"use strict\";\n/* eslint-disable @typescript-eslint/no-explicit-any */\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n __setModuleDefault(result, mod);\n return result;\n};\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? 
value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.HttpClient = exports.isHttps = exports.HttpClientResponse = exports.HttpClientError = exports.getProxyUrl = exports.MediaTypes = exports.Headers = exports.HttpCodes = void 0;\nconst http = __importStar(require(\"http\"));\nconst https = __importStar(require(\"https\"));\nconst pm = __importStar(require(\"./proxy\"));\nconst tunnel = __importStar(require(\"tunnel\"));\nvar HttpCodes;\n(function (HttpCodes) {\n HttpCodes[HttpCodes[\"OK\"] = 200] = \"OK\";\n HttpCodes[HttpCodes[\"MultipleChoices\"] = 300] = \"MultipleChoices\";\n HttpCodes[HttpCodes[\"MovedPermanently\"] = 301] = \"MovedPermanently\";\n HttpCodes[HttpCodes[\"ResourceMoved\"] = 302] = \"ResourceMoved\";\n HttpCodes[HttpCodes[\"SeeOther\"] = 303] = \"SeeOther\";\n HttpCodes[HttpCodes[\"NotModified\"] = 304] = \"NotModified\";\n HttpCodes[HttpCodes[\"UseProxy\"] = 305] = \"UseProxy\";\n HttpCodes[HttpCodes[\"SwitchProxy\"] = 306] = \"SwitchProxy\";\n HttpCodes[HttpCodes[\"TemporaryRedirect\"] = 307] = \"TemporaryRedirect\";\n HttpCodes[HttpCodes[\"PermanentRedirect\"] = 308] = \"PermanentRedirect\";\n HttpCodes[HttpCodes[\"BadRequest\"] = 400] = \"BadRequest\";\n HttpCodes[HttpCodes[\"Unauthorized\"] = 401] = \"Unauthorized\";\n HttpCodes[HttpCodes[\"PaymentRequired\"] = 402] = \"PaymentRequired\";\n HttpCodes[HttpCodes[\"Forbidden\"] = 403] = \"Forbidden\";\n HttpCodes[HttpCodes[\"NotFound\"] = 404] = \"NotFound\";\n HttpCodes[HttpCodes[\"MethodNotAllowed\"] = 405] = \"MethodNotAllowed\";\n HttpCodes[HttpCodes[\"NotAcceptable\"] = 406] = \"NotAcceptable\";\n HttpCodes[HttpCodes[\"ProxyAuthenticationRequired\"] = 407] = \"ProxyAuthenticationRequired\";\n HttpCodes[HttpCodes[\"RequestTimeout\"] = 408] = \"RequestTimeout\";\n HttpCodes[HttpCodes[\"Conflict\"] = 409] = \"Conflict\";\n HttpCodes[HttpCodes[\"Gone\"] = 410] = \"Gone\";\n HttpCodes[HttpCodes[\"TooManyRequests\"] = 429] = \"TooManyRequests\";\n HttpCodes[HttpCodes[\"InternalServerError\"] = 500] = \"InternalServerError\";\n HttpCodes[HttpCodes[\"NotImplemented\"] = 501] = \"NotImplemented\";\n HttpCodes[HttpCodes[\"BadGateway\"] = 502] = \"BadGateway\";\n HttpCodes[HttpCodes[\"ServiceUnavailable\"] = 503] = \"ServiceUnavailable\";\n HttpCodes[HttpCodes[\"GatewayTimeout\"] = 504] = \"GatewayTimeout\";\n})(HttpCodes = exports.HttpCodes || (exports.HttpCodes = {}));\nvar Headers;\n(function (Headers) {\n Headers[\"Accept\"] = \"accept\";\n Headers[\"ContentType\"] = \"content-type\";\n})(Headers = exports.Headers || (exports.Headers = {}));\nvar MediaTypes;\n(function (MediaTypes) {\n MediaTypes[\"ApplicationJson\"] = \"application/json\";\n})(MediaTypes = exports.MediaTypes || (exports.MediaTypes = {}));\n/**\n * Returns the proxy URL, depending upon the supplied url and proxy environment variables.\n * @param serverUrl The server URL where the request will be sent. 
For example, https://api.github.com\n */\nfunction getProxyUrl(serverUrl) {\n const proxyUrl = pm.getProxyUrl(new URL(serverUrl));\n return proxyUrl ? proxyUrl.href : '';\n}\nexports.getProxyUrl = getProxyUrl;\nconst HttpRedirectCodes = [\n HttpCodes.MovedPermanently,\n HttpCodes.ResourceMoved,\n HttpCodes.SeeOther,\n HttpCodes.TemporaryRedirect,\n HttpCodes.PermanentRedirect\n];\nconst HttpResponseRetryCodes = [\n HttpCodes.BadGateway,\n HttpCodes.ServiceUnavailable,\n HttpCodes.GatewayTimeout\n];\nconst RetryableHttpVerbs = ['OPTIONS', 'GET', 'DELETE', 'HEAD'];\nconst ExponentialBackoffCeiling = 10;\nconst ExponentialBackoffTimeSlice = 5;\nclass HttpClientError extends Error {\n constructor(message, statusCode) {\n super(message);\n this.name = 'HttpClientError';\n this.statusCode = statusCode;\n Object.setPrototypeOf(this, HttpClientError.prototype);\n }\n}\nexports.HttpClientError = HttpClientError;\nclass HttpClientResponse {\n constructor(message) {\n this.message = message;\n }\n readBody() {\n return __awaiter(this, void 0, void 0, function* () {\n return new Promise((resolve) => __awaiter(this, void 0, void 0, function* () {\n let output = Buffer.alloc(0);\n this.message.on('data', (chunk) => {\n output = Buffer.concat([output, chunk]);\n });\n this.message.on('end', () => {\n resolve(output.toString());\n });\n }));\n });\n }\n}\nexports.HttpClientResponse = HttpClientResponse;\nfunction isHttps(requestUrl) {\n const parsedUrl = new URL(requestUrl);\n return parsedUrl.protocol === 'https:';\n}\nexports.isHttps = isHttps;\nclass HttpClient {\n constructor(userAgent, handlers, requestOptions) {\n this._ignoreSslError = false;\n this._allowRedirects = true;\n this._allowRedirectDowngrade = false;\n this._maxRedirects = 50;\n this._allowRetries = false;\n this._maxRetries = 1;\n this._keepAlive = false;\n this._disposed = false;\n this.userAgent = userAgent;\n this.handlers = handlers || [];\n this.requestOptions = requestOptions;\n if (requestOptions) {\n if (requestOptions.ignoreSslError != null) {\n this._ignoreSslError = requestOptions.ignoreSslError;\n }\n this._socketTimeout = requestOptions.socketTimeout;\n if (requestOptions.allowRedirects != null) {\n this._allowRedirects = requestOptions.allowRedirects;\n }\n if (requestOptions.allowRedirectDowngrade != null) {\n this._allowRedirectDowngrade = requestOptions.allowRedirectDowngrade;\n }\n if (requestOptions.maxRedirects != null) {\n this._maxRedirects = Math.max(requestOptions.maxRedirects, 0);\n }\n if (requestOptions.keepAlive != null) {\n this._keepAlive = requestOptions.keepAlive;\n }\n if (requestOptions.allowRetries != null) {\n this._allowRetries = requestOptions.allowRetries;\n }\n if (requestOptions.maxRetries != null) {\n this._maxRetries = requestOptions.maxRetries;\n }\n }\n }\n options(requestUrl, additionalHeaders) {\n return __awaiter(this, void 0, void 0, function* () {\n return this.request('OPTIONS', requestUrl, null, additionalHeaders || {});\n });\n }\n get(requestUrl, additionalHeaders) {\n return __awaiter(this, void 0, void 0, function* () {\n return this.request('GET', requestUrl, null, additionalHeaders || {});\n });\n }\n del(requestUrl, additionalHeaders) {\n return __awaiter(this, void 0, void 0, function* () {\n return this.request('DELETE', requestUrl, null, additionalHeaders || {});\n });\n }\n post(requestUrl, data, additionalHeaders) {\n return __awaiter(this, void 0, void 0, function* () {\n return this.request('POST', requestUrl, data, additionalHeaders || {});\n });\n }\n patch(requestUrl, 
data, additionalHeaders) {\n return __awaiter(this, void 0, void 0, function* () {\n return this.request('PATCH', requestUrl, data, additionalHeaders || {});\n });\n }\n put(requestUrl, data, additionalHeaders) {\n return __awaiter(this, void 0, void 0, function* () {\n return this.request('PUT', requestUrl, data, additionalHeaders || {});\n });\n }\n head(requestUrl, additionalHeaders) {\n return __awaiter(this, void 0, void 0, function* () {\n return this.request('HEAD', requestUrl, null, additionalHeaders || {});\n });\n }\n sendStream(verb, requestUrl, stream, additionalHeaders) {\n return __awaiter(this, void 0, void 0, function* () {\n return this.request(verb, requestUrl, stream, additionalHeaders);\n });\n }\n /**\n * Gets a typed object from an endpoint\n * Be aware that not found returns a null. Other errors (4xx, 5xx) reject the promise\n */\n getJson(requestUrl, additionalHeaders = {}) {\n return __awaiter(this, void 0, void 0, function* () {\n additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);\n const res = yield this.get(requestUrl, additionalHeaders);\n return this._processResponse(res, this.requestOptions);\n });\n }\n postJson(requestUrl, obj, additionalHeaders = {}) {\n return __awaiter(this, void 0, void 0, function* () {\n const data = JSON.stringify(obj, null, 2);\n additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);\n additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson);\n const res = yield this.post(requestUrl, data, additionalHeaders);\n return this._processResponse(res, this.requestOptions);\n });\n }\n putJson(requestUrl, obj, additionalHeaders = {}) {\n return __awaiter(this, void 0, void 0, function* () {\n const data = JSON.stringify(obj, null, 2);\n additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);\n additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson);\n const res = yield this.put(requestUrl, data, additionalHeaders);\n return this._processResponse(res, this.requestOptions);\n });\n }\n patchJson(requestUrl, obj, additionalHeaders = {}) {\n return __awaiter(this, void 0, void 0, function* () {\n const data = JSON.stringify(obj, null, 2);\n additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);\n additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson);\n const res = yield this.patch(requestUrl, data, additionalHeaders);\n return this._processResponse(res, this.requestOptions);\n });\n }\n /**\n * Makes a raw http request.\n * All other methods such as get, post, patch, and request ultimately call this.\n * Prefer get, del, post and patch\n */\n request(verb, requestUrl, data, headers) {\n return __awaiter(this, void 0, void 0, function* () {\n if (this._disposed) {\n throw new Error('Client has already been disposed.');\n }\n const parsedUrl = new URL(requestUrl);\n let info = this._prepareRequest(verb, parsedUrl, headers);\n // Only perform retries on reads since writes may not be idempotent.\n const maxTries = this._allowRetries && RetryableHttpVerbs.includes(verb)\n ? 
this._maxRetries + 1\n : 1;\n let numTries = 0;\n let response;\n do {\n response = yield this.requestRaw(info, data);\n // Check if it's an authentication challenge\n if (response &&\n response.message &&\n response.message.statusCode === HttpCodes.Unauthorized) {\n let authenticationHandler;\n for (const handler of this.handlers) {\n if (handler.canHandleAuthentication(response)) {\n authenticationHandler = handler;\n break;\n }\n }\n if (authenticationHandler) {\n return authenticationHandler.handleAuthentication(this, info, data);\n }\n else {\n // We have received an unauthorized response but have no handlers to handle it.\n // Let the response return to the caller.\n return response;\n }\n }\n let redirectsRemaining = this._maxRedirects;\n while (response.message.statusCode &&\n HttpRedirectCodes.includes(response.message.statusCode) &&\n this._allowRedirects &&\n redirectsRemaining > 0) {\n const redirectUrl = response.message.headers['location'];\n if (!redirectUrl) {\n // if there's no location to redirect to, we won't\n break;\n }\n const parsedRedirectUrl = new URL(redirectUrl);\n if (parsedUrl.protocol === 'https:' &&\n parsedUrl.protocol !== parsedRedirectUrl.protocol &&\n !this._allowRedirectDowngrade) {\n throw new Error('Redirect from HTTPS to HTTP protocol. This downgrade is not allowed for security reasons. If you want to allow this behavior, set the allowRedirectDowngrade option to true.');\n }\n // we need to finish reading the response before reassigning response\n // which will leak the open socket.\n yield response.readBody();\n // strip authorization header if redirected to a different hostname\n if (parsedRedirectUrl.hostname !== parsedUrl.hostname) {\n for (const header in headers) {\n // header names are case insensitive\n if (header.toLowerCase() === 'authorization') {\n delete headers[header];\n }\n }\n }\n // let's make the request with the new redirectUrl\n info = this._prepareRequest(verb, parsedRedirectUrl, headers);\n response = yield this.requestRaw(info, data);\n redirectsRemaining--;\n }\n if (!response.message.statusCode ||\n !HttpResponseRetryCodes.includes(response.message.statusCode)) {\n // If not a retry code, return immediately instead of retrying\n return response;\n }\n numTries += 1;\n if (numTries < maxTries) {\n yield response.readBody();\n yield this._performExponentialBackoff(numTries);\n }\n } while (numTries < maxTries);\n return response;\n });\n }\n /**\n * Needs to be called if keepAlive is set to true in request options.\n */\n dispose() {\n if (this._agent) {\n this._agent.destroy();\n }\n this._disposed = true;\n }\n /**\n * Raw request.\n * @param info\n * @param data\n */\n requestRaw(info, data) {\n return __awaiter(this, void 0, void 0, function* () {\n return new Promise((resolve, reject) => {\n function callbackForResult(err, res) {\n if (err) {\n reject(err);\n }\n else if (!res) {\n // If `err` is not passed, then `res` must be passed.\n reject(new Error('Unknown error'));\n }\n else {\n resolve(res);\n }\n }\n this.requestRawWithCallback(info, data, callbackForResult);\n });\n });\n }\n /**\n * Raw request with callback.\n * @param info\n * @param data\n * @param onResult\n */\n requestRawWithCallback(info, data, onResult) {\n if (typeof data === 'string') {\n if (!info.options.headers) {\n info.options.headers = {};\n }\n info.options.headers['Content-Length'] = Buffer.byteLength(data, 'utf8');\n }\n let callbackCalled = false;\n function handleResult(err, res) {\n if (!callbackCalled) {\n callbackCalled = true;\n 
onResult(err, res);\n }\n }\n const req = info.httpModule.request(info.options, (msg) => {\n const res = new HttpClientResponse(msg);\n handleResult(undefined, res);\n });\n let socket;\n req.on('socket', sock => {\n socket = sock;\n });\n // If we ever get disconnected, we want the socket to timeout eventually\n req.setTimeout(this._socketTimeout || 3 * 60000, () => {\n if (socket) {\n socket.end();\n }\n handleResult(new Error(`Request timeout: ${info.options.path}`));\n });\n req.on('error', function (err) {\n // err has statusCode property\n // res should have headers\n handleResult(err);\n });\n if (data && typeof data === 'string') {\n req.write(data, 'utf8');\n }\n if (data && typeof data !== 'string') {\n data.on('close', function () {\n req.end();\n });\n data.pipe(req);\n }\n else {\n req.end();\n }\n }\n /**\n * Gets an http agent. This function is useful when you need an http agent that handles\n * routing through a proxy server - depending upon the url and proxy environment variables.\n * @param serverUrl The server URL where the request will be sent. For example, https://api.github.com\n */\n getAgent(serverUrl) {\n const parsedUrl = new URL(serverUrl);\n return this._getAgent(parsedUrl);\n }\n _prepareRequest(method, requestUrl, headers) {\n const info = {};\n info.parsedUrl = requestUrl;\n const usingSsl = info.parsedUrl.protocol === 'https:';\n info.httpModule = usingSsl ? https : http;\n const defaultPort = usingSsl ? 443 : 80;\n info.options = {};\n info.options.host = info.parsedUrl.hostname;\n info.options.port = info.parsedUrl.port\n ? parseInt(info.parsedUrl.port)\n : defaultPort;\n info.options.path =\n (info.parsedUrl.pathname || '') + (info.parsedUrl.search || '');\n info.options.method = method;\n info.options.headers = this._mergeHeaders(headers);\n if (this.userAgent != null) {\n info.options.headers['user-agent'] = this.userAgent;\n }\n info.options.agent = this._getAgent(info.parsedUrl);\n // gives handlers an opportunity to participate\n if (this.handlers) {\n for (const handler of this.handlers) {\n handler.prepareRequest(info.options);\n }\n }\n return info;\n }\n _mergeHeaders(headers) {\n if (this.requestOptions && this.requestOptions.headers) {\n return Object.assign({}, lowercaseKeys(this.requestOptions.headers), lowercaseKeys(headers || {}));\n }\n return lowercaseKeys(headers || {});\n }\n _getExistingOrDefaultHeader(additionalHeaders, header, _default) {\n let clientHeader;\n if (this.requestOptions && this.requestOptions.headers) {\n clientHeader = lowercaseKeys(this.requestOptions.headers)[header];\n }\n return additionalHeaders[header] || clientHeader || _default;\n }\n _getAgent(parsedUrl) {\n let agent;\n const proxyUrl = pm.getProxyUrl(parsedUrl);\n const useProxy = proxyUrl && proxyUrl.hostname;\n if (this._keepAlive && useProxy) {\n agent = this._proxyAgent;\n }\n if (this._keepAlive && !useProxy) {\n agent = this._agent;\n }\n // if agent is already assigned use that agent.\n if (agent) {\n return agent;\n }\n const usingSsl = parsedUrl.protocol === 'https:';\n let maxSockets = 100;\n if (this.requestOptions) {\n maxSockets = this.requestOptions.maxSockets || http.globalAgent.maxSockets;\n }\n // This is `useProxy` again, but we need to check `proxyURl` directly for TypeScripts's flow analysis.\n if (proxyUrl && proxyUrl.hostname) {\n const agentOptions = {\n maxSockets,\n keepAlive: this._keepAlive,\n proxy: Object.assign(Object.assign({}, ((proxyUrl.username || proxyUrl.password) && {\n proxyAuth: 
`${proxyUrl.username}:${proxyUrl.password}`\n })), { host: proxyUrl.hostname, port: proxyUrl.port })\n };\n let tunnelAgent;\n const overHttps = proxyUrl.protocol === 'https:';\n if (usingSsl) {\n tunnelAgent = overHttps ? tunnel.httpsOverHttps : tunnel.httpsOverHttp;\n }\n else {\n tunnelAgent = overHttps ? tunnel.httpOverHttps : tunnel.httpOverHttp;\n }\n agent = tunnelAgent(agentOptions);\n this._proxyAgent = agent;\n }\n // if reusing agent across request and tunneling agent isn't assigned create a new agent\n if (this._keepAlive && !agent) {\n const options = { keepAlive: this._keepAlive, maxSockets };\n agent = usingSsl ? new https.Agent(options) : new http.Agent(options);\n this._agent = agent;\n }\n // if not using private agent and tunnel agent isn't setup then use global agent\n if (!agent) {\n agent = usingSsl ? https.globalAgent : http.globalAgent;\n }\n if (usingSsl && this._ignoreSslError) {\n // we don't want to set NODE_TLS_REJECT_UNAUTHORIZED=0 since that will affect request for entire process\n // http.RequestOptions doesn't expose a way to modify RequestOptions.agent.options\n // we have to cast it to any and change it directly\n agent.options = Object.assign(agent.options || {}, {\n rejectUnauthorized: false\n });\n }\n return agent;\n }\n _performExponentialBackoff(retryNumber) {\n return __awaiter(this, void 0, void 0, function* () {\n retryNumber = Math.min(ExponentialBackoffCeiling, retryNumber);\n const ms = ExponentialBackoffTimeSlice * Math.pow(2, retryNumber);\n return new Promise(resolve => setTimeout(() => resolve(), ms));\n });\n }\n _processResponse(res, options) {\n return __awaiter(this, void 0, void 0, function* () {\n return new Promise((resolve, reject) => __awaiter(this, void 0, void 0, function* () {\n const statusCode = res.message.statusCode || 0;\n const response = {\n statusCode,\n result: null,\n headers: {}\n };\n // not found leads to null obj returned\n if (statusCode === HttpCodes.NotFound) {\n resolve(response);\n }\n // get the result from the body\n function dateTimeDeserializer(key, value) {\n if (typeof value === 'string') {\n const a = new Date(value);\n if (!isNaN(a.valueOf())) {\n return a;\n }\n }\n return value;\n }\n let obj;\n let contents;\n try {\n contents = yield res.readBody();\n if (contents && contents.length > 0) {\n if (options && options.deserializeDates) {\n obj = JSON.parse(contents, dateTimeDeserializer);\n }\n else {\n obj = JSON.parse(contents);\n }\n response.result = obj;\n }\n response.headers = res.message.headers;\n }\n catch (err) {\n // Invalid resource (contents not json); leaving result obj null\n }\n // note that 3xx redirects are handled by the http layer.\n if (statusCode > 299) {\n let msg;\n // if exception/error in body, attempt to get better error\n if (obj && obj.message) {\n msg = obj.message;\n }\n else if (contents && contents.length > 0) {\n // it may be the case that the exception is in the body message as string\n msg = contents;\n }\n else {\n msg = `Failed request: (${statusCode})`;\n }\n const err = new HttpClientError(msg, statusCode);\n err.result = response.result;\n reject(err);\n }\n else {\n resolve(response);\n }\n }));\n });\n }\n}\nexports.HttpClient = HttpClient;\nconst lowercaseKeys = (obj) => Object.keys(obj).reduce((c, k) => ((c[k.toLowerCase()] = obj[k]), c), {});\n//# sourceMappingURL=index.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.checkBypass = exports.getProxyUrl = void 0;\nfunction getProxyUrl(reqUrl) {\n const 
usingSsl = reqUrl.protocol === 'https:';\n if (checkBypass(reqUrl)) {\n return undefined;\n }\n const proxyVar = (() => {\n if (usingSsl) {\n return process.env['https_proxy'] || process.env['HTTPS_PROXY'];\n }\n else {\n return process.env['http_proxy'] || process.env['HTTP_PROXY'];\n }\n })();\n if (proxyVar) {\n return new URL(proxyVar);\n }\n else {\n return undefined;\n }\n}\nexports.getProxyUrl = getProxyUrl;\nfunction checkBypass(reqUrl) {\n if (!reqUrl.hostname) {\n return false;\n }\n const noProxy = process.env['no_proxy'] || process.env['NO_PROXY'] || '';\n if (!noProxy) {\n return false;\n }\n // Determine the request port\n let reqPort;\n if (reqUrl.port) {\n reqPort = Number(reqUrl.port);\n }\n else if (reqUrl.protocol === 'http:') {\n reqPort = 80;\n }\n else if (reqUrl.protocol === 'https:') {\n reqPort = 443;\n }\n // Format the request hostname and hostname with port\n const upperReqHosts = [reqUrl.hostname.toUpperCase()];\n if (typeof reqPort === 'number') {\n upperReqHosts.push(`${upperReqHosts[0]}:${reqPort}`);\n }\n // Compare request host against noproxy\n for (const upperNoProxyItem of noProxy\n .split(',')\n .map(x => x.trim().toUpperCase())\n .filter(x => x)) {\n if (upperReqHosts.some(x => x === upperNoProxyItem)) {\n return true;\n }\n }\n return false;\n}\nexports.checkBypass = checkBypass;\n//# sourceMappingURL=proxy.js.map","'use strict';\nmodule.exports = balanced;\nfunction balanced(a, b, str) {\n if (a instanceof RegExp) a = maybeMatch(a, str);\n if (b instanceof RegExp) b = maybeMatch(b, str);\n\n var r = range(a, b, str);\n\n return r && {\n start: r[0],\n end: r[1],\n pre: str.slice(0, r[0]),\n body: str.slice(r[0] + a.length, r[1]),\n post: str.slice(r[1] + b.length)\n };\n}\n\nfunction maybeMatch(reg, str) {\n var m = str.match(reg);\n return m ? m[0] : null;\n}\n\nbalanced.range = range;\nfunction range(a, b, str) {\n var begs, beg, left, right, result;\n var ai = str.indexOf(a);\n var bi = str.indexOf(b, ai + 1);\n var i = ai;\n\n if (ai >= 0 && bi > 0) {\n if(a===b) {\n return [ai, bi];\n }\n begs = [];\n left = str.length;\n\n while (i >= 0 && !result) {\n if (i == ai) {\n begs.push(i);\n ai = str.indexOf(a, i + 1);\n } else if (begs.length == 1) {\n result = [ begs.pop(), bi ];\n } else {\n beg = begs.pop();\n if (beg < left) {\n left = beg;\n right = bi;\n }\n\n bi = str.indexOf(b, i + 1);\n }\n\n i = ai < bi && ai >= 0 ? ai : bi;\n }\n\n if (begs.length) {\n result = [ left, right ];\n }\n }\n\n return result;\n}\n","var concatMap = require('concat-map');\nvar balanced = require('balanced-match');\n\nmodule.exports = expandTop;\n\nvar escSlash = '\\0SLASH'+Math.random()+'\\0';\nvar escOpen = '\\0OPEN'+Math.random()+'\\0';\nvar escClose = '\\0CLOSE'+Math.random()+'\\0';\nvar escComma = '\\0COMMA'+Math.random()+'\\0';\nvar escPeriod = '\\0PERIOD'+Math.random()+'\\0';\n\nfunction numeric(str) {\n return parseInt(str, 10) == str\n ? 
parseInt(str, 10)\n : str.charCodeAt(0);\n}\n\nfunction escapeBraces(str) {\n return str.split('\\\\\\\\').join(escSlash)\n .split('\\\\{').join(escOpen)\n .split('\\\\}').join(escClose)\n .split('\\\\,').join(escComma)\n .split('\\\\.').join(escPeriod);\n}\n\nfunction unescapeBraces(str) {\n return str.split(escSlash).join('\\\\')\n .split(escOpen).join('{')\n .split(escClose).join('}')\n .split(escComma).join(',')\n .split(escPeriod).join('.');\n}\n\n\n// Basically just str.split(\",\"), but handling cases\n// where we have nested braced sections, which should be\n// treated as individual members, like {a,{b,c},d}\nfunction parseCommaParts(str) {\n if (!str)\n return [''];\n\n var parts = [];\n var m = balanced('{', '}', str);\n\n if (!m)\n return str.split(',');\n\n var pre = m.pre;\n var body = m.body;\n var post = m.post;\n var p = pre.split(',');\n\n p[p.length-1] += '{' + body + '}';\n var postParts = parseCommaParts(post);\n if (post.length) {\n p[p.length-1] += postParts.shift();\n p.push.apply(p, postParts);\n }\n\n parts.push.apply(parts, p);\n\n return parts;\n}\n\nfunction expandTop(str) {\n if (!str)\n return [];\n\n // I don't know why Bash 4.3 does this, but it does.\n // Anything starting with {} will have the first two bytes preserved\n // but *only* at the top level, so {},a}b will not expand to anything,\n // but a{},b}c will be expanded to [a}c,abc].\n // One could argue that this is a bug in Bash, but since the goal of\n // this module is to match Bash's rules, we escape a leading {}\n if (str.substr(0, 2) === '{}') {\n str = '\\\\{\\\\}' + str.substr(2);\n }\n\n return expand(escapeBraces(str), true).map(unescapeBraces);\n}\n\nfunction identity(e) {\n return e;\n}\n\nfunction embrace(str) {\n return '{' + str + '}';\n}\nfunction isPadded(el) {\n return /^-?0\\d/.test(el);\n}\n\nfunction lte(i, y) {\n return i <= y;\n}\nfunction gte(i, y) {\n return i >= y;\n}\n\nfunction expand(str, isTop) {\n var expansions = [];\n\n var m = balanced('{', '}', str);\n if (!m || /\\$$/.test(m.pre)) return [str];\n\n var isNumericSequence = /^-?\\d+\\.\\.-?\\d+(?:\\.\\.-?\\d+)?$/.test(m.body);\n var isAlphaSequence = /^[a-zA-Z]\\.\\.[a-zA-Z](?:\\.\\.-?\\d+)?$/.test(m.body);\n var isSequence = isNumericSequence || isAlphaSequence;\n var isOptions = m.body.indexOf(',') >= 0;\n if (!isSequence && !isOptions) {\n // {a},b}\n if (m.post.match(/,.*\\}/)) {\n str = m.pre + '{' + m.body + escClose + m.post;\n return expand(str);\n }\n return [str];\n }\n\n var n;\n if (isSequence) {\n n = m.body.split(/\\.\\./);\n } else {\n n = parseCommaParts(m.body);\n if (n.length === 1) {\n // x{{a,b}}y ==> x{a}y x{b}y\n n = expand(n[0], false).map(embrace);\n if (n.length === 1) {\n var post = m.post.length\n ? expand(m.post, false)\n : [''];\n return post.map(function(p) {\n return m.pre + n[0] + p;\n });\n }\n }\n }\n\n // at this point, n is the parts, and we know it's not a comma set\n // with a single entry.\n\n // no need to expand pre, since it is guaranteed to be free of brace-sets\n var pre = m.pre;\n var post = m.post.length\n ? expand(m.post, false)\n : [''];\n\n var N;\n\n if (isSequence) {\n var x = numeric(n[0]);\n var y = numeric(n[1]);\n var width = Math.max(n[0].length, n[1].length)\n var incr = n.length == 3\n ? 
Math.abs(numeric(n[2]))\n : 1;\n var test = lte;\n var reverse = y < x;\n if (reverse) {\n incr *= -1;\n test = gte;\n }\n var pad = n.some(isPadded);\n\n N = [];\n\n for (var i = x; test(i, y); i += incr) {\n var c;\n if (isAlphaSequence) {\n c = String.fromCharCode(i);\n if (c === '\\\\')\n c = '';\n } else {\n c = String(i);\n if (pad) {\n var need = width - c.length;\n if (need > 0) {\n var z = new Array(need + 1).join('0');\n if (i < 0)\n c = '-' + z + c.slice(1);\n else\n c = z + c;\n }\n }\n }\n N.push(c);\n }\n } else {\n N = concatMap(n, function(el) { return expand(el, false) });\n }\n\n for (var j = 0; j < N.length; j++) {\n for (var k = 0; k < post.length; k++) {\n var expansion = pre + N[j] + post[k];\n if (!isTop || isSequence || expansion)\n expansions.push(expansion);\n }\n }\n\n return expansions;\n}\n\n","module.exports = function (xs, fn) {\n var res = [];\n for (var i = 0; i < xs.length; i++) {\n var x = fn(xs[i], i);\n if (isArray(x)) res.push.apply(res, x);\n else res.push(x);\n }\n return res;\n};\n\nvar isArray = Array.isArray || function (xs) {\n return Object.prototype.toString.call(xs) === '[object Array]';\n};\n","module.exports = minimatch\nminimatch.Minimatch = Minimatch\n\nvar path = (function () { try { return require('path') } catch (e) {}}()) || {\n sep: '/'\n}\nminimatch.sep = path.sep\n\nvar GLOBSTAR = minimatch.GLOBSTAR = Minimatch.GLOBSTAR = {}\nvar expand = require('brace-expansion')\n\nvar plTypes = {\n '!': { open: '(?:(?!(?:', close: '))[^/]*?)'},\n '?': { open: '(?:', close: ')?' },\n '+': { open: '(?:', close: ')+' },\n '*': { open: '(?:', close: ')*' },\n '@': { open: '(?:', close: ')' }\n}\n\n// any single thing other than /\n// don't need to escape / when using new RegExp()\nvar qmark = '[^/]'\n\n// * => any number of characters\nvar star = qmark + '*?'\n\n// ** when dots are allowed. Anything goes, except .. 
and .\n// not (^ or / followed by one or two dots followed by $ or /),\n// followed by anything, any number of times.\nvar twoStarDot = '(?:(?!(?:\\\\\\/|^)(?:\\\\.{1,2})($|\\\\\\/)).)*?'\n\n// not a ^ or / followed by a dot,\n// followed by anything, any number of times.\nvar twoStarNoDot = '(?:(?!(?:\\\\\\/|^)\\\\.).)*?'\n\n// characters that need to be escaped in RegExp.\nvar reSpecials = charSet('().*{}+?[]^$\\\\!')\n\n// \"abc\" -> { a:true, b:true, c:true }\nfunction charSet (s) {\n return s.split('').reduce(function (set, c) {\n set[c] = true\n return set\n }, {})\n}\n\n// normalizes slashes.\nvar slashSplit = /\\/+/\n\nminimatch.filter = filter\nfunction filter (pattern, options) {\n options = options || {}\n return function (p, i, list) {\n return minimatch(p, pattern, options)\n }\n}\n\nfunction ext (a, b) {\n b = b || {}\n var t = {}\n Object.keys(a).forEach(function (k) {\n t[k] = a[k]\n })\n Object.keys(b).forEach(function (k) {\n t[k] = b[k]\n })\n return t\n}\n\nminimatch.defaults = function (def) {\n if (!def || typeof def !== 'object' || !Object.keys(def).length) {\n return minimatch\n }\n\n var orig = minimatch\n\n var m = function minimatch (p, pattern, options) {\n return orig(p, pattern, ext(def, options))\n }\n\n m.Minimatch = function Minimatch (pattern, options) {\n return new orig.Minimatch(pattern, ext(def, options))\n }\n m.Minimatch.defaults = function defaults (options) {\n return orig.defaults(ext(def, options)).Minimatch\n }\n\n m.filter = function filter (pattern, options) {\n return orig.filter(pattern, ext(def, options))\n }\n\n m.defaults = function defaults (options) {\n return orig.defaults(ext(def, options))\n }\n\n m.makeRe = function makeRe (pattern, options) {\n return orig.makeRe(pattern, ext(def, options))\n }\n\n m.braceExpand = function braceExpand (pattern, options) {\n return orig.braceExpand(pattern, ext(def, options))\n }\n\n m.match = function (list, pattern, options) {\n return orig.match(list, pattern, ext(def, options))\n }\n\n return m\n}\n\nMinimatch.defaults = function (def) {\n return minimatch.defaults(def).Minimatch\n}\n\nfunction minimatch (p, pattern, options) {\n assertValidPattern(pattern)\n\n if (!options) options = {}\n\n // shortcut: comments match nothing.\n if (!options.nocomment && pattern.charAt(0) === '#') {\n return false\n }\n\n return new Minimatch(pattern, options).match(p)\n}\n\nfunction Minimatch (pattern, options) {\n if (!(this instanceof Minimatch)) {\n return new Minimatch(pattern, options)\n }\n\n assertValidPattern(pattern)\n\n if (!options) options = {}\n\n pattern = pattern.trim()\n\n // windows support: need to use /, not \\\n if (!options.allowWindowsEscape && path.sep !== '/') {\n pattern = pattern.split(path.sep).join('/')\n }\n\n this.options = options\n this.set = []\n this.pattern = pattern\n this.regexp = null\n this.negate = false\n this.comment = false\n this.empty = false\n this.partial = !!options.partial\n\n // make the set of regexps etc.\n this.make()\n}\n\nMinimatch.prototype.debug = function () {}\n\nMinimatch.prototype.make = make\nfunction make () {\n var pattern = this.pattern\n var options = this.options\n\n // empty patterns and comments match nothing.\n if (!options.nocomment && pattern.charAt(0) === '#') {\n this.comment = true\n return\n }\n if (!pattern) {\n this.empty = true\n return\n }\n\n // step 1: figure out negation, etc.\n this.parseNegate()\n\n // step 2: expand braces\n var set = this.globSet = this.braceExpand()\n\n if (options.debug) this.debug = function debug() { 
console.error.apply(console, arguments) }\n\n this.debug(this.pattern, set)\n\n // step 3: now we have a set, so turn each one into a series of path-portion\n // matching patterns.\n // These will be regexps, except in the case of \"**\", which is\n // set to the GLOBSTAR object for globstar behavior,\n // and will not contain any / characters\n set = this.globParts = set.map(function (s) {\n return s.split(slashSplit)\n })\n\n this.debug(this.pattern, set)\n\n // glob --> regexps\n set = set.map(function (s, si, set) {\n return s.map(this.parse, this)\n }, this)\n\n this.debug(this.pattern, set)\n\n // filter out everything that didn't compile properly.\n set = set.filter(function (s) {\n return s.indexOf(false) === -1\n })\n\n this.debug(this.pattern, set)\n\n this.set = set\n}\n\nMinimatch.prototype.parseNegate = parseNegate\nfunction parseNegate () {\n var pattern = this.pattern\n var negate = false\n var options = this.options\n var negateOffset = 0\n\n if (options.nonegate) return\n\n for (var i = 0, l = pattern.length\n ; i < l && pattern.charAt(i) === '!'\n ; i++) {\n negate = !negate\n negateOffset++\n }\n\n if (negateOffset) this.pattern = pattern.substr(negateOffset)\n this.negate = negate\n}\n\n// Brace expansion:\n// a{b,c}d -> abd acd\n// a{b,}c -> abc ac\n// a{0..3}d -> a0d a1d a2d a3d\n// a{b,c{d,e}f}g -> abg acdfg acefg\n// a{b,c}d{e,f}g -> abdeg acdeg abdeg abdfg\n//\n// Invalid sets are not expanded.\n// a{2..}b -> a{2..}b\n// a{b}c -> a{b}c\nminimatch.braceExpand = function (pattern, options) {\n return braceExpand(pattern, options)\n}\n\nMinimatch.prototype.braceExpand = braceExpand\n\nfunction braceExpand (pattern, options) {\n if (!options) {\n if (this instanceof Minimatch) {\n options = this.options\n } else {\n options = {}\n }\n }\n\n pattern = typeof pattern === 'undefined'\n ? this.pattern : pattern\n\n assertValidPattern(pattern)\n\n // Thanks to Yeting Li for\n // improving this regexp to avoid a ReDOS vulnerability.\n if (options.nobrace || !/\\{(?:(?!\\{).)*\\}/.test(pattern)) {\n // shortcut. no need to expand.\n return [pattern]\n }\n\n return expand(pattern)\n}\n\nvar MAX_PATTERN_LENGTH = 1024 * 64\nvar assertValidPattern = function (pattern) {\n if (typeof pattern !== 'string') {\n throw new TypeError('invalid pattern')\n }\n\n if (pattern.length > MAX_PATTERN_LENGTH) {\n throw new TypeError('pattern is too long')\n }\n}\n\n// parse a component of the expanded set.\n// At this point, no pattern may contain \"/\" in it\n// so we're going to return a 2d array, where each entry is the full\n// pattern, split on '/', and then turned into a regular expression.\n// A regexp is made at the end which joins each array with an\n// escaped /, and another full one which joins each regexp with |.\n//\n// Following the lead of Bash 4.1, note that \"**\" only has special meaning\n// when it is the *only* thing in a path portion. Otherwise, any series\n// of * is equivalent to a single *. Globstar behavior is enabled by\n// default, and can be disabled by setting options.noglobstar.\nMinimatch.prototype.parse = parse\nvar SUBPARSE = {}\nfunction parse (pattern, isSub) {\n assertValidPattern(pattern)\n\n var options = this.options\n\n // shortcuts\n if (pattern === '**') {\n if (!options.noglobstar)\n return GLOBSTAR\n else\n pattern = '*'\n }\n if (pattern === '') return ''\n\n var re = ''\n var hasMagic = !!options.nocase\n var escaping = false\n // ? 
=> one single character\n var patternListStack = []\n var negativeLists = []\n var stateChar\n var inClass = false\n var reClassStart = -1\n var classStart = -1\n // . and .. never match anything that doesn't start with .,\n // even when options.dot is set.\n var patternStart = pattern.charAt(0) === '.' ? '' // anything\n // not (start or / followed by . or .. followed by / or end)\n : options.dot ? '(?!(?:^|\\\\\\/)\\\\.{1,2}(?:$|\\\\\\/))'\n : '(?!\\\\.)'\n var self = this\n\n function clearStateChar () {\n if (stateChar) {\n // we had some state-tracking character\n // that wasn't consumed by this pass.\n switch (stateChar) {\n case '*':\n re += star\n hasMagic = true\n break\n case '?':\n re += qmark\n hasMagic = true\n break\n default:\n re += '\\\\' + stateChar\n break\n }\n self.debug('clearStateChar %j %j', stateChar, re)\n stateChar = false\n }\n }\n\n for (var i = 0, len = pattern.length, c\n ; (i < len) && (c = pattern.charAt(i))\n ; i++) {\n this.debug('%s\\t%s %s %j', pattern, i, re, c)\n\n // skip over any that are escaped.\n if (escaping && reSpecials[c]) {\n re += '\\\\' + c\n escaping = false\n continue\n }\n\n switch (c) {\n /* istanbul ignore next */\n case '/': {\n // completely not allowed, even escaped.\n // Should already be path-split by now.\n return false\n }\n\n case '\\\\':\n clearStateChar()\n escaping = true\n continue\n\n // the various stateChar values\n // for the \"extglob\" stuff.\n case '?':\n case '*':\n case '+':\n case '@':\n case '!':\n this.debug('%s\\t%s %s %j <-- stateChar', pattern, i, re, c)\n\n // all of those are literals inside a class, except that\n // the glob [!a] means [^a] in regexp\n if (inClass) {\n this.debug(' in class')\n if (c === '!' && i === classStart + 1) c = '^'\n re += c\n continue\n }\n\n // if we already have a stateChar, then it means\n // that there was something like ** or +? in there.\n // Handle the stateChar, then proceed with this one.\n self.debug('call clearStateChar %j', stateChar)\n clearStateChar()\n stateChar = c\n // if extglob is disabled, then +(asdf|foo) isn't a thing.\n // just clear the statechar *now*, rather than even diving into\n // the patternList stuff.\n if (options.noext) clearStateChar()\n continue\n\n case '(':\n if (inClass) {\n re += '('\n continue\n }\n\n if (!stateChar) {\n re += '\\\\('\n continue\n }\n\n patternListStack.push({\n type: stateChar,\n start: i - 1,\n reStart: re.length,\n open: plTypes[stateChar].open,\n close: plTypes[stateChar].close\n })\n // negation is (?:(?!js)[^/]*)\n re += stateChar === '!' ? 
'(?:(?!(?:' : '(?:'\n this.debug('plType %j %j', stateChar, re)\n stateChar = false\n continue\n\n case ')':\n if (inClass || !patternListStack.length) {\n re += '\\\\)'\n continue\n }\n\n clearStateChar()\n hasMagic = true\n var pl = patternListStack.pop()\n // negation is (?:(?!js)[^/]*)\n // The others are (?:)\n re += pl.close\n if (pl.type === '!') {\n negativeLists.push(pl)\n }\n pl.reEnd = re.length\n continue\n\n case '|':\n if (inClass || !patternListStack.length || escaping) {\n re += '\\\\|'\n escaping = false\n continue\n }\n\n clearStateChar()\n re += '|'\n continue\n\n // these are mostly the same in regexp and glob\n case '[':\n // swallow any state-tracking char before the [\n clearStateChar()\n\n if (inClass) {\n re += '\\\\' + c\n continue\n }\n\n inClass = true\n classStart = i\n reClassStart = re.length\n re += c\n continue\n\n case ']':\n // a right bracket shall lose its special\n // meaning and represent itself in\n // a bracket expression if it occurs\n // first in the list. -- POSIX.2 2.8.3.2\n if (i === classStart + 1 || !inClass) {\n re += '\\\\' + c\n escaping = false\n continue\n }\n\n // handle the case where we left a class open.\n // \"[z-a]\" is valid, equivalent to \"\\[z-a\\]\"\n // split where the last [ was, make sure we don't have\n // an invalid re. if so, re-walk the contents of the\n // would-be class to re-translate any characters that\n // were passed through as-is\n // TODO: It would probably be faster to determine this\n // without a try/catch and a new RegExp, but it's tricky\n // to do safely. For now, this is safe and works.\n var cs = pattern.substring(classStart + 1, i)\n try {\n RegExp('[' + cs + ']')\n } catch (er) {\n // not a valid class!\n var sp = this.parse(cs, SUBPARSE)\n re = re.substr(0, reClassStart) + '\\\\[' + sp[0] + '\\\\]'\n hasMagic = hasMagic || sp[1]\n inClass = false\n continue\n }\n\n // finish up the class.\n hasMagic = true\n inClass = false\n re += c\n continue\n\n default:\n // swallow any state char that wasn't consumed\n clearStateChar()\n\n if (escaping) {\n // no need\n escaping = false\n } else if (reSpecials[c]\n && !(c === '^' && inClass)) {\n re += '\\\\'\n }\n\n re += c\n\n } // switch\n } // for\n\n // handle the case where we left a class open.\n // \"[abc\" is valid, equivalent to \"\\[abc\"\n if (inClass) {\n // split where the last [ was, and escape it\n // this is a huge pita. We now have to re-walk\n // the contents of the would-be class to re-translate\n // any characters that were passed through as-is\n cs = pattern.substr(classStart + 1)\n sp = this.parse(cs, SUBPARSE)\n re = re.substr(0, reClassStart) + '\\\\[' + sp[0]\n hasMagic = hasMagic || sp[1]\n }\n\n // handle the case where we had a +( thing at the *end*\n // of the pattern.\n // each pattern list stack adds 3 chars, and we need to go through\n // and escape any | chars that were passed through as-is for the regexp.\n // Go through and escape them, taking care not to double-escape any\n // | chars that were already escaped.\n for (pl = patternListStack.pop(); pl; pl = patternListStack.pop()) {\n var tail = re.slice(pl.reStart + pl.open.length)\n this.debug('setting tail', re, pl)\n // maybe some even number of \\, then maybe 1 \\, followed by a |\n tail = tail.replace(/((?:\\\\{2}){0,64})(\\\\?)\\|/g, function (_, $1, $2) {\n if (!$2) {\n // the | isn't already escaped, so escape it.\n $2 = '\\\\'\n }\n\n // need to escape all those slashes *again*, without escaping the\n // one that we need for escaping the | character. 
As it works out,\n // escaping an even number of slashes can be done by simply repeating\n // it exactly after itself. That's why this trick works.\n //\n // I am sorry that you have to see this.\n return $1 + $1 + $2 + '|'\n })\n\n this.debug('tail=%j\\n %s', tail, tail, pl, re)\n var t = pl.type === '*' ? star\n : pl.type === '?' ? qmark\n : '\\\\' + pl.type\n\n hasMagic = true\n re = re.slice(0, pl.reStart) + t + '\\\\(' + tail\n }\n\n // handle trailing things that only matter at the very end.\n clearStateChar()\n if (escaping) {\n // trailing \\\\\n re += '\\\\\\\\'\n }\n\n // only need to apply the nodot start if the re starts with\n // something that could conceivably capture a dot\n var addPatternStart = false\n switch (re.charAt(0)) {\n case '[': case '.': case '(': addPatternStart = true\n }\n\n // Hack to work around lack of negative lookbehind in JS\n // A pattern like: *.!(x).!(y|z) needs to ensure that a name\n // like 'a.xyz.yz' doesn't match. So, the first negative\n // lookahead, has to look ALL the way ahead, to the end of\n // the pattern.\n for (var n = negativeLists.length - 1; n > -1; n--) {\n var nl = negativeLists[n]\n\n var nlBefore = re.slice(0, nl.reStart)\n var nlFirst = re.slice(nl.reStart, nl.reEnd - 8)\n var nlLast = re.slice(nl.reEnd - 8, nl.reEnd)\n var nlAfter = re.slice(nl.reEnd)\n\n nlLast += nlAfter\n\n // Handle nested stuff like *(*.js|!(*.json)), where open parens\n // mean that we should *not* include the ) in the bit that is considered\n // \"after\" the negated section.\n var openParensBefore = nlBefore.split('(').length - 1\n var cleanAfter = nlAfter\n for (i = 0; i < openParensBefore; i++) {\n cleanAfter = cleanAfter.replace(/\\)[+*?]?/, '')\n }\n nlAfter = cleanAfter\n\n var dollar = ''\n if (nlAfter === '' && isSub !== SUBPARSE) {\n dollar = '$'\n }\n var newRe = nlBefore + nlFirst + nlAfter + dollar + nlLast\n re = newRe\n }\n\n // if the re is not \"\" at this point, then we need to make sure\n // it doesn't match against an empty path part.\n // Otherwise a/* will match a/, which it should not.\n if (re !== '' && hasMagic) {\n re = '(?=.)' + re\n }\n\n if (addPatternStart) {\n re = patternStart + re\n }\n\n // parsing just a piece of a larger pattern.\n if (isSub === SUBPARSE) {\n return [re, hasMagic]\n }\n\n // skip the regexp for non-magical patterns\n // unescape anything in it, though, so that it'll be\n // an exact match against a file etc.\n if (!hasMagic) {\n return globUnescape(pattern)\n }\n\n var flags = options.nocase ? 'i' : ''\n try {\n var regExp = new RegExp('^' + re + '$', flags)\n } catch (er) /* istanbul ignore next - should be impossible */ {\n // If it was an invalid regular expression, then it can't match\n // anything. This trick looks for a character after the end of\n // the string, which is of course impossible, except in multi-line\n // mode, but it's not a /m regex.\n return new RegExp('$.')\n }\n\n regExp._glob = pattern\n regExp._src = re\n\n return regExp\n}\n\nminimatch.makeRe = function (pattern, options) {\n return new Minimatch(pattern, options || {}).makeRe()\n}\n\nMinimatch.prototype.makeRe = makeRe\nfunction makeRe () {\n if (this.regexp || this.regexp === false) return this.regexp\n\n // at this point, this.set is a 2d array of partial\n // pattern strings, or \"**\".\n //\n // It's better to use .match(). 
This function shouldn't\n // be used, really, but it's pretty convenient sometimes,\n // when you just want to work with a regex.\n var set = this.set\n\n if (!set.length) {\n this.regexp = false\n return this.regexp\n }\n var options = this.options\n\n var twoStar = options.noglobstar ? star\n : options.dot ? twoStarDot\n : twoStarNoDot\n var flags = options.nocase ? 'i' : ''\n\n var re = set.map(function (pattern) {\n return pattern.map(function (p) {\n return (p === GLOBSTAR) ? twoStar\n : (typeof p === 'string') ? regExpEscape(p)\n : p._src\n }).join('\\\\\\/')\n }).join('|')\n\n // must match entire pattern\n // ending in a * or ** will make it less strict.\n re = '^(?:' + re + ')$'\n\n // can match anything, as long as it's not this.\n if (this.negate) re = '^(?!' + re + ').*$'\n\n try {\n this.regexp = new RegExp(re, flags)\n } catch (ex) /* istanbul ignore next - should be impossible */ {\n this.regexp = false\n }\n return this.regexp\n}\n\nminimatch.match = function (list, pattern, options) {\n options = options || {}\n var mm = new Minimatch(pattern, options)\n list = list.filter(function (f) {\n return mm.match(f)\n })\n if (mm.options.nonull && !list.length) {\n list.push(pattern)\n }\n return list\n}\n\nMinimatch.prototype.match = function match (f, partial) {\n if (typeof partial === 'undefined') partial = this.partial\n this.debug('match', f, this.pattern)\n // short-circuit in the case of busted things.\n // comments, etc.\n if (this.comment) return false\n if (this.empty) return f === ''\n\n if (f === '/' && partial) return true\n\n var options = this.options\n\n // windows: need to use /, not \\\n if (path.sep !== '/') {\n f = f.split(path.sep).join('/')\n }\n\n // treat the test path as a set of pathparts.\n f = f.split(slashSplit)\n this.debug(this.pattern, 'split', f)\n\n // just ONE of the pattern sets in this.set needs to match\n // in order for it to be valid. If negating, then just one\n // match means that we have failed.\n // Either way, return on the first hit.\n\n var set = this.set\n this.debug(this.pattern, 'set', set)\n\n // Find the basename of the path by looking for the last non-empty segment\n var filename\n var i\n for (i = f.length - 1; i >= 0; i--) {\n filename = f[i]\n if (filename) break\n }\n\n for (i = 0; i < set.length; i++) {\n var pattern = set[i]\n var file = f\n if (options.matchBase && pattern.length === 1) {\n file = [filename]\n }\n var hit = this.matchOne(file, pattern, partial)\n if (hit) {\n if (options.flipNegate) return true\n return !this.negate\n }\n }\n\n // didn't get any hits. 
this is success if it's a negative\n // pattern, failure otherwise.\n if (options.flipNegate) return false\n return this.negate\n}\n\n// set partial to true to test if, for example,\n// \"/a/b\" matches the start of \"/*/b/*/d\"\n// Partial means, if you run out of file before you run\n// out of pattern, then that's fine, as long as all\n// the parts match.\nMinimatch.prototype.matchOne = function (file, pattern, partial) {\n var options = this.options\n\n this.debug('matchOne',\n { 'this': this, file: file, pattern: pattern })\n\n this.debug('matchOne', file.length, pattern.length)\n\n for (var fi = 0,\n pi = 0,\n fl = file.length,\n pl = pattern.length\n ; (fi < fl) && (pi < pl)\n ; fi++, pi++) {\n this.debug('matchOne loop')\n var p = pattern[pi]\n var f = file[fi]\n\n this.debug(pattern, p, f)\n\n // should be impossible.\n // some invalid regexp stuff in the set.\n /* istanbul ignore if */\n if (p === false) return false\n\n if (p === GLOBSTAR) {\n this.debug('GLOBSTAR', [pattern, p, f])\n\n // \"**\"\n // a/**/b/**/c would match the following:\n // a/b/x/y/z/c\n // a/x/y/z/b/c\n // a/b/x/b/x/c\n // a/b/c\n // To do this, take the rest of the pattern after\n // the **, and see if it would match the file remainder.\n // If so, return success.\n // If not, the ** \"swallows\" a segment, and try again.\n // This is recursively awful.\n //\n // a/**/b/**/c matching a/b/x/y/z/c\n // - a matches a\n // - doublestar\n // - matchOne(b/x/y/z/c, b/**/c)\n // - b matches b\n // - doublestar\n // - matchOne(x/y/z/c, c) -> no\n // - matchOne(y/z/c, c) -> no\n // - matchOne(z/c, c) -> no\n // - matchOne(c, c) yes, hit\n var fr = fi\n var pr = pi + 1\n if (pr === pl) {\n this.debug('** at the end')\n // a ** at the end will just swallow the rest.\n // We have found a match.\n // however, it will not swallow /.x, unless\n // options.dot is set.\n // . and .. are *never* matched by **, for explosively\n // exponential reasons.\n for (; fi < fl; fi++) {\n if (file[fi] === '.' || file[fi] === '..' ||\n (!options.dot && file[fi].charAt(0) === '.')) return false\n }\n return true\n }\n\n // ok, let's see if we can swallow whatever we can.\n while (fr < fl) {\n var swallowee = file[fr]\n\n this.debug('\\nglobstar while', file, fr, pattern, pr, swallowee)\n\n // XXX remove this slice. Just pass the start index.\n if (this.matchOne(file.slice(fr), pattern.slice(pr), partial)) {\n this.debug('globstar found match!', fr, fl, swallowee)\n // found a match.\n return true\n } else {\n // can't swallow \".\" or \"..\" ever.\n // can only swallow \".foo\" when explicitly asked.\n if (swallowee === '.' || swallowee === '..' 
||\n (!options.dot && swallowee.charAt(0) === '.')) {\n this.debug('dot detected!', file, fr, pattern, pr)\n break\n }\n\n // ** swallows a segment, and continue.\n this.debug('globstar swallow a segment, and continue')\n fr++\n }\n }\n\n // no match was found.\n // However, in partial mode, we can't say this is necessarily over.\n // If there's more *pattern* left, then\n /* istanbul ignore if */\n if (partial) {\n // ran out of file\n this.debug('\\n>>> no match, partial?', file, fr, pattern, pr)\n if (fr === fl) return true\n }\n return false\n }\n\n // something other than **\n // non-magic patterns just have to match exactly\n // patterns with magic have been turned into regexps.\n var hit\n if (typeof p === 'string') {\n hit = f === p\n this.debug('string match', p, f, hit)\n } else {\n hit = f.match(p)\n this.debug('pattern match', p, f, hit)\n }\n\n if (!hit) return false\n }\n\n // Note: ending in / means that we'll get a final \"\"\n // at the end of the pattern. This can only match a\n // corresponding \"\" at the end of the file.\n // If the file ends in /, then it can only match a\n // a pattern that ends in /, unless the pattern just\n // doesn't have any more for it. But, a/b/ should *not*\n // match \"a/b/*\", even though \"\" matches against the\n // [^/]*? pattern, except in partial mode, where it might\n // simply not be reached yet.\n // However, a/b/ should still satisfy a/*\n\n // now either we fell off the end of the pattern, or we're done.\n if (fi === fl && pi === pl) {\n // ran out of pattern and filename at the same time.\n // an exact hit!\n return true\n } else if (fi === fl) {\n // ran out of file, but still had pattern left.\n // this is ok if we're doing the match as part of\n // a glob fs traversal.\n return partial\n } else /* istanbul ignore else */ if (pi === pl) {\n // ran out of pattern, still have file left.\n // this is only acceptable if we're on the very last\n // empty segment of a file with a trailing slash.\n // a/* should match a/b/\n return (fi === fl - 1) && (file[fi] === '')\n }\n\n // should be unreachable.\n /* istanbul ignore next */\n throw new Error('wtf?')\n}\n\n// replace stuff like \\* with *\nfunction globUnescape (s) {\n return s.replace(/\\\\(.)/g, '$1')\n}\n\nfunction regExpEscape (s) {\n return s.replace(/[-[\\]{}()*+?.,\\\\^$|#\\s]/g, '\\\\$&')\n}\n","module.exports = require('./lib/tunnel');\n","'use strict';\n\nvar net = require('net');\nvar tls = require('tls');\nvar http = require('http');\nvar https = require('https');\nvar events = require('events');\nvar assert = require('assert');\nvar util = require('util');\n\n\nexports.httpOverHttp = httpOverHttp;\nexports.httpsOverHttp = httpsOverHttp;\nexports.httpOverHttps = httpOverHttps;\nexports.httpsOverHttps = httpsOverHttps;\n\n\nfunction httpOverHttp(options) {\n var agent = new TunnelingAgent(options);\n agent.request = http.request;\n return agent;\n}\n\nfunction httpsOverHttp(options) {\n var agent = new TunnelingAgent(options);\n agent.request = http.request;\n agent.createSocket = createSecureSocket;\n agent.defaultPort = 443;\n return agent;\n}\n\nfunction httpOverHttps(options) {\n var agent = new TunnelingAgent(options);\n agent.request = https.request;\n return agent;\n}\n\nfunction httpsOverHttps(options) {\n var agent = new TunnelingAgent(options);\n agent.request = https.request;\n agent.createSocket = createSecureSocket;\n agent.defaultPort = 443;\n return agent;\n}\n\n\nfunction TunnelingAgent(options) {\n var self = this;\n self.options = options || {};\n 
self.proxyOptions = self.options.proxy || {};\n self.maxSockets = self.options.maxSockets || http.Agent.defaultMaxSockets;\n self.requests = [];\n self.sockets = [];\n\n self.on('free', function onFree(socket, host, port, localAddress) {\n var options = toOptions(host, port, localAddress);\n for (var i = 0, len = self.requests.length; i < len; ++i) {\n var pending = self.requests[i];\n if (pending.host === options.host && pending.port === options.port) {\n // Detect the request to connect same origin server,\n // reuse the connection.\n self.requests.splice(i, 1);\n pending.request.onSocket(socket);\n return;\n }\n }\n socket.destroy();\n self.removeSocket(socket);\n });\n}\nutil.inherits(TunnelingAgent, events.EventEmitter);\n\nTunnelingAgent.prototype.addRequest = function addRequest(req, host, port, localAddress) {\n var self = this;\n var options = mergeOptions({request: req}, self.options, toOptions(host, port, localAddress));\n\n if (self.sockets.length >= this.maxSockets) {\n // We are over limit so we'll add it to the queue.\n self.requests.push(options);\n return;\n }\n\n // If we are under maxSockets create a new one.\n self.createSocket(options, function(socket) {\n socket.on('free', onFree);\n socket.on('close', onCloseOrRemove);\n socket.on('agentRemove', onCloseOrRemove);\n req.onSocket(socket);\n\n function onFree() {\n self.emit('free', socket, options);\n }\n\n function onCloseOrRemove(err) {\n self.removeSocket(socket);\n socket.removeListener('free', onFree);\n socket.removeListener('close', onCloseOrRemove);\n socket.removeListener('agentRemove', onCloseOrRemove);\n }\n });\n};\n\nTunnelingAgent.prototype.createSocket = function createSocket(options, cb) {\n var self = this;\n var placeholder = {};\n self.sockets.push(placeholder);\n\n var connectOptions = mergeOptions({}, self.proxyOptions, {\n method: 'CONNECT',\n path: options.host + ':' + options.port,\n agent: false,\n headers: {\n host: options.host + ':' + options.port\n }\n });\n if (options.localAddress) {\n connectOptions.localAddress = options.localAddress;\n }\n if (connectOptions.proxyAuth) {\n connectOptions.headers = connectOptions.headers || {};\n connectOptions.headers['Proxy-Authorization'] = 'Basic ' +\n new Buffer(connectOptions.proxyAuth).toString('base64');\n }\n\n debug('making CONNECT request');\n var connectReq = self.request(connectOptions);\n connectReq.useChunkedEncodingByDefault = false; // for v0.6\n connectReq.once('response', onResponse); // for v0.6\n connectReq.once('upgrade', onUpgrade); // for v0.6\n connectReq.once('connect', onConnect); // for v0.7 or later\n connectReq.once('error', onError);\n connectReq.end();\n\n function onResponse(res) {\n // Very hacky. 
This is necessary to avoid http-parser leaks.\n res.upgrade = true;\n }\n\n function onUpgrade(res, socket, head) {\n // Hacky.\n process.nextTick(function() {\n onConnect(res, socket, head);\n });\n }\n\n function onConnect(res, socket, head) {\n connectReq.removeAllListeners();\n socket.removeAllListeners();\n\n if (res.statusCode !== 200) {\n debug('tunneling socket could not be established, statusCode=%d',\n res.statusCode);\n socket.destroy();\n var error = new Error('tunneling socket could not be established, ' +\n 'statusCode=' + res.statusCode);\n error.code = 'ECONNRESET';\n options.request.emit('error', error);\n self.removeSocket(placeholder);\n return;\n }\n if (head.length > 0) {\n debug('got illegal response body from proxy');\n socket.destroy();\n var error = new Error('got illegal response body from proxy');\n error.code = 'ECONNRESET';\n options.request.emit('error', error);\n self.removeSocket(placeholder);\n return;\n }\n debug('tunneling connection has established');\n self.sockets[self.sockets.indexOf(placeholder)] = socket;\n return cb(socket);\n }\n\n function onError(cause) {\n connectReq.removeAllListeners();\n\n debug('tunneling socket could not be established, cause=%s\\n',\n cause.message, cause.stack);\n var error = new Error('tunneling socket could not be established, ' +\n 'cause=' + cause.message);\n error.code = 'ECONNRESET';\n options.request.emit('error', error);\n self.removeSocket(placeholder);\n }\n};\n\nTunnelingAgent.prototype.removeSocket = function removeSocket(socket) {\n var pos = this.sockets.indexOf(socket)\n if (pos === -1) {\n return;\n }\n this.sockets.splice(pos, 1);\n\n var pending = this.requests.shift();\n if (pending) {\n // If we have pending requests and a socket gets closed a new one\n // needs to be created to take over in the pool for the one that closed.\n this.createSocket(pending, function(socket) {\n pending.request.onSocket(socket);\n });\n }\n};\n\nfunction createSecureSocket(options, cb) {\n var self = this;\n TunnelingAgent.prototype.createSocket.call(self, options, function(socket) {\n var hostHeader = options.request.getHeader('host');\n var tlsOptions = mergeOptions({}, self.options, {\n socket: socket,\n servername: hostHeader ? 
hostHeader.replace(/:.*$/, '') : options.host\n });\n\n // 0 is dummy port for v0.6\n var secureSocket = tls.connect(0, tlsOptions);\n self.sockets[self.sockets.indexOf(socket)] = secureSocket;\n cb(secureSocket);\n });\n}\n\n\nfunction toOptions(host, port, localAddress) {\n if (typeof host === 'string') { // since v0.10\n return {\n host: host,\n port: port,\n localAddress: localAddress\n };\n }\n return host; // for v0.11 or later\n}\n\nfunction mergeOptions(target) {\n for (var i = 1, len = arguments.length; i < len; ++i) {\n var overrides = arguments[i];\n if (typeof overrides === 'object') {\n var keys = Object.keys(overrides);\n for (var j = 0, keyLen = keys.length; j < keyLen; ++j) {\n var k = keys[j];\n if (overrides[k] !== undefined) {\n target[k] = overrides[k];\n }\n }\n }\n }\n return target;\n}\n\n\nvar debug;\nif (process.env.NODE_DEBUG && /\\btunnel\\b/.test(process.env.NODE_DEBUG)) {\n debug = function() {\n var args = Array.prototype.slice.call(arguments);\n if (typeof args[0] === 'string') {\n args[0] = 'TUNNEL: ' + args[0];\n } else {\n args.unshift('TUNNEL:');\n }\n console.error.apply(console, args);\n }\n} else {\n debug = function() {};\n}\nexports.debug = debug; // for test\n","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nObject.defineProperty(exports, \"v1\", {\n enumerable: true,\n get: function () {\n return _v.default;\n }\n});\nObject.defineProperty(exports, \"v3\", {\n enumerable: true,\n get: function () {\n return _v2.default;\n }\n});\nObject.defineProperty(exports, \"v4\", {\n enumerable: true,\n get: function () {\n return _v3.default;\n }\n});\nObject.defineProperty(exports, \"v5\", {\n enumerable: true,\n get: function () {\n return _v4.default;\n }\n});\nObject.defineProperty(exports, \"NIL\", {\n enumerable: true,\n get: function () {\n return _nil.default;\n }\n});\nObject.defineProperty(exports, \"version\", {\n enumerable: true,\n get: function () {\n return _version.default;\n }\n});\nObject.defineProperty(exports, \"validate\", {\n enumerable: true,\n get: function () {\n return _validate.default;\n }\n});\nObject.defineProperty(exports, \"stringify\", {\n enumerable: true,\n get: function () {\n return _stringify.default;\n }\n});\nObject.defineProperty(exports, \"parse\", {\n enumerable: true,\n get: function () {\n return _parse.default;\n }\n});\n\nvar _v = _interopRequireDefault(require(\"./v1.js\"));\n\nvar _v2 = _interopRequireDefault(require(\"./v3.js\"));\n\nvar _v3 = _interopRequireDefault(require(\"./v4.js\"));\n\nvar _v4 = _interopRequireDefault(require(\"./v5.js\"));\n\nvar _nil = _interopRequireDefault(require(\"./nil.js\"));\n\nvar _version = _interopRequireDefault(require(\"./version.js\"));\n\nvar _validate = _interopRequireDefault(require(\"./validate.js\"));\n\nvar _stringify = _interopRequireDefault(require(\"./stringify.js\"));\n\nvar _parse = _interopRequireDefault(require(\"./parse.js\"));\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = void 0;\n\nvar _crypto = _interopRequireDefault(require(\"crypto\"));\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; }\n\nfunction md5(bytes) {\n if (Array.isArray(bytes)) {\n bytes = Buffer.from(bytes);\n } else if (typeof bytes === 'string') {\n bytes = Buffer.from(bytes, 'utf8');\n }\n\n return _crypto.default.createHash('md5').update(bytes).digest();\n}\n\nvar _default = md5;\nexports.default = _default;","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = void 0;\nvar _default = '00000000-0000-0000-0000-000000000000';\nexports.default = _default;","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = void 0;\n\nvar _validate = _interopRequireDefault(require(\"./validate.js\"));\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }\n\nfunction parse(uuid) {\n if (!(0, _validate.default)(uuid)) {\n throw TypeError('Invalid UUID');\n }\n\n let v;\n const arr = new Uint8Array(16); // Parse ########-....-....-....-............\n\n arr[0] = (v = parseInt(uuid.slice(0, 8), 16)) >>> 24;\n arr[1] = v >>> 16 & 0xff;\n arr[2] = v >>> 8 & 0xff;\n arr[3] = v & 0xff; // Parse ........-####-....-....-............\n\n arr[4] = (v = parseInt(uuid.slice(9, 13), 16)) >>> 8;\n arr[5] = v & 0xff; // Parse ........-....-####-....-............\n\n arr[6] = (v = parseInt(uuid.slice(14, 18), 16)) >>> 8;\n arr[7] = v & 0xff; // Parse ........-....-....-####-............\n\n arr[8] = (v = parseInt(uuid.slice(19, 23), 16)) >>> 8;\n arr[9] = v & 0xff; // Parse ........-....-....-....-############\n // (Use \"/\" to avoid 32-bit truncation when bit-shifting high-order bytes)\n\n arr[10] = (v = parseInt(uuid.slice(24, 36), 16)) / 0x10000000000 & 0xff;\n arr[11] = v / 0x100000000 & 0xff;\n arr[12] = v >>> 24 & 0xff;\n arr[13] = v >>> 16 & 0xff;\n arr[14] = v >>> 8 & 0xff;\n arr[15] = v & 0xff;\n return arr;\n}\n\nvar _default = parse;\nexports.default = _default;","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = void 0;\nvar _default = /^(?:[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}|00000000-0000-0000-0000-000000000000)$/i;\nexports.default = _default;","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = rng;\n\nvar _crypto = _interopRequireDefault(require(\"crypto\"));\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }\n\nconst rnds8Pool = new Uint8Array(256); // # of random values to pre-allocate\n\nlet poolPtr = rnds8Pool.length;\n\nfunction rng() {\n if (poolPtr > rnds8Pool.length - 16) {\n _crypto.default.randomFillSync(rnds8Pool);\n\n poolPtr = 0;\n }\n\n return rnds8Pool.slice(poolPtr, poolPtr += 16);\n}","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = void 0;\n\nvar _crypto = _interopRequireDefault(require(\"crypto\"));\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; }\n\nfunction sha1(bytes) {\n if (Array.isArray(bytes)) {\n bytes = Buffer.from(bytes);\n } else if (typeof bytes === 'string') {\n bytes = Buffer.from(bytes, 'utf8');\n }\n\n return _crypto.default.createHash('sha1').update(bytes).digest();\n}\n\nvar _default = sha1;\nexports.default = _default;","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = void 0;\n\nvar _validate = _interopRequireDefault(require(\"./validate.js\"));\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }\n\n/**\n * Convert array of 16 byte values to UUID string format of the form:\n * XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX\n */\nconst byteToHex = [];\n\nfor (let i = 0; i < 256; ++i) {\n byteToHex.push((i + 0x100).toString(16).substr(1));\n}\n\nfunction stringify(arr, offset = 0) {\n // Note: Be careful editing this code! It's been tuned for performance\n // and works in ways you may not expect. See https://github.com/uuidjs/uuid/pull/434\n const uuid = (byteToHex[arr[offset + 0]] + byteToHex[arr[offset + 1]] + byteToHex[arr[offset + 2]] + byteToHex[arr[offset + 3]] + '-' + byteToHex[arr[offset + 4]] + byteToHex[arr[offset + 5]] + '-' + byteToHex[arr[offset + 6]] + byteToHex[arr[offset + 7]] + '-' + byteToHex[arr[offset + 8]] + byteToHex[arr[offset + 9]] + '-' + byteToHex[arr[offset + 10]] + byteToHex[arr[offset + 11]] + byteToHex[arr[offset + 12]] + byteToHex[arr[offset + 13]] + byteToHex[arr[offset + 14]] + byteToHex[arr[offset + 15]]).toLowerCase(); // Consistency check for valid UUID. If this throws, it's likely due to one\n // of the following:\n // - One or more input array values don't map to a hex octet (leading to\n // \"undefined\" in the uuid)\n // - Invalid input values for the RFC `version` or `variant` fields\n\n if (!(0, _validate.default)(uuid)) {\n throw TypeError('Stringified UUID is invalid');\n }\n\n return uuid;\n}\n\nvar _default = stringify;\nexports.default = _default;","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = void 0;\n\nvar _rng = _interopRequireDefault(require(\"./rng.js\"));\n\nvar _stringify = _interopRequireDefault(require(\"./stringify.js\"));\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }\n\n// **`v1()` - Generate time-based UUID**\n//\n// Inspired by https://github.com/LiosK/UUID.js\n// and http://docs.python.org/library/uuid.html\nlet _nodeId;\n\nlet _clockseq; // Previous uuid creation time\n\n\nlet _lastMSecs = 0;\nlet _lastNSecs = 0; // See https://github.com/uuidjs/uuid for API details\n\nfunction v1(options, buf, offset) {\n let i = buf && offset || 0;\n const b = buf || new Array(16);\n options = options || {};\n let node = options.node || _nodeId;\n let clockseq = options.clockseq !== undefined ? options.clockseq : _clockseq; // node and clockseq need to be initialized to random values if they're not\n // specified. We do this lazily to minimize issues related to insufficient\n // system entropy. 
See #189\n\n if (node == null || clockseq == null) {\n const seedBytes = options.random || (options.rng || _rng.default)();\n\n if (node == null) {\n // Per 4.5, create and 48-bit node id, (47 random bits + multicast bit = 1)\n node = _nodeId = [seedBytes[0] | 0x01, seedBytes[1], seedBytes[2], seedBytes[3], seedBytes[4], seedBytes[5]];\n }\n\n if (clockseq == null) {\n // Per 4.2.2, randomize (14 bit) clockseq\n clockseq = _clockseq = (seedBytes[6] << 8 | seedBytes[7]) & 0x3fff;\n }\n } // UUID timestamps are 100 nano-second units since the Gregorian epoch,\n // (1582-10-15 00:00). JSNumbers aren't precise enough for this, so\n // time is handled internally as 'msecs' (integer milliseconds) and 'nsecs'\n // (100-nanoseconds offset from msecs) since unix epoch, 1970-01-01 00:00.\n\n\n let msecs = options.msecs !== undefined ? options.msecs : Date.now(); // Per 4.2.1.2, use count of uuid's generated during the current clock\n // cycle to simulate higher resolution clock\n\n let nsecs = options.nsecs !== undefined ? options.nsecs : _lastNSecs + 1; // Time since last uuid creation (in msecs)\n\n const dt = msecs - _lastMSecs + (nsecs - _lastNSecs) / 10000; // Per 4.2.1.2, Bump clockseq on clock regression\n\n if (dt < 0 && options.clockseq === undefined) {\n clockseq = clockseq + 1 & 0x3fff;\n } // Reset nsecs if clock regresses (new clockseq) or we've moved onto a new\n // time interval\n\n\n if ((dt < 0 || msecs > _lastMSecs) && options.nsecs === undefined) {\n nsecs = 0;\n } // Per 4.2.1.2 Throw error if too many uuids are requested\n\n\n if (nsecs >= 10000) {\n throw new Error(\"uuid.v1(): Can't create more than 10M uuids/sec\");\n }\n\n _lastMSecs = msecs;\n _lastNSecs = nsecs;\n _clockseq = clockseq; // Per 4.1.4 - Convert from unix epoch to Gregorian epoch\n\n msecs += 12219292800000; // `time_low`\n\n const tl = ((msecs & 0xfffffff) * 10000 + nsecs) % 0x100000000;\n b[i++] = tl >>> 24 & 0xff;\n b[i++] = tl >>> 16 & 0xff;\n b[i++] = tl >>> 8 & 0xff;\n b[i++] = tl & 0xff; // `time_mid`\n\n const tmh = msecs / 0x100000000 * 10000 & 0xfffffff;\n b[i++] = tmh >>> 8 & 0xff;\n b[i++] = tmh & 0xff; // `time_high_and_version`\n\n b[i++] = tmh >>> 24 & 0xf | 0x10; // include version\n\n b[i++] = tmh >>> 16 & 0xff; // `clock_seq_hi_and_reserved` (Per 4.2.2 - include variant)\n\n b[i++] = clockseq >>> 8 | 0x80; // `clock_seq_low`\n\n b[i++] = clockseq & 0xff; // `node`\n\n for (let n = 0; n < 6; ++n) {\n b[i + n] = node[n];\n }\n\n return buf || (0, _stringify.default)(b);\n}\n\nvar _default = v1;\nexports.default = _default;","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = void 0;\n\nvar _v = _interopRequireDefault(require(\"./v35.js\"));\n\nvar _md = _interopRequireDefault(require(\"./md5.js\"));\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }\n\nconst v3 = (0, _v.default)('v3', 0x30, _md.default);\nvar _default = v3;\nexports.default = _default;","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = _default;\nexports.URL = exports.DNS = void 0;\n\nvar _stringify = _interopRequireDefault(require(\"./stringify.js\"));\n\nvar _parse = _interopRequireDefault(require(\"./parse.js\"));\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; }\n\nfunction stringToBytes(str) {\n str = unescape(encodeURIComponent(str)); // UTF8 escape\n\n const bytes = [];\n\n for (let i = 0; i < str.length; ++i) {\n bytes.push(str.charCodeAt(i));\n }\n\n return bytes;\n}\n\nconst DNS = '6ba7b810-9dad-11d1-80b4-00c04fd430c8';\nexports.DNS = DNS;\nconst URL = '6ba7b811-9dad-11d1-80b4-00c04fd430c8';\nexports.URL = URL;\n\nfunction _default(name, version, hashfunc) {\n function generateUUID(value, namespace, buf, offset) {\n if (typeof value === 'string') {\n value = stringToBytes(value);\n }\n\n if (typeof namespace === 'string') {\n namespace = (0, _parse.default)(namespace);\n }\n\n if (namespace.length !== 16) {\n throw TypeError('Namespace must be array-like (16 iterable integer values, 0-255)');\n } // Compute hash of namespace and value, Per 4.3\n // Future: Use spread syntax when supported on all platforms, e.g. `bytes =\n // hashfunc([...namespace, ... value])`\n\n\n let bytes = new Uint8Array(16 + value.length);\n bytes.set(namespace);\n bytes.set(value, namespace.length);\n bytes = hashfunc(bytes);\n bytes[6] = bytes[6] & 0x0f | version;\n bytes[8] = bytes[8] & 0x3f | 0x80;\n\n if (buf) {\n offset = offset || 0;\n\n for (let i = 0; i < 16; ++i) {\n buf[offset + i] = bytes[i];\n }\n\n return buf;\n }\n\n return (0, _stringify.default)(bytes);\n } // Function#name is not settable on some platforms (#270)\n\n\n try {\n generateUUID.name = name; // eslint-disable-next-line no-empty\n } catch (err) {} // For CommonJS default export support\n\n\n generateUUID.DNS = DNS;\n generateUUID.URL = URL;\n return generateUUID;\n}","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = void 0;\n\nvar _rng = _interopRequireDefault(require(\"./rng.js\"));\n\nvar _stringify = _interopRequireDefault(require(\"./stringify.js\"));\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }\n\nfunction v4(options, buf, offset) {\n options = options || {};\n\n const rnds = options.random || (options.rng || _rng.default)(); // Per 4.4, set bits for version and `clock_seq_hi_and_reserved`\n\n\n rnds[6] = rnds[6] & 0x0f | 0x40;\n rnds[8] = rnds[8] & 0x3f | 0x80; // Copy bytes to buffer, if provided\n\n if (buf) {\n offset = offset || 0;\n\n for (let i = 0; i < 16; ++i) {\n buf[offset + i] = rnds[i];\n }\n\n return buf;\n }\n\n return (0, _stringify.default)(rnds);\n}\n\nvar _default = v4;\nexports.default = _default;","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = void 0;\n\nvar _v = _interopRequireDefault(require(\"./v35.js\"));\n\nvar _sha = _interopRequireDefault(require(\"./sha1.js\"));\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }\n\nconst v5 = (0, _v.default)('v5', 0x50, _sha.default);\nvar _default = v5;\nexports.default = _default;","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = void 0;\n\nvar _regex = _interopRequireDefault(require(\"./regex.js\"));\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; }\n\nfunction validate(uuid) {\n return typeof uuid === 'string' && _regex.default.test(uuid);\n}\n\nvar _default = validate;\nexports.default = _default;","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = void 0;\n\nvar _validate = _interopRequireDefault(require(\"./validate.js\"));\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }\n\nfunction version(uuid) {\n if (!(0, _validate.default)(uuid)) {\n throw TypeError('Invalid UUID');\n }\n\n return parseInt(uuid.substr(14, 1), 16);\n}\n\nvar _default = version;\nexports.default = _default;","module.exports = require(\"assert\");","module.exports = require(\"crypto\");","module.exports = require(\"events\");","module.exports = require(\"fs\");","module.exports = require(\"http\");","module.exports = require(\"https\");","module.exports = require(\"net\");","module.exports = require(\"os\");","module.exports = require(\"path\");","module.exports = require(\"stream\");","module.exports = require(\"tls\");","module.exports = require(\"util\");","'use strict';\n\nvar Node = require('../nodes/Node.js');\nvar Scalar = require('../nodes/Scalar.js');\nvar resolveBlockMap = require('./resolve-block-map.js');\nvar resolveBlockSeq = require('./resolve-block-seq.js');\nvar resolveFlowCollection = require('./resolve-flow-collection.js');\n\nfunction composeCollection(CN, ctx, token, tagToken, onError) {\n let coll;\n switch (token.type) {\n case 'block-map': {\n coll = resolveBlockMap.resolveBlockMap(CN, ctx, token, onError);\n break;\n }\n case 'block-seq': {\n coll = resolveBlockSeq.resolveBlockSeq(CN, ctx, token, onError);\n break;\n }\n case 'flow-collection': {\n coll = resolveFlowCollection.resolveFlowCollection(CN, ctx, token, onError);\n break;\n }\n }\n if (!tagToken)\n return coll;\n const tagName = ctx.directives.tagName(tagToken.source, msg => onError(tagToken, 'TAG_RESOLVE_FAILED', msg));\n if (!tagName)\n return coll;\n // Cast needed due to: https://github.com/Microsoft/TypeScript/issues/3841\n const Coll = coll.constructor;\n if (tagName === '!' || tagName === Coll.tagName) {\n coll.tag = Coll.tagName;\n return coll;\n }\n const expType = Node.isMap(coll) ? 'map' : 'seq';\n let tag = ctx.schema.tags.find(t => t.collection === expType && t.tag === tagName);\n if (!tag) {\n const kt = ctx.schema.knownTags[tagName];\n if (kt && kt.collection === expType) {\n ctx.schema.tags.push(Object.assign({}, kt, { default: false }));\n tag = kt;\n }\n else {\n onError(tagToken, 'TAG_RESOLVE_FAILED', `Unresolved tag: ${tagName}`, true);\n coll.tag = tagName;\n return coll;\n }\n }\n const res = tag.resolve(coll, msg => onError(tagToken, 'TAG_RESOLVE_FAILED', msg), ctx.options);\n const node = Node.isNode(res)\n ? 
res\n : new Scalar.Scalar(res);\n node.range = coll.range;\n node.tag = tagName;\n if (tag?.format)\n node.format = tag.format;\n return node;\n}\n\nexports.composeCollection = composeCollection;\n","'use strict';\n\nvar Document = require('../doc/Document.js');\nvar composeNode = require('./compose-node.js');\nvar resolveEnd = require('./resolve-end.js');\nvar resolveProps = require('./resolve-props.js');\n\nfunction composeDoc(options, directives, { offset, start, value, end }, onError) {\n const opts = Object.assign({ _directives: directives }, options);\n const doc = new Document.Document(undefined, opts);\n const ctx = {\n atRoot: true,\n directives: doc.directives,\n options: doc.options,\n schema: doc.schema\n };\n const props = resolveProps.resolveProps(start, {\n indicator: 'doc-start',\n next: value ?? end?.[0],\n offset,\n onError,\n startOnNewline: true\n });\n if (props.found) {\n doc.directives.docStart = true;\n if (value &&\n (value.type === 'block-map' || value.type === 'block-seq') &&\n !props.hasNewline)\n onError(props.end, 'MISSING_CHAR', 'Block collection cannot start on same line with directives-end marker');\n }\n doc.contents = value\n ? composeNode.composeNode(ctx, value, props, onError)\n : composeNode.composeEmptyNode(ctx, props.end, start, null, props, onError);\n const contentEnd = doc.contents.range[2];\n const re = resolveEnd.resolveEnd(end, contentEnd, false, onError);\n if (re.comment)\n doc.comment = re.comment;\n doc.range = [offset, contentEnd, re.offset];\n return doc;\n}\n\nexports.composeDoc = composeDoc;\n","'use strict';\n\nvar Alias = require('../nodes/Alias.js');\nvar composeCollection = require('./compose-collection.js');\nvar composeScalar = require('./compose-scalar.js');\nvar resolveEnd = require('./resolve-end.js');\nvar utilEmptyScalarPosition = require('./util-empty-scalar-position.js');\n\nconst CN = { composeNode, composeEmptyNode };\nfunction composeNode(ctx, token, props, onError) {\n const { spaceBefore, comment, anchor, tag } = props;\n let node;\n let isSrcToken = true;\n switch (token.type) {\n case 'alias':\n node = composeAlias(ctx, token, onError);\n if (anchor || tag)\n onError(token, 'ALIAS_PROPS', 'An alias node must not specify any properties');\n break;\n case 'scalar':\n case 'single-quoted-scalar':\n case 'double-quoted-scalar':\n case 'block-scalar':\n node = composeScalar.composeScalar(ctx, token, tag, onError);\n if (anchor)\n node.anchor = anchor.source.substring(1);\n break;\n case 'block-map':\n case 'block-seq':\n case 'flow-collection':\n node = composeCollection.composeCollection(CN, ctx, token, tag, onError);\n if (anchor)\n node.anchor = anchor.source.substring(1);\n break;\n default: {\n const message = token.type === 'error'\n ? 
token.message\n : `Unsupported token (type: ${token.type})`;\n onError(token, 'UNEXPECTED_TOKEN', message);\n node = composeEmptyNode(ctx, token.offset, undefined, null, props, onError);\n isSrcToken = false;\n }\n }\n if (anchor && node.anchor === '')\n onError(anchor, 'BAD_ALIAS', 'Anchor cannot be an empty string');\n if (spaceBefore)\n node.spaceBefore = true;\n if (comment) {\n if (token.type === 'scalar' && token.source === '')\n node.comment = comment;\n else\n node.commentBefore = comment;\n }\n // @ts-expect-error Type checking misses meaning of isSrcToken\n if (ctx.options.keepSourceTokens && isSrcToken)\n node.srcToken = token;\n return node;\n}\nfunction composeEmptyNode(ctx, offset, before, pos, { spaceBefore, comment, anchor, tag, end }, onError) {\n const token = {\n type: 'scalar',\n offset: utilEmptyScalarPosition.emptyScalarPosition(offset, before, pos),\n indent: -1,\n source: ''\n };\n const node = composeScalar.composeScalar(ctx, token, tag, onError);\n if (anchor) {\n node.anchor = anchor.source.substring(1);\n if (node.anchor === '')\n onError(anchor, 'BAD_ALIAS', 'Anchor cannot be an empty string');\n }\n if (spaceBefore)\n node.spaceBefore = true;\n if (comment) {\n node.comment = comment;\n node.range[2] = end;\n }\n return node;\n}\nfunction composeAlias({ options }, { offset, source, end }, onError) {\n const alias = new Alias.Alias(source.substring(1));\n if (alias.source === '')\n onError(offset, 'BAD_ALIAS', 'Alias cannot be an empty string');\n if (alias.source.endsWith(':'))\n onError(offset + source.length - 1, 'BAD_ALIAS', 'Alias ending in : is ambiguous', true);\n const valueEnd = offset + source.length;\n const re = resolveEnd.resolveEnd(end, valueEnd, options.strict, onError);\n alias.range = [offset, valueEnd, re.offset];\n if (re.comment)\n alias.comment = re.comment;\n return alias;\n}\n\nexports.composeEmptyNode = composeEmptyNode;\nexports.composeNode = composeNode;\n","'use strict';\n\nvar Node = require('../nodes/Node.js');\nvar Scalar = require('../nodes/Scalar.js');\nvar resolveBlockScalar = require('./resolve-block-scalar.js');\nvar resolveFlowScalar = require('./resolve-flow-scalar.js');\n\nfunction composeScalar(ctx, token, tagToken, onError) {\n const { value, type, comment, range } = token.type === 'block-scalar'\n ? resolveBlockScalar.resolveBlockScalar(token, ctx.options.strict, onError)\n : resolveFlowScalar.resolveFlowScalar(token, ctx.options.strict, onError);\n const tagName = tagToken\n ? ctx.directives.tagName(tagToken.source, msg => onError(tagToken, 'TAG_RESOLVE_FAILED', msg))\n : null;\n const tag = tagToken && tagName\n ? findScalarTagByName(ctx.schema, value, tagName, tagToken, onError)\n : token.type === 'scalar'\n ? findScalarTagByTest(ctx, value, token, onError)\n : ctx.schema[Node.SCALAR];\n let scalar;\n try {\n const res = tag.resolve(value, msg => onError(tagToken ?? token, 'TAG_RESOLVE_FAILED', msg), ctx.options);\n scalar = Node.isScalar(res) ? res : new Scalar.Scalar(res);\n }\n catch (error) {\n const msg = error instanceof Error ? error.message : String(error);\n onError(tagToken ?? 
token, 'TAG_RESOLVE_FAILED', msg);\n scalar = new Scalar.Scalar(value);\n }\n scalar.range = range;\n scalar.source = value;\n if (type)\n scalar.type = type;\n if (tagName)\n scalar.tag = tagName;\n if (tag.format)\n scalar.format = tag.format;\n if (comment)\n scalar.comment = comment;\n return scalar;\n}\nfunction findScalarTagByName(schema, value, tagName, tagToken, onError) {\n if (tagName === '!')\n return schema[Node.SCALAR]; // non-specific tag\n const matchWithTest = [];\n for (const tag of schema.tags) {\n if (!tag.collection && tag.tag === tagName) {\n if (tag.default && tag.test)\n matchWithTest.push(tag);\n else\n return tag;\n }\n }\n for (const tag of matchWithTest)\n if (tag.test?.test(value))\n return tag;\n const kt = schema.knownTags[tagName];\n if (kt && !kt.collection) {\n // Ensure that the known tag is available for stringifying,\n // but does not get used by default.\n schema.tags.push(Object.assign({}, kt, { default: false, test: undefined }));\n return kt;\n }\n onError(tagToken, 'TAG_RESOLVE_FAILED', `Unresolved tag: ${tagName}`, tagName !== 'tag:yaml.org,2002:str');\n return schema[Node.SCALAR];\n}\nfunction findScalarTagByTest({ directives, schema }, value, token, onError) {\n const tag = schema.tags.find(tag => tag.default && tag.test?.test(value)) || schema[Node.SCALAR];\n if (schema.compat) {\n const compat = schema.compat.find(tag => tag.default && tag.test?.test(value)) ??\n schema[Node.SCALAR];\n if (tag.tag !== compat.tag) {\n const ts = directives.tagString(tag.tag);\n const cs = directives.tagString(compat.tag);\n const msg = `Value may be parsed as either ${ts} or ${cs}`;\n onError(token, 'TAG_RESOLVE_FAILED', msg, true);\n }\n }\n return tag;\n}\n\nexports.composeScalar = composeScalar;\n","'use strict';\n\nvar directives = require('../doc/directives.js');\nvar Document = require('../doc/Document.js');\nvar errors = require('../errors.js');\nvar Node = require('../nodes/Node.js');\nvar composeDoc = require('./compose-doc.js');\nvar resolveEnd = require('./resolve-end.js');\n\nfunction getErrorPos(src) {\n if (typeof src === 'number')\n return [src, src + 1];\n if (Array.isArray(src))\n return src.length === 2 ? src : [src[0], src[1]];\n const { offset, source } = src;\n return [offset, offset + (typeof source === 'string' ? source.length : 1)];\n}\nfunction parsePrelude(prelude) {\n let comment = '';\n let atComment = false;\n let afterEmptyLine = false;\n for (let i = 0; i < prelude.length; ++i) {\n const source = prelude[i];\n switch (source[0]) {\n case '#':\n comment +=\n (comment === '' ? '' : afterEmptyLine ? 
'\\n\\n' : '\\n') +\n (source.substring(1) || ' ');\n atComment = true;\n afterEmptyLine = false;\n break;\n case '%':\n if (prelude[i + 1]?.[0] !== '#')\n i += 1;\n atComment = false;\n break;\n default:\n // This may be wrong after doc-end, but in that case it doesn't matter\n if (!atComment)\n afterEmptyLine = true;\n atComment = false;\n }\n }\n return { comment, afterEmptyLine };\n}\n/**\n * Compose a stream of CST nodes into a stream of YAML Documents.\n *\n * ```ts\n * import { Composer, Parser } from 'yaml'\n *\n * const src: string = ...\n * const tokens = new Parser().parse(src)\n * const docs = new Composer().compose(tokens)\n * ```\n */\nclass Composer {\n constructor(options = {}) {\n this.doc = null;\n this.atDirectives = false;\n this.prelude = [];\n this.errors = [];\n this.warnings = [];\n this.onError = (source, code, message, warning) => {\n const pos = getErrorPos(source);\n if (warning)\n this.warnings.push(new errors.YAMLWarning(pos, code, message));\n else\n this.errors.push(new errors.YAMLParseError(pos, code, message));\n };\n // eslint-disable-next-line @typescript-eslint/prefer-nullish-coalescing\n this.directives = new directives.Directives({ version: options.version || '1.2' });\n this.options = options;\n }\n decorate(doc, afterDoc) {\n const { comment, afterEmptyLine } = parsePrelude(this.prelude);\n //console.log({ dc: doc.comment, prelude, comment })\n if (comment) {\n const dc = doc.contents;\n if (afterDoc) {\n doc.comment = doc.comment ? `${doc.comment}\\n${comment}` : comment;\n }\n else if (afterEmptyLine || doc.directives.docStart || !dc) {\n doc.commentBefore = comment;\n }\n else if (Node.isCollection(dc) && !dc.flow && dc.items.length > 0) {\n let it = dc.items[0];\n if (Node.isPair(it))\n it = it.key;\n const cb = it.commentBefore;\n it.commentBefore = cb ? `${comment}\\n${cb}` : comment;\n }\n else {\n const cb = dc.commentBefore;\n dc.commentBefore = cb ? `${comment}\\n${cb}` : comment;\n }\n }\n if (afterDoc) {\n Array.prototype.push.apply(doc.errors, this.errors);\n Array.prototype.push.apply(doc.warnings, this.warnings);\n }\n else {\n doc.errors = this.errors;\n doc.warnings = this.warnings;\n }\n this.prelude = [];\n this.errors = [];\n this.warnings = [];\n }\n /**\n * Current stream status information.\n *\n * Mostly useful at the end of input for an empty stream.\n */\n streamInfo() {\n return {\n comment: parsePrelude(this.prelude).comment,\n directives: this.directives,\n errors: this.errors,\n warnings: this.warnings\n };\n }\n /**\n * Compose tokens into documents.\n *\n * @param forceDoc - If the stream contains no document, still emit a final document including any comments and directives that would be applied to a subsequent document.\n * @param endOffset - Should be set if `forceDoc` is also set, to set the document range end and to indicate errors correctly.\n */\n *compose(tokens, forceDoc = false, endOffset = -1) {\n for (const token of tokens)\n yield* this.next(token);\n yield* this.end(forceDoc, endOffset);\n }\n /** Advance the composer by one CST token. 
*/\n *next(token) {\n if (process.env.LOG_STREAM)\n console.dir(token, { depth: null });\n switch (token.type) {\n case 'directive':\n this.directives.add(token.source, (offset, message, warning) => {\n const pos = getErrorPos(token);\n pos[0] += offset;\n this.onError(pos, 'BAD_DIRECTIVE', message, warning);\n });\n this.prelude.push(token.source);\n this.atDirectives = true;\n break;\n case 'document': {\n const doc = composeDoc.composeDoc(this.options, this.directives, token, this.onError);\n if (this.atDirectives && !doc.directives.docStart)\n this.onError(token, 'MISSING_CHAR', 'Missing directives-end/doc-start indicator line');\n this.decorate(doc, false);\n if (this.doc)\n yield this.doc;\n this.doc = doc;\n this.atDirectives = false;\n break;\n }\n case 'byte-order-mark':\n case 'space':\n break;\n case 'comment':\n case 'newline':\n this.prelude.push(token.source);\n break;\n case 'error': {\n const msg = token.source\n ? `${token.message}: ${JSON.stringify(token.source)}`\n : token.message;\n const error = new errors.YAMLParseError(getErrorPos(token), 'UNEXPECTED_TOKEN', msg);\n if (this.atDirectives || !this.doc)\n this.errors.push(error);\n else\n this.doc.errors.push(error);\n break;\n }\n case 'doc-end': {\n if (!this.doc) {\n const msg = 'Unexpected doc-end without preceding document';\n this.errors.push(new errors.YAMLParseError(getErrorPos(token), 'UNEXPECTED_TOKEN', msg));\n break;\n }\n this.doc.directives.docEnd = true;\n const end = resolveEnd.resolveEnd(token.end, token.offset + token.source.length, this.doc.options.strict, this.onError);\n this.decorate(this.doc, true);\n if (end.comment) {\n const dc = this.doc.comment;\n this.doc.comment = dc ? `${dc}\\n${end.comment}` : end.comment;\n }\n this.doc.range[2] = end.offset;\n break;\n }\n default:\n this.errors.push(new errors.YAMLParseError(getErrorPos(token), 'UNEXPECTED_TOKEN', `Unsupported token ${token.type}`));\n }\n }\n /**\n * Call at end of input to yield any remaining document.\n *\n * @param forceDoc - If the stream contains no document, still emit a final document including any comments and directives that would be applied to a subsequent document.\n * @param endOffset - Should be set if `forceDoc` is also set, to set the document range end and to indicate errors correctly.\n */\n *end(forceDoc = false, endOffset = -1) {\n if (this.doc) {\n this.decorate(this.doc, true);\n yield this.doc;\n this.doc = null;\n }\n else if (forceDoc) {\n const opts = Object.assign({ _directives: this.directives }, this.options);\n const doc = new Document.Document(undefined, opts);\n if (this.atDirectives)\n this.onError(endOffset, 'MISSING_CHAR', 'Missing directives-end indicator line');\n doc.range = [0, endOffset, endOffset];\n this.decorate(doc, false);\n yield doc;\n }\n }\n}\n\nexports.Composer = Composer;\n","'use strict';\n\nvar Pair = require('../nodes/Pair.js');\nvar YAMLMap = require('../nodes/YAMLMap.js');\nvar resolveProps = require('./resolve-props.js');\nvar utilContainsNewline = require('./util-contains-newline.js');\nvar utilFlowIndentCheck = require('./util-flow-indent-check.js');\nvar utilMapIncludes = require('./util-map-includes.js');\n\nconst startColMsg = 'All mapping items must start at the same column';\nfunction resolveBlockMap({ composeNode, composeEmptyNode }, ctx, bm, onError) {\n const map = new YAMLMap.YAMLMap(ctx.schema);\n if (ctx.atRoot)\n ctx.atRoot = false;\n let offset = bm.offset;\n let commentEnd = null;\n for (const collItem of bm.items) {\n const { start, key, sep, value } = 
collItem;\n // key properties\n const keyProps = resolveProps.resolveProps(start, {\n indicator: 'explicit-key-ind',\n next: key ?? sep?.[0],\n offset,\n onError,\n startOnNewline: true\n });\n const implicitKey = !keyProps.found;\n if (implicitKey) {\n if (key) {\n if (key.type === 'block-seq')\n onError(offset, 'BLOCK_AS_IMPLICIT_KEY', 'A block sequence may not be used as an implicit map key');\n else if ('indent' in key && key.indent !== bm.indent)\n onError(offset, 'BAD_INDENT', startColMsg);\n }\n if (!keyProps.anchor && !keyProps.tag && !sep) {\n commentEnd = keyProps.end;\n if (keyProps.comment) {\n if (map.comment)\n map.comment += '\\n' + keyProps.comment;\n else\n map.comment = keyProps.comment;\n }\n continue;\n }\n if (keyProps.hasNewlineAfterProp || utilContainsNewline.containsNewline(key)) {\n onError(key ?? start[start.length - 1], 'MULTILINE_IMPLICIT_KEY', 'Implicit keys need to be on a single line');\n }\n }\n else if (keyProps.found?.indent !== bm.indent) {\n onError(offset, 'BAD_INDENT', startColMsg);\n }\n // key value\n const keyStart = keyProps.end;\n const keyNode = key\n ? composeNode(ctx, key, keyProps, onError)\n : composeEmptyNode(ctx, keyStart, start, null, keyProps, onError);\n if (ctx.schema.compat)\n utilFlowIndentCheck.flowIndentCheck(bm.indent, key, onError);\n if (utilMapIncludes.mapIncludes(ctx, map.items, keyNode))\n onError(keyStart, 'DUPLICATE_KEY', 'Map keys must be unique');\n // value properties\n const valueProps = resolveProps.resolveProps(sep ?? [], {\n indicator: 'map-value-ind',\n next: value,\n offset: keyNode.range[2],\n onError,\n startOnNewline: !key || key.type === 'block-scalar'\n });\n offset = valueProps.end;\n if (valueProps.found) {\n if (implicitKey) {\n if (value?.type === 'block-map' && !valueProps.hasNewline)\n onError(offset, 'BLOCK_AS_IMPLICIT_KEY', 'Nested mappings are not allowed in compact mappings');\n if (ctx.options.strict &&\n keyProps.start < valueProps.found.offset - 1024)\n onError(keyNode.range, 'KEY_OVER_1024_CHARS', 'The : indicator must be at most 1024 chars after the start of an implicit block mapping key');\n }\n // value value\n const valueNode = value\n ? composeNode(ctx, value, valueProps, onError)\n : composeEmptyNode(ctx, offset, sep, null, valueProps, onError);\n if (ctx.schema.compat)\n utilFlowIndentCheck.flowIndentCheck(bm.indent, value, onError);\n offset = valueNode.range[2];\n const pair = new Pair.Pair(keyNode, valueNode);\n if (ctx.options.keepSourceTokens)\n pair.srcToken = collItem;\n map.items.push(pair);\n }\n else {\n // key with no value\n if (implicitKey)\n onError(keyNode.range, 'MISSING_CHAR', 'Implicit map keys need to be followed by map values');\n if (valueProps.comment) {\n if (keyNode.comment)\n keyNode.comment += '\\n' + valueProps.comment;\n else\n keyNode.comment = valueProps.comment;\n }\n const pair = new Pair.Pair(keyNode);\n if (ctx.options.keepSourceTokens)\n pair.srcToken = collItem;\n map.items.push(pair);\n }\n }\n if (commentEnd && commentEnd < offset)\n onError(commentEnd, 'IMPOSSIBLE', 'Map comment with trailing content');\n map.range = [bm.offset, offset, commentEnd ?? 
offset];\n return map;\n}\n\nexports.resolveBlockMap = resolveBlockMap;\n","'use strict';\n\nvar Scalar = require('../nodes/Scalar.js');\n\nfunction resolveBlockScalar(scalar, strict, onError) {\n const start = scalar.offset;\n const header = parseBlockScalarHeader(scalar, strict, onError);\n if (!header)\n return { value: '', type: null, comment: '', range: [start, start, start] };\n const type = header.mode === '>' ? Scalar.Scalar.BLOCK_FOLDED : Scalar.Scalar.BLOCK_LITERAL;\n const lines = scalar.source ? splitLines(scalar.source) : [];\n // determine the end of content & start of chomping\n let chompStart = lines.length;\n for (let i = lines.length - 1; i >= 0; --i) {\n const content = lines[i][1];\n if (content === '' || content === '\\r')\n chompStart = i;\n else\n break;\n }\n // shortcut for empty contents\n if (chompStart === 0) {\n const value = header.chomp === '+' && lines.length > 0\n ? '\\n'.repeat(Math.max(1, lines.length - 1))\n : '';\n let end = start + header.length;\n if (scalar.source)\n end += scalar.source.length;\n return { value, type, comment: header.comment, range: [start, end, end] };\n }\n // find the indentation level to trim from start\n let trimIndent = scalar.indent + header.indent;\n let offset = scalar.offset + header.length;\n let contentStart = 0;\n for (let i = 0; i < chompStart; ++i) {\n const [indent, content] = lines[i];\n if (content === '' || content === '\\r') {\n if (header.indent === 0 && indent.length > trimIndent)\n trimIndent = indent.length;\n }\n else {\n if (indent.length < trimIndent) {\n const message = 'Block scalars with more-indented leading empty lines must use an explicit indentation indicator';\n onError(offset + indent.length, 'MISSING_CHAR', message);\n }\n if (header.indent === 0)\n trimIndent = indent.length;\n contentStart = i;\n break;\n }\n offset += indent.length + content.length + 1;\n }\n // include trailing more-indented empty lines in content\n for (let i = lines.length - 1; i >= chompStart; --i) {\n if (lines[i][0].length > trimIndent)\n chompStart = i + 1;\n }\n let value = '';\n let sep = '';\n let prevMoreIndented = false;\n // leading whitespace is kept intact\n for (let i = 0; i < contentStart; ++i)\n value += lines[i][0].slice(trimIndent) + '\\n';\n for (let i = contentStart; i < chompStart; ++i) {\n let [indent, content] = lines[i];\n offset += indent.length + content.length + 1;\n const crlf = content[content.length - 1] === '\\r';\n if (crlf)\n content = content.slice(0, -1);\n /* istanbul ignore if already caught in lexer */\n if (content && indent.length < trimIndent) {\n const src = header.indent\n ? 'explicit indentation indicator'\n : 'first line';\n const message = `Block scalar lines must not be less indented than their ${src}`;\n onError(offset - content.length - (crlf ? 
2 : 1), 'BAD_INDENT', message);\n indent = '';\n }\n if (type === Scalar.Scalar.BLOCK_LITERAL) {\n value += sep + indent.slice(trimIndent) + content;\n sep = '\\n';\n }\n else if (indent.length > trimIndent || content[0] === '\\t') {\n // more-indented content within a folded block\n if (sep === ' ')\n sep = '\\n';\n else if (!prevMoreIndented && sep === '\\n')\n sep = '\\n\\n';\n value += sep + indent.slice(trimIndent) + content;\n sep = '\\n';\n prevMoreIndented = true;\n }\n else if (content === '') {\n // empty line\n if (sep === '\\n')\n value += '\\n';\n else\n sep = '\\n';\n }\n else {\n value += sep + content;\n sep = ' ';\n prevMoreIndented = false;\n }\n }\n switch (header.chomp) {\n case '-':\n break;\n case '+':\n for (let i = chompStart; i < lines.length; ++i)\n value += '\\n' + lines[i][0].slice(trimIndent);\n if (value[value.length - 1] !== '\\n')\n value += '\\n';\n break;\n default:\n value += '\\n';\n }\n const end = start + header.length + scalar.source.length;\n return { value, type, comment: header.comment, range: [start, end, end] };\n}\nfunction parseBlockScalarHeader({ offset, props }, strict, onError) {\n /* istanbul ignore if should not happen */\n if (props[0].type !== 'block-scalar-header') {\n onError(props[0], 'IMPOSSIBLE', 'Block scalar header not found');\n return null;\n }\n const { source } = props[0];\n const mode = source[0];\n let indent = 0;\n let chomp = '';\n let error = -1;\n for (let i = 1; i < source.length; ++i) {\n const ch = source[i];\n if (!chomp && (ch === '-' || ch === '+'))\n chomp = ch;\n else {\n const n = Number(ch);\n if (!indent && n)\n indent = n;\n else if (error === -1)\n error = offset + i;\n }\n }\n if (error !== -1)\n onError(error, 'UNEXPECTED_TOKEN', `Block scalar header includes extra characters: ${source}`);\n let hasSpace = false;\n let comment = '';\n let length = source.length;\n for (let i = 1; i < props.length; ++i) {\n const token = props[i];\n switch (token.type) {\n case 'space':\n hasSpace = true;\n // fallthrough\n case 'newline':\n length += token.source.length;\n break;\n case 'comment':\n if (strict && !hasSpace) {\n const message = 'Comments must be separated from other tokens by white space characters';\n onError(token, 'MISSING_CHAR', message);\n }\n length += token.source.length;\n comment = token.source.substring(1);\n break;\n case 'error':\n onError(token, 'UNEXPECTED_TOKEN', token.message);\n length += token.source.length;\n break;\n /* istanbul ignore next should not happen */\n default: {\n const message = `Unexpected token in block scalar header: ${token.type}`;\n onError(token, 'UNEXPECTED_TOKEN', message);\n const ts = token.source;\n if (ts && typeof ts === 'string')\n length += ts.length;\n }\n }\n }\n return { mode, indent, chomp, comment, length };\n}\n/** @returns Array of lines split up as `[indent, content]` */\nfunction splitLines(source) {\n const split = source.split(/\\n( *)/);\n const first = split[0];\n const m = first.match(/^( *)/);\n const line0 = m?.[1]\n ? 
[m[1], first.slice(m[1].length)]\n : ['', first];\n const lines = [line0];\n for (let i = 1; i < split.length; i += 2)\n lines.push([split[i], split[i + 1]]);\n return lines;\n}\n\nexports.resolveBlockScalar = resolveBlockScalar;\n","'use strict';\n\nvar YAMLSeq = require('../nodes/YAMLSeq.js');\nvar resolveProps = require('./resolve-props.js');\nvar utilFlowIndentCheck = require('./util-flow-indent-check.js');\n\nfunction resolveBlockSeq({ composeNode, composeEmptyNode }, ctx, bs, onError) {\n const seq = new YAMLSeq.YAMLSeq(ctx.schema);\n if (ctx.atRoot)\n ctx.atRoot = false;\n let offset = bs.offset;\n let commentEnd = null;\n for (const { start, value } of bs.items) {\n const props = resolveProps.resolveProps(start, {\n indicator: 'seq-item-ind',\n next: value,\n offset,\n onError,\n startOnNewline: true\n });\n if (!props.found) {\n if (props.anchor || props.tag || value) {\n if (value && value.type === 'block-seq')\n onError(props.end, 'BAD_INDENT', 'All sequence items must start at the same column');\n else\n onError(offset, 'MISSING_CHAR', 'Sequence item without - indicator');\n }\n else {\n commentEnd = props.end;\n if (props.comment)\n seq.comment = props.comment;\n continue;\n }\n }\n const node = value\n ? composeNode(ctx, value, props, onError)\n : composeEmptyNode(ctx, props.end, start, null, props, onError);\n if (ctx.schema.compat)\n utilFlowIndentCheck.flowIndentCheck(bs.indent, value, onError);\n offset = node.range[2];\n seq.items.push(node);\n }\n seq.range = [bs.offset, offset, commentEnd ?? offset];\n return seq;\n}\n\nexports.resolveBlockSeq = resolveBlockSeq;\n","'use strict';\n\nfunction resolveEnd(end, offset, reqSpace, onError) {\n let comment = '';\n if (end) {\n let hasSpace = false;\n let sep = '';\n for (const token of end) {\n const { source, type } = token;\n switch (type) {\n case 'space':\n hasSpace = true;\n break;\n case 'comment': {\n if (reqSpace && !hasSpace)\n onError(token, 'MISSING_CHAR', 'Comments must be separated from other tokens by white space characters');\n const cb = source.substring(1) || ' ';\n if (!comment)\n comment = cb;\n else\n comment += sep + cb;\n sep = '';\n break;\n }\n case 'newline':\n if (comment)\n sep += source;\n hasSpace = true;\n break;\n default:\n onError(token, 'UNEXPECTED_TOKEN', `Unexpected ${type} at node end`);\n }\n offset += source.length;\n }\n }\n return { comment, offset };\n}\n\nexports.resolveEnd = resolveEnd;\n","'use strict';\n\nvar Node = require('../nodes/Node.js');\nvar Pair = require('../nodes/Pair.js');\nvar YAMLMap = require('../nodes/YAMLMap.js');\nvar YAMLSeq = require('../nodes/YAMLSeq.js');\nvar resolveEnd = require('./resolve-end.js');\nvar resolveProps = require('./resolve-props.js');\nvar utilContainsNewline = require('./util-contains-newline.js');\nvar utilMapIncludes = require('./util-map-includes.js');\n\nconst blockMsg = 'Block collections are not allowed within flow collections';\nconst isBlock = (token) => token && (token.type === 'block-map' || token.type === 'block-seq');\nfunction resolveFlowCollection({ composeNode, composeEmptyNode }, ctx, fc, onError) {\n const isMap = fc.start.source === '{';\n const fcName = isMap ? 'flow map' : 'flow sequence';\n const coll = isMap\n ? 
new YAMLMap.YAMLMap(ctx.schema)\n : new YAMLSeq.YAMLSeq(ctx.schema);\n coll.flow = true;\n const atRoot = ctx.atRoot;\n if (atRoot)\n ctx.atRoot = false;\n let offset = fc.offset + fc.start.source.length;\n for (let i = 0; i < fc.items.length; ++i) {\n const collItem = fc.items[i];\n const { start, key, sep, value } = collItem;\n const props = resolveProps.resolveProps(start, {\n flow: fcName,\n indicator: 'explicit-key-ind',\n next: key ?? sep?.[0],\n offset,\n onError,\n startOnNewline: false\n });\n if (!props.found) {\n if (!props.anchor && !props.tag && !sep && !value) {\n if (i === 0 && props.comma)\n onError(props.comma, 'UNEXPECTED_TOKEN', `Unexpected , in ${fcName}`);\n else if (i < fc.items.length - 1)\n onError(props.start, 'UNEXPECTED_TOKEN', `Unexpected empty item in ${fcName}`);\n if (props.comment) {\n if (coll.comment)\n coll.comment += '\\n' + props.comment;\n else\n coll.comment = props.comment;\n }\n offset = props.end;\n continue;\n }\n if (!isMap && ctx.options.strict && utilContainsNewline.containsNewline(key))\n onError(key, // checked by containsNewline()\n 'MULTILINE_IMPLICIT_KEY', 'Implicit keys of flow sequence pairs need to be on a single line');\n }\n if (i === 0) {\n if (props.comma)\n onError(props.comma, 'UNEXPECTED_TOKEN', `Unexpected , in ${fcName}`);\n }\n else {\n if (!props.comma)\n onError(props.start, 'MISSING_CHAR', `Missing , between ${fcName} items`);\n if (props.comment) {\n let prevItemComment = '';\n loop: for (const st of start) {\n switch (st.type) {\n case 'comma':\n case 'space':\n break;\n case 'comment':\n prevItemComment = st.source.substring(1);\n break loop;\n default:\n break loop;\n }\n }\n if (prevItemComment) {\n let prev = coll.items[coll.items.length - 1];\n if (Node.isPair(prev))\n prev = prev.value ?? prev.key;\n if (prev.comment)\n prev.comment += '\\n' + prevItemComment;\n else\n prev.comment = prevItemComment;\n props.comment = props.comment.substring(prevItemComment.length + 1);\n }\n }\n }\n if (!isMap && !sep && !props.found) {\n // item is a value in a seq\n // → key & sep are empty, start does not include ? or :\n const valueNode = value\n ? composeNode(ctx, value, props, onError)\n : composeEmptyNode(ctx, props.end, sep, null, props, onError);\n coll.items.push(valueNode);\n offset = valueNode.range[2];\n if (isBlock(value))\n onError(valueNode.range, 'BLOCK_IN_FLOW', blockMsg);\n }\n else {\n // item is a key+value pair\n // key value\n const keyStart = props.end;\n const keyNode = key\n ? composeNode(ctx, key, props, onError)\n : composeEmptyNode(ctx, keyStart, start, null, props, onError);\n if (isBlock(key))\n onError(keyNode.range, 'BLOCK_IN_FLOW', blockMsg);\n // value properties\n const valueProps = resolveProps.resolveProps(sep ?? 
[], {\n flow: fcName,\n indicator: 'map-value-ind',\n next: value,\n offset: keyNode.range[2],\n onError,\n startOnNewline: false\n });\n if (valueProps.found) {\n if (!isMap && !props.found && ctx.options.strict) {\n if (sep)\n for (const st of sep) {\n if (st === valueProps.found)\n break;\n if (st.type === 'newline') {\n onError(st, 'MULTILINE_IMPLICIT_KEY', 'Implicit keys of flow sequence pairs need to be on a single line');\n break;\n }\n }\n if (props.start < valueProps.found.offset - 1024)\n onError(valueProps.found, 'KEY_OVER_1024_CHARS', 'The : indicator must be at most 1024 chars after the start of an implicit flow sequence key');\n }\n }\n else if (value) {\n if ('source' in value && value.source && value.source[0] === ':')\n onError(value, 'MISSING_CHAR', `Missing space after : in ${fcName}`);\n else\n onError(valueProps.start, 'MISSING_CHAR', `Missing , or : between ${fcName} items`);\n }\n // value value\n const valueNode = value\n ? composeNode(ctx, value, valueProps, onError)\n : valueProps.found\n ? composeEmptyNode(ctx, valueProps.end, sep, null, valueProps, onError)\n : null;\n if (valueNode) {\n if (isBlock(value))\n onError(valueNode.range, 'BLOCK_IN_FLOW', blockMsg);\n }\n else if (valueProps.comment) {\n if (keyNode.comment)\n keyNode.comment += '\\n' + valueProps.comment;\n else\n keyNode.comment = valueProps.comment;\n }\n const pair = new Pair.Pair(keyNode, valueNode);\n if (ctx.options.keepSourceTokens)\n pair.srcToken = collItem;\n if (isMap) {\n const map = coll;\n if (utilMapIncludes.mapIncludes(ctx, map.items, keyNode))\n onError(keyStart, 'DUPLICATE_KEY', 'Map keys must be unique');\n map.items.push(pair);\n }\n else {\n const map = new YAMLMap.YAMLMap(ctx.schema);\n map.flow = true;\n map.items.push(pair);\n coll.items.push(map);\n }\n offset = valueNode ? valueNode.range[2] : valueProps.end;\n }\n }\n const expectedEnd = isMap ? '}' : ']';\n const [ce, ...ee] = fc.end;\n let cePos = offset;\n if (ce && ce.source === expectedEnd)\n cePos = ce.offset + ce.source.length;\n else {\n const name = fcName[0].toUpperCase() + fcName.substring(1);\n const msg = atRoot\n ? `${name} must end with a ${expectedEnd}`\n : `${name} in block collection must be sufficiently indented and end with a ${expectedEnd}`;\n onError(offset, atRoot ? 
'MISSING_CHAR' : 'BAD_INDENT', msg);\n if (ce && ce.source.length !== 1)\n ee.unshift(ce);\n }\n if (ee.length > 0) {\n const end = resolveEnd.resolveEnd(ee, cePos, ctx.options.strict, onError);\n if (end.comment) {\n if (coll.comment)\n coll.comment += '\\n' + end.comment;\n else\n coll.comment = end.comment;\n }\n coll.range = [fc.offset, cePos, end.offset];\n }\n else {\n coll.range = [fc.offset, cePos, cePos];\n }\n return coll;\n}\n\nexports.resolveFlowCollection = resolveFlowCollection;\n","'use strict';\n\nvar Scalar = require('../nodes/Scalar.js');\nvar resolveEnd = require('./resolve-end.js');\n\nfunction resolveFlowScalar(scalar, strict, onError) {\n const { offset, type, source, end } = scalar;\n let _type;\n let value;\n const _onError = (rel, code, msg) => onError(offset + rel, code, msg);\n switch (type) {\n case 'scalar':\n _type = Scalar.Scalar.PLAIN;\n value = plainValue(source, _onError);\n break;\n case 'single-quoted-scalar':\n _type = Scalar.Scalar.QUOTE_SINGLE;\n value = singleQuotedValue(source, _onError);\n break;\n case 'double-quoted-scalar':\n _type = Scalar.Scalar.QUOTE_DOUBLE;\n value = doubleQuotedValue(source, _onError);\n break;\n /* istanbul ignore next should not happen */\n default:\n onError(scalar, 'UNEXPECTED_TOKEN', `Expected a flow scalar value, but found: ${type}`);\n return {\n value: '',\n type: null,\n comment: '',\n range: [offset, offset + source.length, offset + source.length]\n };\n }\n const valueEnd = offset + source.length;\n const re = resolveEnd.resolveEnd(end, valueEnd, strict, onError);\n return {\n value,\n type: _type,\n comment: re.comment,\n range: [offset, valueEnd, re.offset]\n };\n}\nfunction plainValue(source, onError) {\n let badChar = '';\n switch (source[0]) {\n /* istanbul ignore next should not happen */\n case '\\t':\n badChar = 'a tab character';\n break;\n case ',':\n badChar = 'flow indicator character ,';\n break;\n case '%':\n badChar = 'directive indicator character %';\n break;\n case '|':\n case '>': {\n badChar = `block scalar indicator ${source[0]}`;\n break;\n }\n case '@':\n case '`': {\n badChar = `reserved character ${source[0]}`;\n break;\n }\n }\n if (badChar)\n onError(0, 'BAD_SCALAR_START', `Plain value cannot start with ${badChar}`);\n return foldLines(source);\n}\nfunction singleQuotedValue(source, onError) {\n if (source[source.length - 1] !== \"'\" || source.length === 1)\n onError(source.length, 'MISSING_CHAR', \"Missing closing 'quote\");\n return foldLines(source.slice(1, -1)).replace(/''/g, \"'\");\n}\nfunction foldLines(source) {\n /**\n * The negative lookbehind here and in the `re` RegExp is to\n * prevent causing a polynomial search time in certain cases.\n *\n * The try-catch is for Safari, which doesn't support this yet:\n * https://caniuse.com/js-regexp-lookbehind\n */\n let first, line;\n try {\n first = new RegExp('(.*?)(? wsStart ? 
source.slice(wsStart, i + 1) : ch;\n }\n else {\n res += ch;\n }\n }\n if (source[source.length - 1] !== '\"' || source.length === 1)\n onError(source.length, 'MISSING_CHAR', 'Missing closing \"quote');\n return res;\n}\n/**\n * Fold a single newline into a space, multiple newlines to N - 1 newlines.\n * Presumes `source[offset] === '\\n'`\n */\nfunction foldNewline(source, offset) {\n let fold = '';\n let ch = source[offset + 1];\n while (ch === ' ' || ch === '\\t' || ch === '\\n' || ch === '\\r') {\n if (ch === '\\r' && source[offset + 2] !== '\\n')\n break;\n if (ch === '\\n')\n fold += '\\n';\n offset += 1;\n ch = source[offset + 1];\n }\n if (!fold)\n fold = ' ';\n return { fold, offset };\n}\nconst escapeCodes = {\n '0': '\\0',\n a: '\\x07',\n b: '\\b',\n e: '\\x1b',\n f: '\\f',\n n: '\\n',\n r: '\\r',\n t: '\\t',\n v: '\\v',\n N: '\\u0085',\n _: '\\u00a0',\n L: '\\u2028',\n P: '\\u2029',\n ' ': ' ',\n '\"': '\"',\n '/': '/',\n '\\\\': '\\\\',\n '\\t': '\\t'\n};\nfunction parseCharCode(source, offset, length, onError) {\n const cc = source.substr(offset, length);\n const ok = cc.length === length && /^[0-9a-fA-F]+$/.test(cc);\n const code = ok ? parseInt(cc, 16) : NaN;\n if (isNaN(code)) {\n const raw = source.substr(offset - 2, length + 2);\n onError(offset - 2, 'BAD_DQ_ESCAPE', `Invalid escape sequence ${raw}`);\n return raw;\n }\n return String.fromCodePoint(code);\n}\n\nexports.resolveFlowScalar = resolveFlowScalar;\n","'use strict';\n\nfunction resolveProps(tokens, { flow, indicator, next, offset, onError, startOnNewline }) {\n let spaceBefore = false;\n let atNewline = startOnNewline;\n let hasSpace = startOnNewline;\n let comment = '';\n let commentSep = '';\n let hasNewline = false;\n let hasNewlineAfterProp = false;\n let reqSpace = false;\n let anchor = null;\n let tag = null;\n let comma = null;\n let found = null;\n let start = null;\n for (const token of tokens) {\n if (reqSpace) {\n if (token.type !== 'space' &&\n token.type !== 'newline' &&\n token.type !== 'comma')\n onError(token.offset, 'MISSING_CHAR', 'Tags and anchors must be separated from the next token by white space');\n reqSpace = false;\n }\n switch (token.type) {\n case 'space':\n // At the doc level, tabs at line start may be parsed\n // as leading white space rather than indentation.\n // In a flow collection, only the parser handles indent.\n if (!flow &&\n atNewline &&\n indicator !== 'doc-start' &&\n token.source[0] === '\\t')\n onError(token, 'TAB_AS_INDENT', 'Tabs are not allowed as indentation');\n hasSpace = true;\n break;\n case 'comment': {\n if (!hasSpace)\n onError(token, 'MISSING_CHAR', 'Comments must be separated from other tokens by white space characters');\n const cb = token.source.substring(1) || ' ';\n if (!comment)\n comment = cb;\n else\n comment += commentSep + cb;\n commentSep = '';\n atNewline = false;\n break;\n }\n case 'newline':\n if (atNewline) {\n if (comment)\n comment += token.source;\n else\n spaceBefore = true;\n }\n else\n commentSep += token.source;\n atNewline = true;\n hasNewline = true;\n if (anchor || tag)\n hasNewlineAfterProp = true;\n hasSpace = true;\n break;\n case 'anchor':\n if (anchor)\n onError(token, 'MULTIPLE_ANCHORS', 'A node can have at most one anchor');\n if (token.source.endsWith(':'))\n onError(token.offset + token.source.length - 1, 'BAD_ALIAS', 'Anchor ending in : is ambiguous', true);\n anchor = token;\n if (start === null)\n start = token.offset;\n atNewline = false;\n hasSpace = false;\n reqSpace = true;\n break;\n case 'tag': {\n if (tag)\n 
onError(token, 'MULTIPLE_TAGS', 'A node can have at most one tag');\n tag = token;\n if (start === null)\n start = token.offset;\n atNewline = false;\n hasSpace = false;\n reqSpace = true;\n break;\n }\n case indicator:\n // Could here handle preceding comments differently\n if (anchor || tag)\n onError(token, 'BAD_PROP_ORDER', `Anchors and tags must be after the ${token.source} indicator`);\n if (found)\n onError(token, 'UNEXPECTED_TOKEN', `Unexpected ${token.source} in ${flow ?? 'collection'}`);\n found = token;\n atNewline = false;\n hasSpace = false;\n break;\n case 'comma':\n if (flow) {\n if (comma)\n onError(token, 'UNEXPECTED_TOKEN', `Unexpected , in ${flow}`);\n comma = token;\n atNewline = false;\n hasSpace = false;\n break;\n }\n // else fallthrough\n default:\n onError(token, 'UNEXPECTED_TOKEN', `Unexpected ${token.type} token`);\n atNewline = false;\n hasSpace = false;\n }\n }\n const last = tokens[tokens.length - 1];\n const end = last ? last.offset + last.source.length : offset;\n if (reqSpace &&\n next &&\n next.type !== 'space' &&\n next.type !== 'newline' &&\n next.type !== 'comma' &&\n (next.type !== 'scalar' || next.source !== ''))\n onError(next.offset, 'MISSING_CHAR', 'Tags and anchors must be separated from the next token by white space');\n return {\n comma,\n found,\n spaceBefore,\n comment,\n hasNewline,\n hasNewlineAfterProp,\n anchor,\n tag,\n end,\n start: start ?? end\n };\n}\n\nexports.resolveProps = resolveProps;\n","'use strict';\n\nfunction containsNewline(key) {\n if (!key)\n return null;\n switch (key.type) {\n case 'alias':\n case 'scalar':\n case 'double-quoted-scalar':\n case 'single-quoted-scalar':\n if (key.source.includes('\\n'))\n return true;\n if (key.end)\n for (const st of key.end)\n if (st.type === 'newline')\n return true;\n return false;\n case 'flow-collection':\n for (const it of key.items) {\n for (const st of it.start)\n if (st.type === 'newline')\n return true;\n if (it.sep)\n for (const st of it.sep)\n if (st.type === 'newline')\n return true;\n if (containsNewline(it.key) || containsNewline(it.value))\n return true;\n }\n return false;\n default:\n return true;\n }\n}\n\nexports.containsNewline = containsNewline;\n","'use strict';\n\nfunction emptyScalarPosition(offset, before, pos) {\n if (before) {\n if (pos === null)\n pos = before.length;\n for (let i = pos - 1; i >= 0; --i) {\n let st = before[i];\n switch (st.type) {\n case 'space':\n case 'comment':\n case 'newline':\n offset -= st.source.length;\n continue;\n }\n // Technically, an empty scalar is immediately after the last non-empty\n // node, but it's more useful to place it after any whitespace.\n st = before[++i];\n while (st?.type === 'space') {\n offset += st.source.length;\n st = before[++i];\n }\n break;\n }\n }\n return offset;\n}\n\nexports.emptyScalarPosition = emptyScalarPosition;\n","'use strict';\n\nvar utilContainsNewline = require('./util-contains-newline.js');\n\nfunction flowIndentCheck(indent, fc, onError) {\n if (fc?.type === 'flow-collection') {\n const end = fc.end[0];\n if (end.indent === indent &&\n (end.source === ']' || end.source === '}') &&\n utilContainsNewline.containsNewline(fc)) {\n const msg = 'Flow end indicator should be more indented than parent';\n onError(end, 'BAD_INDENT', msg, true);\n }\n }\n}\n\nexports.flowIndentCheck = flowIndentCheck;\n","'use strict';\n\nvar Node = require('../nodes/Node.js');\n\nfunction mapIncludes(ctx, items, search) {\n const { uniqueKeys } = ctx.options;\n if (uniqueKeys === false)\n return false;\n const 
isEqual = typeof uniqueKeys === 'function'\n ? uniqueKeys\n : (a, b) => a === b ||\n (Node.isScalar(a) &&\n Node.isScalar(b) &&\n a.value === b.value &&\n !(a.value === '<<' && ctx.schema.merge));\n return items.some(pair => isEqual(pair.key, search));\n}\n\nexports.mapIncludes = mapIncludes;\n","'use strict';\n\nvar Alias = require('../nodes/Alias.js');\nvar Collection = require('../nodes/Collection.js');\nvar Node = require('../nodes/Node.js');\nvar Pair = require('../nodes/Pair.js');\nvar toJS = require('../nodes/toJS.js');\nvar Schema = require('../schema/Schema.js');\nvar stringify = require('../stringify/stringify.js');\nvar stringifyDocument = require('../stringify/stringifyDocument.js');\nvar anchors = require('./anchors.js');\nvar applyReviver = require('./applyReviver.js');\nvar createNode = require('./createNode.js');\nvar directives = require('./directives.js');\n\nclass Document {\n constructor(value, replacer, options) {\n /** A comment before this Document */\n this.commentBefore = null;\n /** A comment immediately after this Document */\n this.comment = null;\n /** Errors encountered during parsing. */\n this.errors = [];\n /** Warnings encountered during parsing. */\n this.warnings = [];\n Object.defineProperty(this, Node.NODE_TYPE, { value: Node.DOC });\n let _replacer = null;\n if (typeof replacer === 'function' || Array.isArray(replacer)) {\n _replacer = replacer;\n }\n else if (options === undefined && replacer) {\n options = replacer;\n replacer = undefined;\n }\n const opt = Object.assign({\n intAsBigInt: false,\n keepSourceTokens: false,\n logLevel: 'warn',\n prettyErrors: true,\n strict: true,\n uniqueKeys: true,\n version: '1.2'\n }, options);\n this.options = opt;\n let { version } = opt;\n if (options?._directives) {\n this.directives = options._directives.atDocument();\n if (this.directives.yaml.explicit)\n version = this.directives.yaml.version;\n }\n else\n this.directives = new directives.Directives({ version });\n this.setSchema(version, options);\n if (value === undefined)\n this.contents = null;\n else {\n this.contents = this.createNode(value, _replacer, options);\n }\n }\n /**\n * Create a deep copy of this Document and its contents.\n *\n * Custom Node values that inherit from `Object` still refer to their original instances.\n */\n clone() {\n const copy = Object.create(Document.prototype, {\n [Node.NODE_TYPE]: { value: Node.DOC }\n });\n copy.commentBefore = this.commentBefore;\n copy.comment = this.comment;\n copy.errors = this.errors.slice();\n copy.warnings = this.warnings.slice();\n copy.options = Object.assign({}, this.options);\n if (this.directives)\n copy.directives = this.directives.clone();\n copy.schema = this.schema.clone();\n copy.contents = Node.isNode(this.contents)\n ? this.contents.clone(copy.schema)\n : this.contents;\n if (this.range)\n copy.range = this.range.slice();\n return copy;\n }\n /** Adds a value to the document. */\n add(value) {\n if (assertCollection(this.contents))\n this.contents.add(value);\n }\n /** Adds a value to the document. 
*/\n addIn(path, value) {\n if (assertCollection(this.contents))\n this.contents.addIn(path, value);\n }\n /**\n * Create a new `Alias` node, ensuring that the target `node` has the required anchor.\n *\n * If `node` already has an anchor, `name` is ignored.\n * Otherwise, the `node.anchor` value will be set to `name`,\n * or if an anchor with that name is already present in the document,\n * `name` will be used as a prefix for a new unique anchor.\n * If `name` is undefined, the generated anchor will use 'a' as a prefix.\n */\n createAlias(node, name) {\n if (!node.anchor) {\n const prev = anchors.anchorNames(this);\n node.anchor =\n // eslint-disable-next-line @typescript-eslint/prefer-nullish-coalescing\n !name || prev.has(name) ? anchors.findNewAnchor(name || 'a', prev) : name;\n }\n return new Alias.Alias(node.anchor);\n }\n createNode(value, replacer, options) {\n let _replacer = undefined;\n if (typeof replacer === 'function') {\n value = replacer.call({ '': value }, '', value);\n _replacer = replacer;\n }\n else if (Array.isArray(replacer)) {\n const keyToStr = (v) => typeof v === 'number' || v instanceof String || v instanceof Number;\n const asStr = replacer.filter(keyToStr).map(String);\n if (asStr.length > 0)\n replacer = replacer.concat(asStr);\n _replacer = replacer;\n }\n else if (options === undefined && replacer) {\n options = replacer;\n replacer = undefined;\n }\n const { aliasDuplicateObjects, anchorPrefix, flow, keepUndefined, onTagObj, tag } = options ?? {};\n const { onAnchor, setAnchors, sourceObjects } = anchors.createNodeAnchors(this, \n // eslint-disable-next-line @typescript-eslint/prefer-nullish-coalescing\n anchorPrefix || 'a');\n const ctx = {\n aliasDuplicateObjects: aliasDuplicateObjects ?? true,\n keepUndefined: keepUndefined ?? false,\n onAnchor,\n onTagObj,\n replacer: _replacer,\n schema: this.schema,\n sourceObjects\n };\n const node = createNode.createNode(value, tag, ctx);\n if (flow && Node.isCollection(node))\n node.flow = true;\n setAnchors();\n return node;\n }\n /**\n * Convert a key and a value into a `Pair` using the current schema,\n * recursively wrapping all values as `Scalar` or `Collection` nodes.\n */\n createPair(key, value, options = {}) {\n const k = this.createNode(key, null, options);\n const v = this.createNode(value, null, options);\n return new Pair.Pair(k, v);\n }\n /**\n * Removes a value from the document.\n * @returns `true` if the item was found and removed.\n */\n delete(key) {\n return assertCollection(this.contents) ? this.contents.delete(key) : false;\n }\n /**\n * Removes a value from the document.\n * @returns `true` if the item was found and removed.\n */\n deleteIn(path) {\n if (Collection.isEmptyPath(path)) {\n if (this.contents == null)\n return false;\n this.contents = null;\n return true;\n }\n return assertCollection(this.contents)\n ? this.contents.deleteIn(path)\n : false;\n }\n /**\n * Returns item at `key`, or `undefined` if not found. By default unwraps\n * scalar values from their surrounding node; to disable set `keepScalar` to\n * `true` (collections are always returned intact).\n */\n get(key, keepScalar) {\n return Node.isCollection(this.contents)\n ? this.contents.get(key, keepScalar)\n : undefined;\n }\n /**\n * Returns item at `path`, or `undefined` if not found. 
By default unwraps\n * scalar values from their surrounding node; to disable set `keepScalar` to\n * `true` (collections are always returned intact).\n */\n getIn(path, keepScalar) {\n if (Collection.isEmptyPath(path))\n return !keepScalar && Node.isScalar(this.contents)\n ? this.contents.value\n : this.contents;\n return Node.isCollection(this.contents)\n ? this.contents.getIn(path, keepScalar)\n : undefined;\n }\n /**\n * Checks if the document includes a value with the key `key`.\n */\n has(key) {\n return Node.isCollection(this.contents) ? this.contents.has(key) : false;\n }\n /**\n * Checks if the document includes a value at `path`.\n */\n hasIn(path) {\n if (Collection.isEmptyPath(path))\n return this.contents !== undefined;\n return Node.isCollection(this.contents) ? this.contents.hasIn(path) : false;\n }\n /**\n * Sets a value in this document. For `!!set`, `value` needs to be a\n * boolean to add/remove the item from the set.\n */\n set(key, value) {\n if (this.contents == null) {\n this.contents = Collection.collectionFromPath(this.schema, [key], value);\n }\n else if (assertCollection(this.contents)) {\n this.contents.set(key, value);\n }\n }\n /**\n * Sets a value in this document. For `!!set`, `value` needs to be a\n * boolean to add/remove the item from the set.\n */\n setIn(path, value) {\n if (Collection.isEmptyPath(path))\n this.contents = value;\n else if (this.contents == null) {\n this.contents = Collection.collectionFromPath(this.schema, Array.from(path), value);\n }\n else if (assertCollection(this.contents)) {\n this.contents.setIn(path, value);\n }\n }\n /**\n * Change the YAML version and schema used by the document.\n * A `null` version disables support for directives, explicit tags, anchors, and aliases.\n * It also requires the `schema` option to be given as a `Schema` instance value.\n *\n * Overrides all previously set schema options.\n */\n setSchema(version, options = {}) {\n if (typeof version === 'number')\n version = String(version);\n let opt;\n switch (version) {\n case '1.1':\n if (this.directives)\n this.directives.yaml.version = '1.1';\n else\n this.directives = new directives.Directives({ version: '1.1' });\n opt = { merge: true, resolveKnownTags: false, schema: 'yaml-1.1' };\n break;\n case '1.2':\n case 'next':\n if (this.directives)\n this.directives.yaml.version = version;\n else\n this.directives = new directives.Directives({ version });\n opt = { merge: false, resolveKnownTags: true, schema: 'core' };\n break;\n case null:\n if (this.directives)\n delete this.directives;\n opt = null;\n break;\n default: {\n const sv = JSON.stringify(version);\n throw new Error(`Expected '1.1', '1.2' or null as first argument, but found: ${sv}`);\n }\n }\n // Not using `instanceof Schema` to allow for duck typing\n if (options.schema instanceof Object)\n this.schema = options.schema;\n else if (opt)\n this.schema = new Schema.Schema(Object.assign(opt, options));\n else\n throw new Error(`With a null YAML version, the { schema: Schema } option is required`);\n }\n // json & jsonArg are only used from toJSON()\n toJS({ json, jsonArg, mapAsMap, maxAliasCount, onAnchor, reviver } = {}) {\n const ctx = {\n anchors: new Map(),\n doc: this,\n keep: !json,\n mapAsMap: mapAsMap === true,\n mapKeyWarned: false,\n maxAliasCount: typeof maxAliasCount === 'number' ? maxAliasCount : 100,\n stringify: stringify.stringify\n };\n const res = toJS.toJS(this.contents, jsonArg ?? 
'', ctx);\n if (typeof onAnchor === 'function')\n for (const { count, res } of ctx.anchors.values())\n onAnchor(res, count);\n return typeof reviver === 'function'\n ? applyReviver.applyReviver(reviver, { '': res }, '', res)\n : res;\n }\n /**\n * A JSON representation of the document `contents`.\n *\n * @param jsonArg Used by `JSON.stringify` to indicate the array index or\n * property name.\n */\n toJSON(jsonArg, onAnchor) {\n return this.toJS({ json: true, jsonArg, mapAsMap: false, onAnchor });\n }\n /** A YAML representation of the document. */\n toString(options = {}) {\n if (this.errors.length > 0)\n throw new Error('Document with errors cannot be stringified');\n if ('indent' in options &&\n (!Number.isInteger(options.indent) || Number(options.indent) <= 0)) {\n const s = JSON.stringify(options.indent);\n throw new Error(`\"indent\" option must be a positive integer, not ${s}`);\n }\n return stringifyDocument.stringifyDocument(this, options);\n }\n}\nfunction assertCollection(contents) {\n if (Node.isCollection(contents))\n return true;\n throw new Error('Expected a YAML collection as document contents');\n}\n\nexports.Document = Document;\n","'use strict';\n\nvar Node = require('../nodes/Node.js');\nvar visit = require('../visit.js');\n\n/**\n * Verify that the input string is a valid anchor.\n *\n * Will throw on errors.\n */\nfunction anchorIsValid(anchor) {\n if (/[\\x00-\\x19\\s,[\\]{}]/.test(anchor)) {\n const sa = JSON.stringify(anchor);\n const msg = `Anchor must not contain whitespace or control characters: ${sa}`;\n throw new Error(msg);\n }\n return true;\n}\nfunction anchorNames(root) {\n const anchors = new Set();\n visit.visit(root, {\n Value(_key, node) {\n if (node.anchor)\n anchors.add(node.anchor);\n }\n });\n return anchors;\n}\n/** Find a new anchor name with the given `prefix` and a one-indexed suffix. */\nfunction findNewAnchor(prefix, exclude) {\n for (let i = 1; true; ++i) {\n const name = `${prefix}${i}`;\n if (!exclude.has(name))\n return name;\n }\n}\nfunction createNodeAnchors(doc, prefix) {\n const aliasObjects = [];\n const sourceObjects = new Map();\n let prevAnchors = null;\n return {\n onAnchor: (source) => {\n aliasObjects.push(source);\n if (!prevAnchors)\n prevAnchors = anchorNames(doc);\n const anchor = findNewAnchor(prefix, prevAnchors);\n prevAnchors.add(anchor);\n return anchor;\n },\n /**\n * With circular references, the source node is only resolved after all\n * of its child nodes are. 
This is why anchors are set only after all of\n * the nodes have been created.\n */\n setAnchors: () => {\n for (const source of aliasObjects) {\n const ref = sourceObjects.get(source);\n if (typeof ref === 'object' &&\n ref.anchor &&\n (Node.isScalar(ref.node) || Node.isCollection(ref.node))) {\n ref.node.anchor = ref.anchor;\n }\n else {\n const error = new Error('Failed to resolve repeated object (this should not happen)');\n error.source = source;\n throw error;\n }\n }\n },\n sourceObjects\n };\n}\n\nexports.anchorIsValid = anchorIsValid;\nexports.anchorNames = anchorNames;\nexports.createNodeAnchors = createNodeAnchors;\nexports.findNewAnchor = findNewAnchor;\n","'use strict';\n\n/**\n * Applies the JSON.parse reviver algorithm as defined in the ECMA-262 spec,\n * in section 24.5.1.1 \"Runtime Semantics: InternalizeJSONProperty\" of the\n * 2021 edition: https://tc39.es/ecma262/#sec-json.parse\n *\n * Includes extensions for handling Map and Set objects.\n */\nfunction applyReviver(reviver, obj, key, val) {\n if (val && typeof val === 'object') {\n if (Array.isArray(val)) {\n for (let i = 0, len = val.length; i < len; ++i) {\n const v0 = val[i];\n const v1 = applyReviver(reviver, val, String(i), v0);\n if (v1 === undefined)\n delete val[i];\n else if (v1 !== v0)\n val[i] = v1;\n }\n }\n else if (val instanceof Map) {\n for (const k of Array.from(val.keys())) {\n const v0 = val.get(k);\n const v1 = applyReviver(reviver, val, k, v0);\n if (v1 === undefined)\n val.delete(k);\n else if (v1 !== v0)\n val.set(k, v1);\n }\n }\n else if (val instanceof Set) {\n for (const v0 of Array.from(val)) {\n const v1 = applyReviver(reviver, val, v0, v0);\n if (v1 === undefined)\n val.delete(v0);\n else if (v1 !== v0) {\n val.delete(v0);\n val.add(v1);\n }\n }\n }\n else {\n for (const [k, v0] of Object.entries(val)) {\n const v1 = applyReviver(reviver, val, k, v0);\n if (v1 === undefined)\n delete val[k];\n else if (v1 !== v0)\n val[k] = v1;\n }\n }\n }\n return reviver.call(obj, key, val);\n}\n\nexports.applyReviver = applyReviver;\n","'use strict';\n\nvar Alias = require('../nodes/Alias.js');\nvar Node = require('../nodes/Node.js');\nvar Scalar = require('../nodes/Scalar.js');\n\nconst defaultTagPrefix = 'tag:yaml.org,2002:';\nfunction findTagObject(value, tagName, tags) {\n if (tagName) {\n const match = tags.filter(t => t.tag === tagName);\n const tagObj = match.find(t => !t.format) ?? match[0];\n if (!tagObj)\n throw new Error(`Tag ${tagName} not found`);\n return tagObj;\n }\n return tags.find(t => t.identify?.(value) && !t.format);\n}\nfunction createNode(value, tagName, ctx) {\n if (Node.isDocument(value))\n value = value.contents;\n if (Node.isNode(value))\n return value;\n if (Node.isPair(value)) {\n const map = ctx.schema[Node.MAP].createNode?.(ctx.schema, null, ctx);\n map.items.push(value);\n return map;\n }\n if (value instanceof String ||\n value instanceof Number ||\n value instanceof Boolean ||\n (typeof BigInt !== 'undefined' && value instanceof BigInt) // not supported everywhere\n ) {\n // https://tc39.es/ecma262/#sec-serializejsonproperty\n value = value.valueOf();\n }\n const { aliasDuplicateObjects, onAnchor, onTagObj, schema, sourceObjects } = ctx;\n // Detect duplicate references to the same object & use Alias nodes for all\n // after first. 
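Editorial note: applyReviver above follows the JSON.parse reviver algorithm, and the library's parse() entry point (exported later in this bundle) accepts such a reviver as its second argument. A minimal sketch, for illustration only:

const YAML = require('yaml');

// The reviver is applied bottom-up to each key/value pair, like the second
// argument of JSON.parse; returning undefined removes the entry.
YAML.parse('a: 1\nb: 2\n', (key, value) =>
  typeof value === 'number' ? value * 2 : value
);
// -> { a: 2, b: 4 }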
The `ref` wrapper allows for circular references to resolve.\n let ref = undefined;\n if (aliasDuplicateObjects && value && typeof value === 'object') {\n ref = sourceObjects.get(value);\n if (ref) {\n if (!ref.anchor)\n ref.anchor = onAnchor(value);\n return new Alias.Alias(ref.anchor);\n }\n else {\n ref = { anchor: null, node: null };\n sourceObjects.set(value, ref);\n }\n }\n if (tagName?.startsWith('!!'))\n tagName = defaultTagPrefix + tagName.slice(2);\n let tagObj = findTagObject(value, tagName, schema.tags);\n if (!tagObj) {\n if (value && typeof value.toJSON === 'function') {\n // eslint-disable-next-line @typescript-eslint/no-unsafe-call\n value = value.toJSON();\n }\n if (!value || typeof value !== 'object') {\n const node = new Scalar.Scalar(value);\n if (ref)\n ref.node = node;\n return node;\n }\n tagObj =\n value instanceof Map\n ? schema[Node.MAP]\n : Symbol.iterator in Object(value)\n ? schema[Node.SEQ]\n : schema[Node.MAP];\n }\n if (onTagObj) {\n onTagObj(tagObj);\n delete ctx.onTagObj;\n }\n const node = tagObj?.createNode\n ? tagObj.createNode(ctx.schema, value, ctx)\n : new Scalar.Scalar(value);\n if (tagName)\n node.tag = tagName;\n if (ref)\n ref.node = node;\n return node;\n}\n\nexports.createNode = createNode;\n","'use strict';\n\nvar Node = require('../nodes/Node.js');\nvar visit = require('../visit.js');\n\nconst escapeChars = {\n '!': '%21',\n ',': '%2C',\n '[': '%5B',\n ']': '%5D',\n '{': '%7B',\n '}': '%7D'\n};\nconst escapeTagName = (tn) => tn.replace(/[!,[\\]{}]/g, ch => escapeChars[ch]);\nclass Directives {\n constructor(yaml, tags) {\n /**\n * The directives-end/doc-start marker `---`. If `null`, a marker may still be\n * included in the document's stringified representation.\n */\n this.docStart = null;\n /** The doc-end marker `...`. 
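Editorial note: createNode above detects repeated references to the same object and emits Alias nodes for the duplicates. A hedged sketch of how that surfaces through the Document constructor, which funnels plain JS values through this code path; the anchor names are auto-generated, so the output shown is only approximate.

const { Document } = require('yaml');

const shared = { x: 1 };
// aliasDuplicateObjects is on by default, so the second reference to `shared`
// becomes an alias of an auto-generated anchor on the first node.
const doc = new Document({ first: shared, second: shared });
String(doc);
// Approximately:
//   first: &a1
//     x: 1
//   second: *a1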
*/\n this.docEnd = false;\n this.yaml = Object.assign({}, Directives.defaultYaml, yaml);\n this.tags = Object.assign({}, Directives.defaultTags, tags);\n }\n clone() {\n const copy = new Directives(this.yaml, this.tags);\n copy.docStart = this.docStart;\n return copy;\n }\n /**\n * During parsing, get a Directives instance for the current document and\n * update the stream state according to the current version's spec.\n */\n atDocument() {\n const res = new Directives(this.yaml, this.tags);\n switch (this.yaml.version) {\n case '1.1':\n this.atNextDocument = true;\n break;\n case '1.2':\n this.atNextDocument = false;\n this.yaml = {\n explicit: Directives.defaultYaml.explicit,\n version: '1.2'\n };\n this.tags = Object.assign({}, Directives.defaultTags);\n break;\n }\n return res;\n }\n /**\n * @param onError - May be called even if the action was successful\n * @returns `true` on success\n */\n add(line, onError) {\n if (this.atNextDocument) {\n this.yaml = { explicit: Directives.defaultYaml.explicit, version: '1.1' };\n this.tags = Object.assign({}, Directives.defaultTags);\n this.atNextDocument = false;\n }\n const parts = line.trim().split(/[ \\t]+/);\n const name = parts.shift();\n switch (name) {\n case '%TAG': {\n if (parts.length !== 2) {\n onError(0, '%TAG directive should contain exactly two parts');\n if (parts.length < 2)\n return false;\n }\n const [handle, prefix] = parts;\n this.tags[handle] = prefix;\n return true;\n }\n case '%YAML': {\n this.yaml.explicit = true;\n if (parts.length !== 1) {\n onError(0, '%YAML directive should contain exactly one part');\n return false;\n }\n const [version] = parts;\n if (version === '1.1' || version === '1.2') {\n this.yaml.version = version;\n return true;\n }\n else {\n const isValid = /^\\d+\\.\\d+$/.test(version);\n onError(6, `Unsupported YAML version ${version}`, isValid);\n return false;\n }\n }\n default:\n onError(0, `Unknown directive ${name}`, true);\n return false;\n }\n }\n /**\n * Resolves a tag, matching handles to those defined in %TAG directives.\n *\n * @returns Resolved tag, which may also be the non-specific tag `'!'` or a\n * `'!local'` tag, or `null` if unresolvable.\n */\n tagName(source, onError) {\n if (source === '!')\n return '!'; // non-specific tag\n if (source[0] !== '!') {\n onError(`Not a valid tag: ${source}`);\n return null;\n }\n if (source[1] === '<') {\n const verbatim = source.slice(2, -1);\n if (verbatim === '!' || verbatim === '!!') {\n onError(`Verbatim tags aren't resolved, so ${source} is invalid.`);\n return null;\n }\n if (source[source.length - 1] !== '>')\n onError('Verbatim tags must end with a >');\n return verbatim;\n }\n const [, handle, suffix] = source.match(/^(.*!)([^!]*)$/);\n if (!suffix)\n onError(`The ${source} tag has no suffix`);\n const prefix = this.tags[handle];\n if (prefix)\n return prefix + decodeURIComponent(suffix);\n if (handle === '!')\n return source; // local tag\n onError(`Could not resolve tag: ${source}`);\n return null;\n }\n /**\n * Given a fully resolved tag, returns its printable string form,\n * taking into account current tag prefixes and defaults.\n */\n tagString(tag) {\n for (const [handle, prefix] of Object.entries(this.tags)) {\n if (tag.startsWith(prefix))\n return handle + escapeTagName(tag.substring(prefix.length));\n }\n return tag[0] === '!' ? tag : `!<${tag}>`;\n }\n toString(doc) {\n const lines = this.yaml.explicit\n ? 
[`%YAML ${this.yaml.version || '1.2'}`]\n : [];\n const tagEntries = Object.entries(this.tags);\n let tagNames;\n if (doc && tagEntries.length > 0 && Node.isNode(doc.contents)) {\n const tags = {};\n visit.visit(doc.contents, (_key, node) => {\n if (Node.isNode(node) && node.tag)\n tags[node.tag] = true;\n });\n tagNames = Object.keys(tags);\n }\n else\n tagNames = [];\n for (const [handle, prefix] of tagEntries) {\n if (handle === '!!' && prefix === 'tag:yaml.org,2002:')\n continue;\n if (!doc || tagNames.some(tn => tn.startsWith(prefix)))\n lines.push(`%TAG ${handle} ${prefix}`);\n }\n return lines.join('\\n');\n }\n}\nDirectives.defaultYaml = { explicit: false, version: '1.2' };\nDirectives.defaultTags = { '!!': 'tag:yaml.org,2002:' };\n\nexports.Directives = Directives;\n","'use strict';\n\nclass YAMLError extends Error {\n constructor(name, pos, code, message) {\n super();\n this.name = name;\n this.code = code;\n this.message = message;\n this.pos = pos;\n }\n}\nclass YAMLParseError extends YAMLError {\n constructor(pos, code, message) {\n super('YAMLParseError', pos, code, message);\n }\n}\nclass YAMLWarning extends YAMLError {\n constructor(pos, code, message) {\n super('YAMLWarning', pos, code, message);\n }\n}\nconst prettifyError = (src, lc) => (error) => {\n if (error.pos[0] === -1)\n return;\n error.linePos = error.pos.map(pos => lc.linePos(pos));\n const { line, col } = error.linePos[0];\n error.message += ` at line ${line}, column ${col}`;\n let ci = col - 1;\n let lineStr = src\n .substring(lc.lineStarts[line - 1], lc.lineStarts[line])\n .replace(/[\\n\\r]+$/, '');\n // Trim to max 80 chars, keeping col position near the middle\n if (ci >= 60 && lineStr.length > 80) {\n const trimStart = Math.min(ci - 39, lineStr.length - 79);\n lineStr = '…' + lineStr.substring(trimStart);\n ci -= trimStart - 1;\n }\n if (lineStr.length > 80)\n lineStr = lineStr.substring(0, 79) + '…';\n // Include previous line in context if pointing at line start\n if (line > 1 && /^ *$/.test(lineStr.substring(0, ci))) {\n // Regexp won't match if start is trimmed\n let prev = src.substring(lc.lineStarts[line - 2], lc.lineStarts[line - 1]);\n if (prev.length > 80)\n prev = prev.substring(0, 79) + '…\\n';\n lineStr = prev + lineStr;\n }\n if (/[^ ]/.test(lineStr)) {\n let count = 1;\n const end = error.linePos[1];\n if (end && end.line === line && end.col > col) {\n count = Math.min(end.col - col, 80 - ci);\n }\n const pointer = ' '.repeat(ci) + '^'.repeat(count);\n error.message += `:\\n\\n${lineStr}\\n${pointer}\\n`;\n }\n};\n\nexports.YAMLError = YAMLError;\nexports.YAMLParseError = YAMLParseError;\nexports.YAMLWarning = YAMLWarning;\nexports.prettifyError = prettifyError;\n","'use strict';\n\nvar composer = require('./compose/composer.js');\nvar Document = require('./doc/Document.js');\nvar Schema = require('./schema/Schema.js');\nvar errors = require('./errors.js');\nvar Alias = require('./nodes/Alias.js');\nvar Node = require('./nodes/Node.js');\nvar Pair = require('./nodes/Pair.js');\nvar Scalar = require('./nodes/Scalar.js');\nvar YAMLMap = require('./nodes/YAMLMap.js');\nvar YAMLSeq = require('./nodes/YAMLSeq.js');\nvar cst = require('./parse/cst.js');\nvar lexer = require('./parse/lexer.js');\nvar lineCounter = require('./parse/line-counter.js');\nvar parser = require('./parse/parser.js');\nvar publicApi = require('./public-api.js');\nvar visit = require('./visit.js');\n\n\n\nexports.Composer = composer.Composer;\nexports.Document = Document.Document;\nexports.Schema = 
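Editorial note: the YAMLParseError and prettifyError code above is what produces the entries on a document's errors array. Illustration only, assuming the default prettyErrors behaviour:

const { parseDocument } = require('yaml');

const doc = parseDocument('key: [ 1, 2\n');  // flow sequence is never closed
const [err] = doc.errors;
err.name;     // 'YAMLParseError'
err.pos;      // [startOffset, endOffset] into the source text
err.linePos;  // [{ line, col }, ...] added by prettifyError
err.message;  // ends with "at line L, column C" plus the caret pointer line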
Schema.Schema;\nexports.YAMLError = errors.YAMLError;\nexports.YAMLParseError = errors.YAMLParseError;\nexports.YAMLWarning = errors.YAMLWarning;\nexports.Alias = Alias.Alias;\nexports.isAlias = Node.isAlias;\nexports.isCollection = Node.isCollection;\nexports.isDocument = Node.isDocument;\nexports.isMap = Node.isMap;\nexports.isNode = Node.isNode;\nexports.isPair = Node.isPair;\nexports.isScalar = Node.isScalar;\nexports.isSeq = Node.isSeq;\nexports.Pair = Pair.Pair;\nexports.Scalar = Scalar.Scalar;\nexports.YAMLMap = YAMLMap.YAMLMap;\nexports.YAMLSeq = YAMLSeq.YAMLSeq;\nexports.CST = cst;\nexports.Lexer = lexer.Lexer;\nexports.LineCounter = lineCounter.LineCounter;\nexports.Parser = parser.Parser;\nexports.parse = publicApi.parse;\nexports.parseAllDocuments = publicApi.parseAllDocuments;\nexports.parseDocument = publicApi.parseDocument;\nexports.stringify = publicApi.stringify;\nexports.visit = visit.visit;\nexports.visitAsync = visit.visitAsync;\n","'use strict';\n\nfunction debug(logLevel, ...messages) {\n if (logLevel === 'debug')\n console.log(...messages);\n}\nfunction warn(logLevel, warning) {\n if (logLevel === 'debug' || logLevel === 'warn') {\n if (typeof process !== 'undefined' && process.emitWarning)\n process.emitWarning(warning);\n else\n console.warn(warning);\n }\n}\n\nexports.debug = debug;\nexports.warn = warn;\n","'use strict';\n\nvar anchors = require('../doc/anchors.js');\nvar visit = require('../visit.js');\nvar Node = require('./Node.js');\n\nclass Alias extends Node.NodeBase {\n constructor(source) {\n super(Node.ALIAS);\n this.source = source;\n Object.defineProperty(this, 'tag', {\n set() {\n throw new Error('Alias nodes cannot have tags');\n }\n });\n }\n /**\n * Resolve the value of this alias within `doc`, finding the last\n * instance of the `source` anchor before this node.\n */\n resolve(doc) {\n let found = undefined;\n visit.visit(doc, {\n Node: (_key, node) => {\n if (node === this)\n return visit.visit.BREAK;\n if (node.anchor === this.source)\n found = node;\n }\n });\n return found;\n }\n toJSON(_arg, ctx) {\n if (!ctx)\n return { source: this.source };\n const { anchors, doc, maxAliasCount } = ctx;\n const source = this.resolve(doc);\n if (!source) {\n const msg = `Unresolved alias (the anchor must be set before the alias): ${this.source}`;\n throw new ReferenceError(msg);\n }\n const data = anchors.get(source);\n /* istanbul ignore if */\n if (!data || data.res === undefined) {\n const msg = 'This should not happen: Alias anchor was not resolved?';\n throw new ReferenceError(msg);\n }\n if (maxAliasCount >= 0) {\n data.count += 1;\n if (data.aliasCount === 0)\n data.aliasCount = getAliasCount(doc, source, anchors);\n if (data.count * data.aliasCount > maxAliasCount) {\n const msg = 'Excessive alias count indicates a resource exhaustion attack';\n throw new ReferenceError(msg);\n }\n }\n return data.res;\n }\n toString(ctx, _onComment, _onChompKeep) {\n const src = `*${this.source}`;\n if (ctx) {\n anchors.anchorIsValid(this.source);\n if (ctx.options.verifyAliasOrder && !ctx.anchors.has(this.source)) {\n const msg = `Unresolved alias (the anchor must be set before the alias): ${this.source}`;\n throw new Error(msg);\n }\n if (ctx.implicitKey)\n return `${src} `;\n }\n return src;\n }\n}\nfunction getAliasCount(doc, node, anchors) {\n if (Node.isAlias(node)) {\n const source = node.resolve(doc);\n const anchor = anchors && source && anchors.get(source);\n return anchor ? 
anchor.count * anchor.aliasCount : 0;\n }\n else if (Node.isCollection(node)) {\n let count = 0;\n for (const item of node.items) {\n const c = getAliasCount(doc, item, anchors);\n if (c > count)\n count = c;\n }\n return count;\n }\n else if (Node.isPair(node)) {\n const kc = getAliasCount(doc, node.key, anchors);\n const vc = getAliasCount(doc, node.value, anchors);\n return Math.max(kc, vc);\n }\n return 1;\n}\n\nexports.Alias = Alias;\n","'use strict';\n\nvar createNode = require('../doc/createNode.js');\nvar Node = require('./Node.js');\n\nfunction collectionFromPath(schema, path, value) {\n let v = value;\n for (let i = path.length - 1; i >= 0; --i) {\n const k = path[i];\n if (typeof k === 'number' && Number.isInteger(k) && k >= 0) {\n const a = [];\n a[k] = v;\n v = a;\n }\n else {\n v = new Map([[k, v]]);\n }\n }\n return createNode.createNode(v, undefined, {\n aliasDuplicateObjects: false,\n keepUndefined: false,\n onAnchor: () => {\n throw new Error('This should not happen, please report a bug.');\n },\n schema,\n sourceObjects: new Map()\n });\n}\n// Type guard is intentionally a little wrong so as to be more useful,\n// as it does not cover untypable empty non-string iterables (e.g. []).\nconst isEmptyPath = (path) => path == null ||\n (typeof path === 'object' && !!path[Symbol.iterator]().next().done);\nclass Collection extends Node.NodeBase {\n constructor(type, schema) {\n super(type);\n Object.defineProperty(this, 'schema', {\n value: schema,\n configurable: true,\n enumerable: false,\n writable: true\n });\n }\n /**\n * Create a copy of this collection.\n *\n * @param schema - If defined, overwrites the original's schema\n */\n clone(schema) {\n const copy = Object.create(Object.getPrototypeOf(this), Object.getOwnPropertyDescriptors(this));\n if (schema)\n copy.schema = schema;\n copy.items = copy.items.map(it => Node.isNode(it) || Node.isPair(it) ? it.clone(schema) : it);\n if (this.range)\n copy.range = this.range.slice();\n return copy;\n }\n /**\n * Adds a value to the collection. For `!!map` and `!!omap` the value must\n * be a Pair instance or a `{ key, value }` object, which may not have a key\n * that already exists in the map.\n */\n addIn(path, value) {\n if (isEmptyPath(path))\n this.add(value);\n else {\n const [key, ...rest] = path;\n const node = this.get(key, true);\n if (Node.isCollection(node))\n node.addIn(rest, value);\n else if (node === undefined && this.schema)\n this.set(key, collectionFromPath(this.schema, rest, value));\n else\n throw new Error(`Expected YAML collection at ${key}. Remaining path: ${rest}`);\n }\n }\n /**\n * Removes a value from the collection.\n * @returns `true` if the item was found and removed.\n */\n deleteIn(path) {\n const [key, ...rest] = path;\n if (rest.length === 0)\n return this.delete(key);\n const node = this.get(key, true);\n if (Node.isCollection(node))\n return node.deleteIn(rest);\n else\n throw new Error(`Expected YAML collection at ${key}. Remaining path: ${rest}`);\n }\n /**\n * Returns item at `key`, or `undefined` if not found. By default unwraps\n * scalar values from their surrounding node; to disable set `keepScalar` to\n * `true` (collections are always returned intact).\n */\n getIn(path, keepScalar) {\n const [key, ...rest] = path;\n const node = this.get(key, true);\n if (rest.length === 0)\n return !keepScalar && Node.isScalar(node) ? node.value : node;\n else\n return Node.isCollection(node) ? 
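Editorial note: the Alias resolution and maxAliasCount guard above are reachable from the public API. Illustration only:

const YAML = require('yaml');

YAML.parse('base: &a [1, 2]\ncopy: *a\n');
// -> { base: [1, 2], copy: [1, 2] }

const doc = YAML.parseDocument('base: &a [1, 2]\ncopy: *a\n');
YAML.isAlias(doc.getIn(['copy'], true));  // true: the node is an Alias, resolved on toJS()
// maxAliasCount (default 100) rejects pathological alias expansion such as
// "billion laughs" documents; a negative value disables the check.
doc.toJS({ maxAliasCount: -1 });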
node.getIn(rest, keepScalar) : undefined;\n }\n hasAllNullValues(allowScalar) {\n return this.items.every(node => {\n if (!Node.isPair(node))\n return false;\n const n = node.value;\n return (n == null ||\n (allowScalar &&\n Node.isScalar(n) &&\n n.value == null &&\n !n.commentBefore &&\n !n.comment &&\n !n.tag));\n });\n }\n /**\n * Checks if the collection includes a value with the key `key`.\n */\n hasIn(path) {\n const [key, ...rest] = path;\n if (rest.length === 0)\n return this.has(key);\n const node = this.get(key, true);\n return Node.isCollection(node) ? node.hasIn(rest) : false;\n }\n /**\n * Sets a value in this collection. For `!!set`, `value` needs to be a\n * boolean to add/remove the item from the set.\n */\n setIn(path, value) {\n const [key, ...rest] = path;\n if (rest.length === 0) {\n this.set(key, value);\n }\n else {\n const node = this.get(key, true);\n if (Node.isCollection(node))\n node.setIn(rest, value);\n else if (node === undefined && this.schema)\n this.set(key, collectionFromPath(this.schema, rest, value));\n else\n throw new Error(`Expected YAML collection at ${key}. Remaining path: ${rest}`);\n }\n }\n}\nCollection.maxFlowStringSingleLineLength = 60;\n\nexports.Collection = Collection;\nexports.collectionFromPath = collectionFromPath;\nexports.isEmptyPath = isEmptyPath;\n","'use strict';\n\nconst ALIAS = Symbol.for('yaml.alias');\nconst DOC = Symbol.for('yaml.document');\nconst MAP = Symbol.for('yaml.map');\nconst PAIR = Symbol.for('yaml.pair');\nconst SCALAR = Symbol.for('yaml.scalar');\nconst SEQ = Symbol.for('yaml.seq');\nconst NODE_TYPE = Symbol.for('yaml.node.type');\nconst isAlias = (node) => !!node && typeof node === 'object' && node[NODE_TYPE] === ALIAS;\nconst isDocument = (node) => !!node && typeof node === 'object' && node[NODE_TYPE] === DOC;\nconst isMap = (node) => !!node && typeof node === 'object' && node[NODE_TYPE] === MAP;\nconst isPair = (node) => !!node && typeof node === 'object' && node[NODE_TYPE] === PAIR;\nconst isScalar = (node) => !!node && typeof node === 'object' && node[NODE_TYPE] === SCALAR;\nconst isSeq = (node) => !!node && typeof node === 'object' && node[NODE_TYPE] === SEQ;\nfunction isCollection(node) {\n if (node && typeof node === 'object')\n switch (node[NODE_TYPE]) {\n case MAP:\n case SEQ:\n return true;\n }\n return false;\n}\nfunction isNode(node) {\n if (node && typeof node === 'object')\n switch (node[NODE_TYPE]) {\n case ALIAS:\n case MAP:\n case SCALAR:\n case SEQ:\n return true;\n }\n return false;\n}\nconst hasAnchor = (node) => (isScalar(node) || isCollection(node)) && !!node.anchor;\nclass NodeBase {\n constructor(type) {\n Object.defineProperty(this, NODE_TYPE, { value: type });\n }\n /** Create a copy of this node. 
*/\n clone() {\n const copy = Object.create(Object.getPrototypeOf(this), Object.getOwnPropertyDescriptors(this));\n if (this.range)\n copy.range = this.range.slice();\n return copy;\n }\n}\n\nexports.ALIAS = ALIAS;\nexports.DOC = DOC;\nexports.MAP = MAP;\nexports.NODE_TYPE = NODE_TYPE;\nexports.NodeBase = NodeBase;\nexports.PAIR = PAIR;\nexports.SCALAR = SCALAR;\nexports.SEQ = SEQ;\nexports.hasAnchor = hasAnchor;\nexports.isAlias = isAlias;\nexports.isCollection = isCollection;\nexports.isDocument = isDocument;\nexports.isMap = isMap;\nexports.isNode = isNode;\nexports.isPair = isPair;\nexports.isScalar = isScalar;\nexports.isSeq = isSeq;\n","'use strict';\n\nvar createNode = require('../doc/createNode.js');\nvar stringifyPair = require('../stringify/stringifyPair.js');\nvar addPairToJSMap = require('./addPairToJSMap.js');\nvar Node = require('./Node.js');\n\nfunction createPair(key, value, ctx) {\n const k = createNode.createNode(key, undefined, ctx);\n const v = createNode.createNode(value, undefined, ctx);\n return new Pair(k, v);\n}\nclass Pair {\n constructor(key, value = null) {\n Object.defineProperty(this, Node.NODE_TYPE, { value: Node.PAIR });\n this.key = key;\n this.value = value;\n }\n clone(schema) {\n let { key, value } = this;\n if (Node.isNode(key))\n key = key.clone(schema);\n if (Node.isNode(value))\n value = value.clone(schema);\n return new Pair(key, value);\n }\n toJSON(_, ctx) {\n const pair = ctx?.mapAsMap ? new Map() : {};\n return addPairToJSMap.addPairToJSMap(ctx, pair, this);\n }\n toString(ctx, onComment, onChompKeep) {\n return ctx?.doc\n ? stringifyPair.stringifyPair(this, ctx, onComment, onChompKeep)\n : JSON.stringify(this);\n }\n}\n\nexports.Pair = Pair;\nexports.createPair = createPair;\n","'use strict';\n\nvar Node = require('./Node.js');\nvar toJS = require('./toJS.js');\n\nconst isScalarValue = (value) => !value || (typeof value !== 'function' && typeof value !== 'object');\nclass Scalar extends Node.NodeBase {\n constructor(value) {\n super(Node.SCALAR);\n this.value = value;\n }\n toJSON(arg, ctx) {\n return ctx?.keep ? this.value : toJS.toJS(this.value, arg, ctx);\n }\n toString() {\n return String(this.value);\n }\n}\nScalar.BLOCK_FOLDED = 'BLOCK_FOLDED';\nScalar.BLOCK_LITERAL = 'BLOCK_LITERAL';\nScalar.PLAIN = 'PLAIN';\nScalar.QUOTE_DOUBLE = 'QUOTE_DOUBLE';\nScalar.QUOTE_SINGLE = 'QUOTE_SINGLE';\n\nexports.Scalar = Scalar;\nexports.isScalarValue = isScalarValue;\n","'use strict';\n\nvar stringifyCollection = require('../stringify/stringifyCollection.js');\nvar addPairToJSMap = require('./addPairToJSMap.js');\nvar Collection = require('./Collection.js');\nvar Node = require('./Node.js');\nvar Pair = require('./Pair.js');\nvar Scalar = require('./Scalar.js');\n\nfunction findPair(items, key) {\n const k = Node.isScalar(key) ? key.value : key;\n for (const it of items) {\n if (Node.isPair(it)) {\n if (it.key === key || it.key === k)\n return it;\n if (Node.isScalar(it.key) && it.key.value === k)\n return it;\n }\n }\n return undefined;\n}\nclass YAMLMap extends Collection.Collection {\n static get tagName() {\n return 'tag:yaml.org,2002:map';\n }\n constructor(schema) {\n super(Node.MAP, schema);\n this.items = [];\n }\n /**\n * Adds a value to the collection.\n *\n * @param overwrite - If not set `true`, using a key that is already in the\n * collection will throw. 
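Editorial note: the NODE_TYPE symbol and guards above back the package's exported isNode/isMap/isSeq/isScalar/isPair helpers. Illustration only:

const { parseDocument, isMap, isSeq, isScalar, isPair } = require('yaml');

const doc = parseDocument('items:\n  - id: 1\n');
isMap(doc.contents);                            // true
isPair(doc.contents.items[0]);                  // true (the 'items' key/value pair)
isSeq(doc.getIn(['items'], true));              // true
isScalar(doc.getIn(['items', 0, 'id'], true));  // true (Scalar wrapping 1)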
Otherwise, overwrites the previous value.\n */\n add(pair, overwrite) {\n let _pair;\n if (Node.isPair(pair))\n _pair = pair;\n else if (!pair || typeof pair !== 'object' || !('key' in pair)) {\n // In TypeScript, this never happens.\n _pair = new Pair.Pair(pair, pair?.value);\n }\n else\n _pair = new Pair.Pair(pair.key, pair.value);\n const prev = findPair(this.items, _pair.key);\n const sortEntries = this.schema?.sortMapEntries;\n if (prev) {\n if (!overwrite)\n throw new Error(`Key ${_pair.key} already set`);\n // For scalars, keep the old node & its comments and anchors\n if (Node.isScalar(prev.value) && Scalar.isScalarValue(_pair.value))\n prev.value.value = _pair.value;\n else\n prev.value = _pair.value;\n }\n else if (sortEntries) {\n const i = this.items.findIndex(item => sortEntries(_pair, item) < 0);\n if (i === -1)\n this.items.push(_pair);\n else\n this.items.splice(i, 0, _pair);\n }\n else {\n this.items.push(_pair);\n }\n }\n delete(key) {\n const it = findPair(this.items, key);\n if (!it)\n return false;\n const del = this.items.splice(this.items.indexOf(it), 1);\n return del.length > 0;\n }\n get(key, keepScalar) {\n const it = findPair(this.items, key);\n const node = it?.value;\n return (!keepScalar && Node.isScalar(node) ? node.value : node) ?? undefined;\n }\n has(key) {\n return !!findPair(this.items, key);\n }\n set(key, value) {\n this.add(new Pair.Pair(key, value), true);\n }\n /**\n * @param ctx - Conversion context, originally set in Document#toJS()\n * @param {Class} Type - If set, forces the returned collection type\n * @returns Instance of Type, Map, or Object\n */\n toJSON(_, ctx, Type) {\n const map = Type ? new Type() : ctx?.mapAsMap ? new Map() : {};\n if (ctx?.onCreate)\n ctx.onCreate(map);\n for (const item of this.items)\n addPairToJSMap.addPairToJSMap(ctx, map, item);\n return map;\n }\n toString(ctx, onComment, onChompKeep) {\n if (!ctx)\n return JSON.stringify(this);\n for (const item of this.items) {\n if (!Node.isPair(item))\n throw new Error(`Map items must all be pairs; found ${JSON.stringify(item)} instead`);\n }\n if (!ctx.allNullValues && this.hasAllNullValues(false))\n ctx = Object.assign({}, ctx, { allNullValues: true });\n return stringifyCollection.stringifyCollection(this, ctx, {\n blockItemPrefix: '',\n flowChars: { start: '{', end: '}' },\n itemIndent: ctx.indent || '',\n onChompKeep,\n onComment\n });\n }\n}\n\nexports.YAMLMap = YAMLMap;\nexports.findPair = findPair;\n","'use strict';\n\nvar stringifyCollection = require('../stringify/stringifyCollection.js');\nvar Collection = require('./Collection.js');\nvar Node = require('./Node.js');\nvar Scalar = require('./Scalar.js');\nvar toJS = require('./toJS.js');\n\nclass YAMLSeq extends Collection.Collection {\n static get tagName() {\n return 'tag:yaml.org,2002:seq';\n }\n constructor(schema) {\n super(Node.SEQ, schema);\n this.items = [];\n }\n add(value) {\n this.items.push(value);\n }\n /**\n * Removes a value from the collection.\n *\n * `key` must contain a representation of an integer for this to succeed.\n * It may be wrapped in a `Scalar`.\n *\n * @returns `true` if the item was found and removed.\n */\n delete(key) {\n const idx = asItemIndex(key);\n if (typeof idx !== 'number')\n return false;\n const del = this.items.splice(idx, 1);\n return del.length > 0;\n }\n get(key, keepScalar) {\n const idx = asItemIndex(key);\n if (typeof idx !== 'number')\n return undefined;\n const it = this.items[idx];\n return !keepScalar && Node.isScalar(it) ? 
it.value : it;\n }\n /**\n * Checks if the collection includes a value with the key `key`.\n *\n * `key` must contain a representation of an integer for this to succeed.\n * It may be wrapped in a `Scalar`.\n */\n has(key) {\n const idx = asItemIndex(key);\n return typeof idx === 'number' && idx < this.items.length;\n }\n /**\n * Sets a value in this collection. For `!!set`, `value` needs to be a\n * boolean to add/remove the item from the set.\n *\n * If `key` does not contain a representation of an integer, this will throw.\n * It may be wrapped in a `Scalar`.\n */\n set(key, value) {\n const idx = asItemIndex(key);\n if (typeof idx !== 'number')\n throw new Error(`Expected a valid index, not ${key}.`);\n const prev = this.items[idx];\n if (Node.isScalar(prev) && Scalar.isScalarValue(value))\n prev.value = value;\n else\n this.items[idx] = value;\n }\n toJSON(_, ctx) {\n const seq = [];\n if (ctx?.onCreate)\n ctx.onCreate(seq);\n let i = 0;\n for (const item of this.items)\n seq.push(toJS.toJS(item, String(i++), ctx));\n return seq;\n }\n toString(ctx, onComment, onChompKeep) {\n if (!ctx)\n return JSON.stringify(this);\n return stringifyCollection.stringifyCollection(this, ctx, {\n blockItemPrefix: '- ',\n flowChars: { start: '[', end: ']' },\n itemIndent: (ctx.indent || '') + ' ',\n onChompKeep,\n onComment\n });\n }\n}\nfunction asItemIndex(key) {\n let idx = Node.isScalar(key) ? key.value : key;\n if (idx && typeof idx === 'string')\n idx = Number(idx);\n return typeof idx === 'number' && Number.isInteger(idx) && idx >= 0\n ? idx\n : null;\n}\n\nexports.YAMLSeq = YAMLSeq;\n","'use strict';\n\nvar log = require('../log.js');\nvar stringify = require('../stringify/stringify.js');\nvar Node = require('./Node.js');\nvar Scalar = require('./Scalar.js');\nvar toJS = require('./toJS.js');\n\nconst MERGE_KEY = '<<';\nfunction addPairToJSMap(ctx, map, { key, value }) {\n if (ctx?.doc.schema.merge && isMergeKey(key)) {\n value = Node.isAlias(value) ? value.resolve(ctx.doc) : value;\n if (Node.isSeq(value))\n for (const it of value.items)\n mergeToJSMap(ctx, map, it);\n else if (Array.isArray(value))\n for (const it of value)\n mergeToJSMap(ctx, map, it);\n else\n mergeToJSMap(ctx, map, value);\n }\n else {\n const jsKey = toJS.toJS(key, '', ctx);\n if (map instanceof Map) {\n map.set(jsKey, toJS.toJS(value, jsKey, ctx));\n }\n else if (map instanceof Set) {\n map.add(jsKey);\n }\n else {\n const stringKey = stringifyKey(key, jsKey, ctx);\n const jsValue = toJS.toJS(value, stringKey, ctx);\n if (stringKey in map)\n Object.defineProperty(map, stringKey, {\n value: jsValue,\n writable: true,\n enumerable: true,\n configurable: true\n });\n else\n map[stringKey] = jsValue;\n }\n }\n return map;\n}\nconst isMergeKey = (key) => key === MERGE_KEY ||\n (Node.isScalar(key) &&\n key.value === MERGE_KEY &&\n (!key.type || key.type === Scalar.Scalar.PLAIN));\n// If the value associated with a merge key is a single mapping node, each of\n// its key/value pairs is inserted into the current mapping, unless the key\n// already exists in it. If the value associated with the merge key is a\n// sequence, then this sequence is expected to contain mapping nodes and each\n// of these nodes is merged in turn according to its order in the sequence.\n// Keys in mapping nodes earlier in the sequence override keys specified in\n// later mapping nodes. -- http://yaml.org/type/merge.html\nfunction mergeToJSMap(ctx, map, value) {\n const source = ctx && Node.isAlias(value) ? 
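Editorial note: YAMLMap and YAMLSeq above are the concrete collection nodes; they can be edited in place and re-stringified without losing comments. Illustration only:

const { parseDocument } = require('yaml');

const doc = parseDocument('a: 1 # keep me\nlist: [x, y]\n');
const map = doc.contents;    // YAMLMap
map.set('b', 2);             // add(new Pair(key, value), true) under the hood
map.get('a');                // 1 (scalar unwrapped)
const seq = map.get('list'); // YAMLSeq (collection nodes are returned as-is)
seq.add('z');
seq.has(2);                  // true
String(doc);                 // comments and the flow layout are preserved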
value.resolve(ctx.doc) : value;\n if (!Node.isMap(source))\n throw new Error('Merge sources must be maps or map aliases');\n const srcMap = source.toJSON(null, ctx, Map);\n for (const [key, value] of srcMap) {\n if (map instanceof Map) {\n if (!map.has(key))\n map.set(key, value);\n }\n else if (map instanceof Set) {\n map.add(key);\n }\n else if (!Object.prototype.hasOwnProperty.call(map, key)) {\n Object.defineProperty(map, key, {\n value,\n writable: true,\n enumerable: true,\n configurable: true\n });\n }\n }\n return map;\n}\nfunction stringifyKey(key, jsKey, ctx) {\n if (jsKey === null)\n return '';\n if (typeof jsKey !== 'object')\n return String(jsKey);\n if (Node.isNode(key) && ctx && ctx.doc) {\n const strCtx = stringify.createStringifyContext(ctx.doc, {});\n strCtx.anchors = new Set();\n for (const node of ctx.anchors.keys())\n strCtx.anchors.add(node.anchor);\n strCtx.inFlow = true;\n strCtx.inStringifyKey = true;\n const strKey = key.toString(strCtx);\n if (!ctx.mapKeyWarned) {\n let jsonStr = JSON.stringify(strKey);\n if (jsonStr.length > 40)\n jsonStr = jsonStr.substring(0, 36) + '...\"';\n log.warn(ctx.doc.options.logLevel, `Keys with collection values will be stringified due to JS Object restrictions: ${jsonStr}. Set mapAsMap: true to use object keys.`);\n ctx.mapKeyWarned = true;\n }\n return strKey;\n }\n return JSON.stringify(jsKey);\n}\n\nexports.addPairToJSMap = addPairToJSMap;\n","'use strict';\n\nvar Node = require('./Node.js');\n\n/**\n * Recursively convert any node or its contents to native JavaScript\n *\n * @param value - The input value\n * @param arg - If `value` defines a `toJSON()` method, use this\n * as its first argument\n * @param ctx - Conversion context, originally set in Document#toJS(). If\n * `{ keep: true }` is not set, output should be suitable for JSON\n * stringification.\n */\nfunction toJS(value, arg, ctx) {\n // eslint-disable-next-line @typescript-eslint/no-unsafe-return\n if (Array.isArray(value))\n return value.map((v, i) => toJS(v, String(i), ctx));\n if (value && typeof value.toJSON === 'function') {\n // eslint-disable-next-line @typescript-eslint/no-unsafe-call\n if (!ctx || !Node.hasAnchor(value))\n return value.toJSON(arg, ctx);\n const data = { aliasCount: 0, count: 1, res: undefined };\n ctx.anchors.set(value, data);\n ctx.onCreate = res => {\n data.res = res;\n delete ctx.onCreate;\n };\n const res = value.toJSON(arg, ctx);\n if (ctx.onCreate)\n ctx.onCreate(res);\n return res;\n }\n if (typeof value === 'bigint' && !ctx?.keep)\n return Number(value);\n return value;\n}\n\nexports.toJS = toJS;\n","'use strict';\n\nvar resolveBlockScalar = require('../compose/resolve-block-scalar.js');\nvar resolveFlowScalar = require('../compose/resolve-flow-scalar.js');\nvar errors = require('../errors.js');\nvar stringifyString = require('../stringify/stringifyString.js');\n\nfunction resolveAsScalar(token, strict = true, onError) {\n if (token) {\n const _onError = (pos, code, message) => {\n const offset = typeof pos === 'number' ? pos : Array.isArray(pos) ? 
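Editorial note: addPairToJSMap above implements the YAML 1.1 merge-key (<<) behaviour quoted in its comment; it is reachable through the merge schema option. Illustration only:

const YAML = require('yaml');

const src = 'base: &b { x: 1, y: 2 }\nderived:\n  <<: *b\n  y: 3\n';
YAML.parse(src, { merge: true });
// -> { base: { x: 1, y: 2 }, derived: { x: 1, y: 3 } }
// Keys already present in the target ('y') win over merged-in keys, matching
// the hasOwnProperty checks in mergeToJSMap.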
pos[0] : pos.offset;\n if (onError)\n onError(offset, code, message);\n else\n throw new errors.YAMLParseError([offset, offset + 1], code, message);\n };\n switch (token.type) {\n case 'scalar':\n case 'single-quoted-scalar':\n case 'double-quoted-scalar':\n return resolveFlowScalar.resolveFlowScalar(token, strict, _onError);\n case 'block-scalar':\n return resolveBlockScalar.resolveBlockScalar(token, strict, _onError);\n }\n }\n return null;\n}\n/**\n * Create a new scalar token with `value`\n *\n * Values that represent an actual string but may be parsed as a different type should use a `type` other than `'PLAIN'`,\n * as this function does not support any schema operations and won't check for such conflicts.\n *\n * @param value The string representation of the value, which will have its content properly indented.\n * @param context.end Comments and whitespace after the end of the value, or after the block scalar header. If undefined, a newline will be added.\n * @param context.implicitKey Being within an implicit key may affect the resolved type of the token's value.\n * @param context.indent The indent level of the token.\n * @param context.inFlow Is this scalar within a flow collection? This may affect the resolved type of the token's value.\n * @param context.offset The offset position of the token.\n * @param context.type The preferred type of the scalar token. If undefined, the previous type of the `token` will be used, defaulting to `'PLAIN'`.\n */\nfunction createScalarToken(value, context) {\n const { implicitKey = false, indent, inFlow = false, offset = -1, type = 'PLAIN' } = context;\n const source = stringifyString.stringifyString({ type, value }, {\n implicitKey,\n indent: indent > 0 ? ' '.repeat(indent) : '',\n inFlow,\n options: { blockQuote: true, lineWidth: -1 }\n });\n const end = context.end ?? [\n { type: 'newline', offset: -1, indent, source: '\\n' }\n ];\n switch (source[0]) {\n case '|':\n case '>': {\n const he = source.indexOf('\\n');\n const head = source.substring(0, he);\n const body = source.substring(he + 1) + '\\n';\n const props = [\n { type: 'block-scalar-header', offset, indent, source: head }\n ];\n if (!addEndtoBlockProps(props, end))\n props.push({ type: 'newline', offset: -1, indent, source: '\\n' });\n return { type: 'block-scalar', offset, indent, props, source: body };\n }\n case '\"':\n return { type: 'double-quoted-scalar', offset, indent, source, end };\n case \"'\":\n return { type: 'single-quoted-scalar', offset, indent, source, end };\n default:\n return { type: 'scalar', offset, indent, source, end };\n }\n}\n/**\n * Set the value of `token` to the given string `value`, overwriting any previous contents and type that it may have.\n *\n * Best efforts are made to retain any comments previously associated with the `token`,\n * though all contents within a collection's `items` will be overwritten.\n *\n * Values that represent an actual string but may be parsed as a different type should use a `type` other than `'PLAIN'`,\n * as this function does not support any schema operations and won't check for such conflicts.\n *\n * @param token Any token. 
If it does not include an `indent` value, the value will be stringified as if it were an implicit key.\n * @param value The string representation of the value, which will have its content properly indented.\n * @param context.afterKey In most cases, values after a key should have an additional level of indentation.\n * @param context.implicitKey Being within an implicit key may affect the resolved type of the token's value.\n * @param context.inFlow Being within a flow collection may affect the resolved type of the token's value.\n * @param context.type The preferred type of the scalar token. If undefined, the previous type of the `token` will be used, defaulting to `'PLAIN'`.\n */\nfunction setScalarValue(token, value, context = {}) {\n let { afterKey = false, implicitKey = false, inFlow = false, type } = context;\n let indent = 'indent' in token ? token.indent : null;\n if (afterKey && typeof indent === 'number')\n indent += 2;\n if (!type)\n switch (token.type) {\n case 'single-quoted-scalar':\n type = 'QUOTE_SINGLE';\n break;\n case 'double-quoted-scalar':\n type = 'QUOTE_DOUBLE';\n break;\n case 'block-scalar': {\n const header = token.props[0];\n if (header.type !== 'block-scalar-header')\n throw new Error('Invalid block scalar header');\n type = header.source[0] === '>' ? 'BLOCK_FOLDED' : 'BLOCK_LITERAL';\n break;\n }\n default:\n type = 'PLAIN';\n }\n const source = stringifyString.stringifyString({ type, value }, {\n implicitKey: implicitKey || indent === null,\n indent: indent !== null && indent > 0 ? ' '.repeat(indent) : '',\n inFlow,\n options: { blockQuote: true, lineWidth: -1 }\n });\n switch (source[0]) {\n case '|':\n case '>':\n setBlockScalarValue(token, source);\n break;\n case '\"':\n setFlowScalarValue(token, source, 'double-quoted-scalar');\n break;\n case \"'\":\n setFlowScalarValue(token, source, 'single-quoted-scalar');\n break;\n default:\n setFlowScalarValue(token, source, 'scalar');\n }\n}\nfunction setBlockScalarValue(token, source) {\n const he = source.indexOf('\\n');\n const head = source.substring(0, he);\n const body = source.substring(he + 1) + '\\n';\n if (token.type === 'block-scalar') {\n const header = token.props[0];\n if (header.type !== 'block-scalar-header')\n throw new Error('Invalid block scalar header');\n header.source = head;\n token.source = body;\n }\n else {\n const { offset } = token;\n const indent = 'indent' in token ? token.indent : -1;\n const props = [\n { type: 'block-scalar-header', offset, indent, source: head }\n ];\n if (!addEndtoBlockProps(props, 'end' in token ? 
token.end : undefined))\n props.push({ type: 'newline', offset: -1, indent, source: '\\n' });\n for (const key of Object.keys(token))\n if (key !== 'type' && key !== 'offset')\n delete token[key];\n Object.assign(token, { type: 'block-scalar', indent, props, source: body });\n }\n}\n/** @returns `true` if last token is a newline */\nfunction addEndtoBlockProps(props, end) {\n if (end)\n for (const st of end)\n switch (st.type) {\n case 'space':\n case 'comment':\n props.push(st);\n break;\n case 'newline':\n props.push(st);\n return true;\n }\n return false;\n}\nfunction setFlowScalarValue(token, source, type) {\n switch (token.type) {\n case 'scalar':\n case 'double-quoted-scalar':\n case 'single-quoted-scalar':\n token.type = type;\n token.source = source;\n break;\n case 'block-scalar': {\n const end = token.props.slice(1);\n let oa = source.length;\n if (token.props[0].type === 'block-scalar-header')\n oa -= token.props[0].source.length;\n for (const tok of end)\n tok.offset += oa;\n delete token.props;\n Object.assign(token, { type, source, end });\n break;\n }\n case 'block-map':\n case 'block-seq': {\n const offset = token.offset + source.length;\n const nl = { type: 'newline', offset, indent: token.indent, source: '\\n' };\n delete token.items;\n Object.assign(token, { type, source, end: [nl] });\n break;\n }\n default: {\n const indent = 'indent' in token ? token.indent : -1;\n const end = 'end' in token && Array.isArray(token.end)\n ? token.end.filter(st => st.type === 'space' ||\n st.type === 'comment' ||\n st.type === 'newline')\n : [];\n for (const key of Object.keys(token))\n if (key !== 'type' && key !== 'offset')\n delete token[key];\n Object.assign(token, { type, indent, source, end });\n }\n }\n}\n\nexports.createScalarToken = createScalarToken;\nexports.resolveAsScalar = resolveAsScalar;\nexports.setScalarValue = setScalarValue;\n","'use strict';\n\n/**\n * Stringify a CST document, token, or collection item\n *\n * Fair warning: This applies no validation whatsoever, and\n * simply concatenates the sources in their logical order.\n */\nconst stringify = (cst) => 'type' in cst ? 
stringifyToken(cst) : stringifyItem(cst);\nfunction stringifyToken(token) {\n switch (token.type) {\n case 'block-scalar': {\n let res = '';\n for (const tok of token.props)\n res += stringifyToken(tok);\n return res + token.source;\n }\n case 'block-map':\n case 'block-seq': {\n let res = '';\n for (const item of token.items)\n res += stringifyItem(item);\n return res;\n }\n case 'flow-collection': {\n let res = token.start.source;\n for (const item of token.items)\n res += stringifyItem(item);\n for (const st of token.end)\n res += st.source;\n return res;\n }\n case 'document': {\n let res = stringifyItem(token);\n if (token.end)\n for (const st of token.end)\n res += st.source;\n return res;\n }\n default: {\n let res = token.source;\n if ('end' in token && token.end)\n for (const st of token.end)\n res += st.source;\n return res;\n }\n }\n}\nfunction stringifyItem({ start, key, sep, value }) {\n let res = '';\n for (const st of start)\n res += st.source;\n if (key)\n res += stringifyToken(key);\n if (sep)\n for (const st of sep)\n res += st.source;\n if (value)\n res += stringifyToken(value);\n return res;\n}\n\nexports.stringify = stringify;\n","'use strict';\n\nconst BREAK = Symbol('break visit');\nconst SKIP = Symbol('skip children');\nconst REMOVE = Symbol('remove item');\n/**\n * Apply a visitor to a CST document or item.\n *\n * Walks through the tree (depth-first) starting from the root, calling a\n * `visitor` function with two arguments when entering each item:\n * - `item`: The current item, which included the following members:\n * - `start: SourceToken[]` – Source tokens before the key or value,\n * possibly including its anchor or tag.\n * - `key?: Token | null` – Set for pair values. May then be `null`, if\n * the key before the `:` separator is empty.\n * - `sep?: SourceToken[]` – Source tokens between the key and the value,\n * which should include the `:` map value indicator if `value` is set.\n * - `value?: Token` – The value of a sequence item, or of a map pair.\n * - `path`: The steps from the root to the current node, as an array of\n * `['key' | 'value', number]` tuples.\n *\n * The return value of the visitor may be used to control the traversal:\n * - `undefined` (default): Do nothing and continue\n * - `visit.SKIP`: Do not visit the children of this token, continue with\n * next sibling\n * - `visit.BREAK`: Terminate traversal completely\n * - `visit.REMOVE`: Remove the current item, then continue with the next one\n * - `number`: Set the index of the next step. This is useful especially if\n * the index of the current token has changed.\n * - `function`: Define the next visitor for this item. 
After the original\n * visitor is called on item entry, next visitors are called after handling\n * a non-empty `key` and when exiting the item.\n */\nfunction visit(cst, visitor) {\n if ('type' in cst && cst.type === 'document')\n cst = { start: cst.start, value: cst.value };\n _visit(Object.freeze([]), cst, visitor);\n}\n// Without the `as symbol` casts, TS declares these in the `visit`\n// namespace using `var`, but then complains about that because\n// `unique symbol` must be `const`.\n/** Terminate visit traversal completely */\nvisit.BREAK = BREAK;\n/** Do not visit the children of the current item */\nvisit.SKIP = SKIP;\n/** Remove the current item */\nvisit.REMOVE = REMOVE;\n/** Find the item at `path` from `cst` as the root */\nvisit.itemAtPath = (cst, path) => {\n let item = cst;\n for (const [field, index] of path) {\n const tok = item?.[field];\n if (tok && 'items' in tok) {\n item = tok.items[index];\n }\n else\n return undefined;\n }\n return item;\n};\n/**\n * Get the immediate parent collection of the item at `path` from `cst` as the root.\n *\n * Throws an error if the collection is not found, which should never happen if the item itself exists.\n */\nvisit.parentCollection = (cst, path) => {\n const parent = visit.itemAtPath(cst, path.slice(0, -1));\n const field = path[path.length - 1][0];\n const coll = parent?.[field];\n if (coll && 'items' in coll)\n return coll;\n throw new Error('Parent collection not found');\n};\nfunction _visit(path, item, visitor) {\n let ctrl = visitor(item, path);\n if (typeof ctrl === 'symbol')\n return ctrl;\n for (const field of ['key', 'value']) {\n const token = item[field];\n if (token && 'items' in token) {\n for (let i = 0; i < token.items.length; ++i) {\n const ci = _visit(Object.freeze(path.concat([[field, i]])), token.items[i], visitor);\n if (typeof ci === 'number')\n i = ci - 1;\n else if (ci === BREAK)\n return BREAK;\n else if (ci === REMOVE) {\n token.items.splice(i, 1);\n i -= 1;\n }\n }\n if (typeof ctrl === 'function' && field === 'key')\n ctrl = ctrl(item, path);\n }\n }\n return typeof ctrl === 'function' ? ctrl(item, path) : ctrl;\n}\n\nexports.visit = visit;\n","'use strict';\n\nvar cstScalar = require('./cst-scalar.js');\nvar cstStringify = require('./cst-stringify.js');\nvar cstVisit = require('./cst-visit.js');\n\n/** The byte order mark */\nconst BOM = '\\u{FEFF}';\n/** Start of doc-mode */\nconst DOCUMENT = '\\x02'; // C0: Start of Text\n/** Unexpected end of flow-mode */\nconst FLOW_END = '\\x18'; // C0: Cancel\n/** Next token is a scalar value */\nconst SCALAR = '\\x1f'; // C0: Unit Separator\n/** @returns `true` if `token` is a flow or block collection */\nconst isCollection = (token) => !!token && 'items' in token;\n/** @returns `true` if `token` is a flow or block scalar; not an alias */\nconst isScalar = (token) => !!token &&\n (token.type === 'scalar' ||\n token.type === 'single-quoted-scalar' ||\n token.type === 'double-quoted-scalar' ||\n token.type === 'block-scalar');\n/* istanbul ignore next */\n/** Get a printable representation of a lexer token */\nfunction prettyToken(token) {\n switch (token) {\n case BOM:\n return '';\n case DOCUMENT:\n return '';\n case FLOW_END:\n return '';\n case SCALAR:\n return '';\n default:\n return JSON.stringify(token);\n }\n}\n/** Identify the type of a lexer token. May return `null` for unknown tokens. 
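Editorial note: the CST helpers above (resolveAsScalar, setScalarValue, stringify, visit) support lossless in-place edits on parser output. A hedged sketch, assuming the Parser yields a single document token for this input:

const { Parser, CST } = require('yaml');

// Flip one scalar value in the CST and re-print without losing formatting.
const [doc] = new Parser().parse('msg: hello # greeting\n');
CST.visit(doc, ({ value }) => {
  if (CST.isScalar(value) && CST.resolveAsScalar(value).value === 'hello')
    CST.setScalarValue(value, 'world');
});
CST.stringify(doc);  // 'msg: world # greeting\n'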
*/\nfunction tokenType(source) {\n switch (source) {\n case BOM:\n return 'byte-order-mark';\n case DOCUMENT:\n return 'doc-mode';\n case FLOW_END:\n return 'flow-error-end';\n case SCALAR:\n return 'scalar';\n case '---':\n return 'doc-start';\n case '...':\n return 'doc-end';\n case '':\n case '\\n':\n case '\\r\\n':\n return 'newline';\n case '-':\n return 'seq-item-ind';\n case '?':\n return 'explicit-key-ind';\n case ':':\n return 'map-value-ind';\n case '{':\n return 'flow-map-start';\n case '}':\n return 'flow-map-end';\n case '[':\n return 'flow-seq-start';\n case ']':\n return 'flow-seq-end';\n case ',':\n return 'comma';\n }\n switch (source[0]) {\n case ' ':\n case '\\t':\n return 'space';\n case '#':\n return 'comment';\n case '%':\n return 'directive-line';\n case '*':\n return 'alias';\n case '&':\n return 'anchor';\n case '!':\n return 'tag';\n case \"'\":\n return 'single-quoted-scalar';\n case '\"':\n return 'double-quoted-scalar';\n case '|':\n case '>':\n return 'block-scalar-header';\n }\n return null;\n}\n\nexports.createScalarToken = cstScalar.createScalarToken;\nexports.resolveAsScalar = cstScalar.resolveAsScalar;\nexports.setScalarValue = cstScalar.setScalarValue;\nexports.stringify = cstStringify.stringify;\nexports.visit = cstVisit.visit;\nexports.BOM = BOM;\nexports.DOCUMENT = DOCUMENT;\nexports.FLOW_END = FLOW_END;\nexports.SCALAR = SCALAR;\nexports.isCollection = isCollection;\nexports.isScalar = isScalar;\nexports.prettyToken = prettyToken;\nexports.tokenType = tokenType;\n","'use strict';\n\nvar cst = require('./cst.js');\n\n/*\nSTART -> stream\n\nstream\n directive -> line-end -> stream\n indent + line-end -> stream\n [else] -> line-start\n\nline-end\n comment -> line-end\n newline -> .\n input-end -> END\n\nline-start\n doc-start -> doc\n doc-end -> stream\n [else] -> indent -> block-start\n\nblock-start\n seq-item-start -> block-start\n explicit-key-start -> block-start\n map-value-start -> block-start\n [else] -> doc\n\ndoc\n line-end -> line-start\n spaces -> doc\n anchor -> doc\n tag -> doc\n flow-start -> flow -> doc\n flow-end -> error -> doc\n seq-item-start -> error -> doc\n explicit-key-start -> error -> doc\n map-value-start -> doc\n alias -> doc\n quote-start -> quoted-scalar -> doc\n block-scalar-header -> line-end -> block-scalar(min) -> line-start\n [else] -> plain-scalar(false, min) -> doc\n\nflow\n line-end -> flow\n spaces -> flow\n anchor -> flow\n tag -> flow\n flow-start -> flow -> flow\n flow-end -> .\n seq-item-start -> error -> flow\n explicit-key-start -> flow\n map-value-start -> flow\n alias -> flow\n quote-start -> quoted-scalar -> flow\n comma -> flow\n [else] -> plain-scalar(true, 0) -> flow\n\nquoted-scalar\n quote-end -> .\n [else] -> quoted-scalar\n\nblock-scalar(min)\n newline + peek(indent < min) -> .\n [else] -> block-scalar(min)\n\nplain-scalar(is-flow, min)\n scalar-end(is-flow) -> .\n peek(newline + (indent < min)) -> .\n [else] -> plain-scalar(min)\n*/\nfunction isEmpty(ch) {\n switch (ch) {\n case undefined:\n case ' ':\n case '\\n':\n case '\\r':\n case '\\t':\n return true;\n default:\n return false;\n }\n}\nconst hexDigits = '0123456789ABCDEFabcdef'.split('');\nconst tagChars = \"0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz-#;/?:@&=+$_.!~*'()\".split('');\nconst invalidFlowScalarChars = ',[]{}'.split('');\nconst invalidAnchorChars = ' ,[]{}\\n\\r\\t'.split('');\nconst isNotAnchorChar = (ch) => !ch || invalidAnchorChars.includes(ch);\n/**\n * Splits an input string into lexical tokens, i.e. 
smaller strings that are\n * easily identifiable by `tokens.tokenType()`.\n *\n * Lexing starts always in a \"stream\" context. Incomplete input may be buffered\n * until a complete token can be emitted.\n *\n * In addition to slices of the original input, the following control characters\n * may also be emitted:\n *\n * - `\\x02` (Start of Text): A document starts with the next token\n * - `\\x18` (Cancel): Unexpected end of flow-mode (indicates an error)\n * - `\\x1f` (Unit Separator): Next token is a scalar value\n * - `\\u{FEFF}` (Byte order mark): Emitted separately outside documents\n */\nclass Lexer {\n constructor() {\n /**\n * Flag indicating whether the end of the current buffer marks the end of\n * all input\n */\n this.atEnd = false;\n /**\n * Explicit indent set in block scalar header, as an offset from the current\n * minimum indent, so e.g. set to 1 from a header `|2+`. Set to -1 if not\n * explicitly set.\n */\n this.blockScalarIndent = -1;\n /**\n * Block scalars that include a + (keep) chomping indicator in their header\n * include trailing empty lines, which are otherwise excluded from the\n * scalar's contents.\n */\n this.blockScalarKeep = false;\n /** Current input */\n this.buffer = '';\n /**\n * Flag noting whether the map value indicator : can immediately follow this\n * node within a flow context.\n */\n this.flowKey = false;\n /** Count of surrounding flow collection levels. */\n this.flowLevel = 0;\n /**\n * Minimum level of indentation required for next lines to be parsed as a\n * part of the current scalar value.\n */\n this.indentNext = 0;\n /** Indentation level of the current line. */\n this.indentValue = 0;\n /** Position of the next \\n character. */\n this.lineEndPos = null;\n /** Stores the state of the lexer if reaching the end of incpomplete input */\n this.next = null;\n /** A pointer to `buffer`; the current position of the lexer. */\n this.pos = 0;\n }\n /**\n * Generate YAML tokens from the `source` string. If `incomplete`,\n * a part of the last line may be left as a buffer for the next call.\n *\n * @returns A generator of lexical tokens\n */\n *lex(source, incomplete = false) {\n if (source) {\n this.buffer = this.buffer ? this.buffer + source : source;\n this.lineEndPos = null;\n }\n this.atEnd = !incomplete;\n let next = this.next ?? 'stream';\n while (next && (incomplete || this.hasChars(1)))\n next = yield* this.parseNext(next);\n }\n atLineEnd() {\n let i = this.pos;\n let ch = this.buffer[i];\n while (ch === ' ' || ch === '\\t')\n ch = this.buffer[++i];\n if (!ch || ch === '#' || ch === '\\n')\n return true;\n if (ch === '\\r')\n return this.buffer[i + 1] === '\\n';\n return false;\n }\n charAt(n) {\n return this.buffer[this.pos + n];\n }\n continueScalar(offset) {\n let ch = this.buffer[offset];\n if (this.indentNext > 0) {\n let indent = 0;\n while (ch === ' ')\n ch = this.buffer[++indent + offset];\n if (ch === '\\r') {\n const next = this.buffer[indent + offset + 1];\n if (next === '\\n' || (!next && !this.atEnd))\n return offset + indent + 1;\n }\n return ch === '\\n' || indent >= this.indentNext || (!ch && !this.atEnd)\n ? 
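Editorial note: the Lexer above yields plain string tokens plus the control characters listed in its doc comment. A rough illustration; the exact token boundaries shown are an assumption, not taken from this bundle:

const { Lexer } = require('yaml');

const tokens = Array.from(new Lexer().lex('key: value\n'));
// Roughly: ['\x02', '\x1f', 'key', ':', ' ', '\x1f', 'value', '\n']
//   \x02 marks the start of a document, \x1f marks that the next token is a
//   scalar value; all other tokens are verbatim slices of the input.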
offset + indent\n : -1;\n }\n if (ch === '-' || ch === '.') {\n const dt = this.buffer.substr(offset, 3);\n if ((dt === '---' || dt === '...') && isEmpty(this.buffer[offset + 3]))\n return -1;\n }\n return offset;\n }\n getLine() {\n let end = this.lineEndPos;\n if (typeof end !== 'number' || (end !== -1 && end < this.pos)) {\n end = this.buffer.indexOf('\\n', this.pos);\n this.lineEndPos = end;\n }\n if (end === -1)\n return this.atEnd ? this.buffer.substring(this.pos) : null;\n if (this.buffer[end - 1] === '\\r')\n end -= 1;\n return this.buffer.substring(this.pos, end);\n }\n hasChars(n) {\n return this.pos + n <= this.buffer.length;\n }\n setNext(state) {\n this.buffer = this.buffer.substring(this.pos);\n this.pos = 0;\n this.lineEndPos = null;\n this.next = state;\n return null;\n }\n peek(n) {\n return this.buffer.substr(this.pos, n);\n }\n *parseNext(next) {\n switch (next) {\n case 'stream':\n return yield* this.parseStream();\n case 'line-start':\n return yield* this.parseLineStart();\n case 'block-start':\n return yield* this.parseBlockStart();\n case 'doc':\n return yield* this.parseDocument();\n case 'flow':\n return yield* this.parseFlowCollection();\n case 'quoted-scalar':\n return yield* this.parseQuotedScalar();\n case 'block-scalar':\n return yield* this.parseBlockScalar();\n case 'plain-scalar':\n return yield* this.parsePlainScalar();\n }\n }\n *parseStream() {\n let line = this.getLine();\n if (line === null)\n return this.setNext('stream');\n if (line[0] === cst.BOM) {\n yield* this.pushCount(1);\n line = line.substring(1);\n }\n if (line[0] === '%') {\n let dirEnd = line.length;\n const cs = line.indexOf('#');\n if (cs !== -1) {\n const ch = line[cs - 1];\n if (ch === ' ' || ch === '\\t')\n dirEnd = cs - 1;\n }\n while (true) {\n const ch = line[dirEnd - 1];\n if (ch === ' ' || ch === '\\t')\n dirEnd -= 1;\n else\n break;\n }\n const n = (yield* this.pushCount(dirEnd)) + (yield* this.pushSpaces(true));\n yield* this.pushCount(line.length - n); // possible comment\n this.pushNewline();\n return 'stream';\n }\n if (this.atLineEnd()) {\n const sp = yield* this.pushSpaces(true);\n yield* this.pushCount(line.length - sp);\n yield* this.pushNewline();\n return 'stream';\n }\n yield cst.DOCUMENT;\n return yield* this.parseLineStart();\n }\n *parseLineStart() {\n const ch = this.charAt(0);\n if (!ch && !this.atEnd)\n return this.setNext('line-start');\n if (ch === '-' || ch === '.') {\n if (!this.atEnd && !this.hasChars(4))\n return this.setNext('line-start');\n const s = this.peek(3);\n if (s === '---' && isEmpty(this.charAt(3))) {\n yield* this.pushCount(3);\n this.indentValue = 0;\n this.indentNext = 0;\n return 'doc';\n }\n else if (s === '...' && isEmpty(this.charAt(3))) {\n yield* this.pushCount(3);\n return 'stream';\n }\n }\n this.indentValue = yield* this.pushSpaces(false);\n if (this.indentNext > this.indentValue && !isEmpty(this.charAt(1)))\n this.indentNext = this.indentValue;\n return yield* this.parseBlockStart();\n }\n *parseBlockStart() {\n const [ch0, ch1] = this.peek(2);\n if (!ch1 && !this.atEnd)\n return this.setNext('block-start');\n if ((ch0 === '-' || ch0 === '?' 
|| ch0 === ':') && isEmpty(ch1)) {\n const n = (yield* this.pushCount(1)) + (yield* this.pushSpaces(true));\n this.indentNext = this.indentValue + 1;\n this.indentValue += n;\n return yield* this.parseBlockStart();\n }\n return 'doc';\n }\n *parseDocument() {\n yield* this.pushSpaces(true);\n const line = this.getLine();\n if (line === null)\n return this.setNext('doc');\n let n = yield* this.pushIndicators();\n switch (line[n]) {\n case '#':\n yield* this.pushCount(line.length - n);\n // fallthrough\n case undefined:\n yield* this.pushNewline();\n return yield* this.parseLineStart();\n case '{':\n case '[':\n yield* this.pushCount(1);\n this.flowKey = false;\n this.flowLevel = 1;\n return 'flow';\n case '}':\n case ']':\n // this is an error\n yield* this.pushCount(1);\n return 'doc';\n case '*':\n yield* this.pushUntil(isNotAnchorChar);\n return 'doc';\n case '\"':\n case \"'\":\n return yield* this.parseQuotedScalar();\n case '|':\n case '>':\n n += yield* this.parseBlockScalarHeader();\n n += yield* this.pushSpaces(true);\n yield* this.pushCount(line.length - n);\n yield* this.pushNewline();\n return yield* this.parseBlockScalar();\n default:\n return yield* this.parsePlainScalar();\n }\n }\n *parseFlowCollection() {\n let nl, sp;\n let indent = -1;\n do {\n nl = yield* this.pushNewline();\n if (nl > 0) {\n sp = yield* this.pushSpaces(false);\n this.indentValue = indent = sp;\n }\n else {\n sp = 0;\n }\n sp += yield* this.pushSpaces(true);\n } while (nl + sp > 0);\n const line = this.getLine();\n if (line === null)\n return this.setNext('flow');\n if ((indent !== -1 && indent < this.indentNext && line[0] !== '#') ||\n (indent === 0 &&\n (line.startsWith('---') || line.startsWith('...')) &&\n isEmpty(line[3]))) {\n // Allowing for the terminal ] or } at the same (rather than greater)\n // indent level as the initial [ or { is technically invalid, but\n // failing here would be surprising to users.\n const atFlowEndMarker = indent === this.indentNext - 1 &&\n this.flowLevel === 1 &&\n (line[0] === ']' || line[0] === '}');\n if (!atFlowEndMarker) {\n // this is an error\n this.flowLevel = 0;\n yield cst.FLOW_END;\n return yield* this.parseLineStart();\n }\n }\n let n = 0;\n while (line[n] === ',') {\n n += yield* this.pushCount(1);\n n += yield* this.pushSpaces(true);\n this.flowKey = false;\n }\n n += yield* this.pushIndicators();\n switch (line[n]) {\n case undefined:\n return 'flow';\n case '#':\n yield* this.pushCount(line.length - n);\n return 'flow';\n case '{':\n case '[':\n yield* this.pushCount(1);\n this.flowKey = false;\n this.flowLevel += 1;\n return 'flow';\n case '}':\n case ']':\n yield* this.pushCount(1);\n this.flowKey = true;\n this.flowLevel -= 1;\n return this.flowLevel ? 
'flow' : 'doc';\n case '*':\n yield* this.pushUntil(isNotAnchorChar);\n return 'flow';\n case '\"':\n case \"'\":\n this.flowKey = true;\n return yield* this.parseQuotedScalar();\n case ':': {\n const next = this.charAt(1);\n if (this.flowKey || isEmpty(next) || next === ',') {\n this.flowKey = false;\n yield* this.pushCount(1);\n yield* this.pushSpaces(true);\n return 'flow';\n }\n }\n // fallthrough\n default:\n this.flowKey = false;\n return yield* this.parsePlainScalar();\n }\n }\n *parseQuotedScalar() {\n const quote = this.charAt(0);\n let end = this.buffer.indexOf(quote, this.pos + 1);\n if (quote === \"'\") {\n while (end !== -1 && this.buffer[end + 1] === \"'\")\n end = this.buffer.indexOf(\"'\", end + 2);\n }\n else {\n // double-quote\n while (end !== -1) {\n let n = 0;\n while (this.buffer[end - 1 - n] === '\\\\')\n n += 1;\n if (n % 2 === 0)\n break;\n end = this.buffer.indexOf('\"', end + 1);\n }\n }\n // Only looking for newlines within the quotes\n const qb = this.buffer.substring(0, end);\n let nl = qb.indexOf('\\n', this.pos);\n if (nl !== -1) {\n while (nl !== -1) {\n const cs = this.continueScalar(nl + 1);\n if (cs === -1)\n break;\n nl = qb.indexOf('\\n', cs);\n }\n if (nl !== -1) {\n // this is an error caused by an unexpected unindent\n end = nl - (qb[nl - 1] === '\\r' ? 2 : 1);\n }\n }\n if (end === -1) {\n if (!this.atEnd)\n return this.setNext('quoted-scalar');\n end = this.buffer.length;\n }\n yield* this.pushToIndex(end + 1, false);\n return this.flowLevel ? 'flow' : 'doc';\n }\n *parseBlockScalarHeader() {\n this.blockScalarIndent = -1;\n this.blockScalarKeep = false;\n let i = this.pos;\n while (true) {\n const ch = this.buffer[++i];\n if (ch === '+')\n this.blockScalarKeep = true;\n else if (ch > '0' && ch <= '9')\n this.blockScalarIndent = Number(ch) - 1;\n else if (ch !== '-')\n break;\n }\n return yield* this.pushUntil(ch => isEmpty(ch) || ch === '#');\n }\n *parseBlockScalar() {\n let nl = this.pos - 1; // may be -1 if this.pos === 0\n let indent = 0;\n let ch;\n loop: for (let i = this.pos; (ch = this.buffer[i]); ++i) {\n switch (ch) {\n case ' ':\n indent += 1;\n break;\n case '\\n':\n nl = i;\n indent = 0;\n break;\n case '\\r': {\n const next = this.buffer[i + 1];\n if (!next && !this.atEnd)\n return this.setNext('block-scalar');\n if (next === '\\n')\n break;\n } // fallthrough\n default:\n break loop;\n }\n }\n if (!ch && !this.atEnd)\n return this.setNext('block-scalar');\n if (indent >= this.indentNext) {\n if (this.blockScalarIndent === -1)\n this.indentNext = indent;\n else\n this.indentNext += this.blockScalarIndent;\n do {\n const cs = this.continueScalar(nl + 1);\n if (cs === -1)\n break;\n nl = this.buffer.indexOf('\\n', cs);\n } while (nl !== -1);\n if (nl === -1) {\n if (!this.atEnd)\n return this.setNext('block-scalar');\n nl = this.buffer.length;\n }\n }\n if (!this.blockScalarKeep) {\n do {\n let i = nl - 1;\n let ch = this.buffer[i];\n if (ch === '\\r')\n ch = this.buffer[--i];\n const lastChar = i; // Drop the line if last char not more indented\n while (ch === ' ' || ch === '\\t')\n ch = this.buffer[--i];\n if (ch === '\\n' && i >= this.pos && i + 1 + indent > lastChar)\n nl = i;\n else\n break;\n } while (true);\n }\n yield cst.SCALAR;\n yield* this.pushToIndex(nl + 1, true);\n return yield* this.parseLineStart();\n }\n *parsePlainScalar() {\n const inFlow = this.flowLevel > 0;\n let end = this.pos - 1;\n let i = this.pos - 1;\n let ch;\n while ((ch = this.buffer[++i])) {\n if (ch === ':') {\n const next = this.buffer[i + 1];\n if 
(isEmpty(next) || (inFlow && next === ','))\n break;\n end = i;\n }\n else if (isEmpty(ch)) {\n let next = this.buffer[i + 1];\n if (ch === '\\r') {\n if (next === '\\n') {\n i += 1;\n ch = '\\n';\n next = this.buffer[i + 1];\n }\n else\n end = i;\n }\n if (next === '#' || (inFlow && invalidFlowScalarChars.includes(next)))\n break;\n if (ch === '\\n') {\n const cs = this.continueScalar(i + 1);\n if (cs === -1)\n break;\n i = Math.max(i, cs - 2); // to advance, but still account for ' #'\n }\n }\n else {\n if (inFlow && invalidFlowScalarChars.includes(ch))\n break;\n end = i;\n }\n }\n if (!ch && !this.atEnd)\n return this.setNext('plain-scalar');\n yield cst.SCALAR;\n yield* this.pushToIndex(end + 1, true);\n return inFlow ? 'flow' : 'doc';\n }\n *pushCount(n) {\n if (n > 0) {\n yield this.buffer.substr(this.pos, n);\n this.pos += n;\n return n;\n }\n return 0;\n }\n *pushToIndex(i, allowEmpty) {\n const s = this.buffer.slice(this.pos, i);\n if (s) {\n yield s;\n this.pos += s.length;\n return s.length;\n }\n else if (allowEmpty)\n yield '';\n return 0;\n }\n *pushIndicators() {\n switch (this.charAt(0)) {\n case '!':\n return ((yield* this.pushTag()) +\n (yield* this.pushSpaces(true)) +\n (yield* this.pushIndicators()));\n case '&':\n return ((yield* this.pushUntil(isNotAnchorChar)) +\n (yield* this.pushSpaces(true)) +\n (yield* this.pushIndicators()));\n case '-': // this is an error\n case '?': // this is an error outside flow collections\n case ':': {\n const inFlow = this.flowLevel > 0;\n const ch1 = this.charAt(1);\n if (isEmpty(ch1) || (inFlow && invalidFlowScalarChars.includes(ch1))) {\n if (!inFlow)\n this.indentNext = this.indentValue + 1;\n else if (this.flowKey)\n this.flowKey = false;\n return ((yield* this.pushCount(1)) +\n (yield* this.pushSpaces(true)) +\n (yield* this.pushIndicators()));\n }\n }\n }\n return 0;\n }\n *pushTag() {\n if (this.charAt(1) === '<') {\n let i = this.pos + 2;\n let ch = this.buffer[i];\n while (!isEmpty(ch) && ch !== '>')\n ch = this.buffer[++i];\n return yield* this.pushToIndex(ch === '>' ? i + 1 : i, false);\n }\n else {\n let i = this.pos + 1;\n let ch = this.buffer[i];\n while (ch) {\n if (tagChars.includes(ch))\n ch = this.buffer[++i];\n else if (ch === '%' &&\n hexDigits.includes(this.buffer[i + 1]) &&\n hexDigits.includes(this.buffer[i + 2])) {\n ch = this.buffer[(i += 3)];\n }\n else\n break;\n }\n return yield* this.pushToIndex(i, false);\n }\n }\n *pushNewline() {\n const ch = this.buffer[this.pos];\n if (ch === '\\n')\n return yield* this.pushCount(1);\n else if (ch === '\\r' && this.charAt(1) === '\\n')\n return yield* this.pushCount(2);\n else\n return 0;\n }\n *pushSpaces(allowTabs) {\n let i = this.pos - 1;\n let ch;\n do {\n ch = this.buffer[++i];\n } while (ch === ' ' || (allowTabs && ch === '\\t'));\n const n = i - this.pos;\n if (n > 0) {\n yield this.buffer.substr(this.pos, n);\n this.pos = i;\n }\n return n;\n }\n *pushUntil(test) {\n let i = this.pos;\n let ch = this.buffer[i];\n while (!test(ch))\n ch = this.buffer[++i];\n return yield* this.pushToIndex(i, false);\n }\n}\n\nexports.Lexer = Lexer;\n","'use strict';\n\n/**\n * Tracks newlines during parsing in order to provide an efficient API for\n * determining the one-indexed `{ line, col }` position for any offset\n * within the input.\n */\nclass LineCounter {\n constructor() {\n this.lineStarts = [];\n /**\n * Should be called in ascending order. 
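As a minimal usage sketch of the token stream described in the Lexer doc comment above (the `Lexer` class is a top-level export of the bundled `yaml` package; the exact token sequence shown is indicative, not guaranteed):

const { Lexer } = require('yaml');

// Tokens are either slices of the input or the control characters listed
// above: '\x02' document start, '\x1f' scalar marker, '\x18' flow error end,
// plus a separately emitted byte order mark.
for (const token of new Lexer().lex('key: value\n')) {
  console.log(JSON.stringify(token));
}
// Logs roughly: "\u0002", "\u001f", "key", ":", " ", "\u001f", "value", "\n"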
Otherwise, call\n * `lineCounter.lineStarts.sort()` before calling `linePos()`.\n */\n this.addNewLine = (offset) => this.lineStarts.push(offset);\n /**\n * Performs a binary search and returns the 1-indexed { line, col }\n * position of `offset`. If `line === 0`, `addNewLine` has never been\n * called or `offset` is before the first known newline.\n */\n this.linePos = (offset) => {\n let low = 0;\n let high = this.lineStarts.length;\n while (low < high) {\n const mid = (low + high) >> 1; // Math.floor((low + high) / 2)\n if (this.lineStarts[mid] < offset)\n low = mid + 1;\n else\n high = mid;\n }\n if (this.lineStarts[low] === offset)\n return { line: low + 1, col: 1 };\n if (low === 0)\n return { line: 0, col: offset };\n const start = this.lineStarts[low - 1];\n return { line: low, col: offset - start + 1 };\n };\n }\n}\n\nexports.LineCounter = LineCounter;\n","'use strict';\n\nvar cst = require('./cst.js');\nvar lexer = require('./lexer.js');\n\nfunction includesToken(list, type) {\n for (let i = 0; i < list.length; ++i)\n if (list[i].type === type)\n return true;\n return false;\n}\nfunction findNonEmptyIndex(list) {\n for (let i = 0; i < list.length; ++i) {\n switch (list[i].type) {\n case 'space':\n case 'comment':\n case 'newline':\n break;\n default:\n return i;\n }\n }\n return -1;\n}\nfunction isFlowToken(token) {\n switch (token?.type) {\n case 'alias':\n case 'scalar':\n case 'single-quoted-scalar':\n case 'double-quoted-scalar':\n case 'flow-collection':\n return true;\n default:\n return false;\n }\n}\nfunction getPrevProps(parent) {\n switch (parent.type) {\n case 'document':\n return parent.start;\n case 'block-map': {\n const it = parent.items[parent.items.length - 1];\n return it.sep ?? it.start;\n }\n case 'block-seq':\n return parent.items[parent.items.length - 1].start;\n /* istanbul ignore next should not happen */\n default:\n return [];\n }\n}\n/** Note: May modify input array */\nfunction getFirstKeyStartProps(prev) {\n if (prev.length === 0)\n return [];\n let i = prev.length;\n loop: while (--i >= 0) {\n switch (prev[i].type) {\n case 'doc-start':\n case 'explicit-key-ind':\n case 'map-value-ind':\n case 'seq-item-ind':\n case 'newline':\n break loop;\n }\n }\n while (prev[++i]?.type === 'space') {\n /* loop */\n }\n return prev.splice(i, prev.length);\n}\nfunction fixFlowSeqItems(fc) {\n if (fc.start.type === 'flow-seq-start') {\n for (const it of fc.items) {\n if (it.sep &&\n !it.value &&\n !includesToken(it.start, 'explicit-key-ind') &&\n !includesToken(it.sep, 'map-value-ind')) {\n if (it.key)\n it.value = it.key;\n delete it.key;\n if (isFlowToken(it.value)) {\n if (it.value.end)\n Array.prototype.push.apply(it.value.end, it.sep);\n else\n it.value.end = it.sep;\n }\n else\n Array.prototype.push.apply(it.start, it.sep);\n delete it.sep;\n }\n }\n }\n}\n/**\n * A YAML concrete syntax tree (CST) parser\n *\n * ```ts\n * const src: string = ...\n * for (const token of new Parser().parse(src)) {\n * // token: Token\n * }\n * ```\n *\n * To use the parser with a user-provided lexer:\n *\n * ```ts\n * function* parse(source: string, lexer: Lexer) {\n * const parser = new Parser()\n * for (const lexeme of lexer.lex(source))\n * yield* parser.next(lexeme)\n * yield* parser.end()\n * }\n *\n * const src: string = ...\n * const lexer = new Lexer()\n * for (const token of parse(src, lexer)) {\n * // token: Token\n * }\n * ```\n */\nclass Parser {\n /**\n * @param onNewLine - If defined, called separately with the start position of\n * each new line (in `parse()`, 
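A short sketch of how `LineCounter` pairs with the parser's `onNewLine` callback (both are top-level exports of the bundled `yaml` package; the offsets below assume the sample source shown):

const { LineCounter, Parser } = require('yaml');

const lineCounter = new LineCounter();
const parser = new Parser(lineCounter.addNewLine);

// The generator is lazy, so drain it to record every line start.
Array.from(parser.parse('a: 1\nb: 2\n'));

console.log(lineCounter.linePos(5)); // start of the second line -> { line: 2, col: 1 }
console.log(lineCounter.linePos(8)); // -> { line: 2, col: 4 }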
including the start of input).\n */\n constructor(onNewLine) {\n /** If true, space and sequence indicators count as indentation */\n this.atNewLine = true;\n /** If true, next token is a scalar value */\n this.atScalar = false;\n /** Current indentation level */\n this.indent = 0;\n /** Current offset since the start of parsing */\n this.offset = 0;\n /** On the same line with a block map key */\n this.onKeyLine = false;\n /** Top indicates the node that's currently being built */\n this.stack = [];\n /** The source of the current token, set in parse() */\n this.source = '';\n /** The type of the current token, set in parse() */\n this.type = '';\n // Must be defined after `next()`\n this.lexer = new lexer.Lexer();\n this.onNewLine = onNewLine;\n }\n /**\n * Parse `source` as a YAML stream.\n * If `incomplete`, a part of the last line may be left as a buffer for the next call.\n *\n * Errors are not thrown, but yielded as `{ type: 'error', message }` tokens.\n *\n * @returns A generator of tokens representing each directive, document, and other structure.\n */\n *parse(source, incomplete = false) {\n if (this.onNewLine && this.offset === 0)\n this.onNewLine(0);\n for (const lexeme of this.lexer.lex(source, incomplete))\n yield* this.next(lexeme);\n if (!incomplete)\n yield* this.end();\n }\n /**\n * Advance the parser by the `source` of one lexical token.\n */\n *next(source) {\n this.source = source;\n if (process.env.LOG_TOKENS)\n console.log('|', cst.prettyToken(source));\n if (this.atScalar) {\n this.atScalar = false;\n yield* this.step();\n this.offset += source.length;\n return;\n }\n const type = cst.tokenType(source);\n if (!type) {\n const message = `Not a YAML token: ${source}`;\n yield* this.pop({ type: 'error', offset: this.offset, message, source });\n this.offset += source.length;\n }\n else if (type === 'scalar') {\n this.atNewLine = false;\n this.atScalar = true;\n this.type = 'scalar';\n }\n else {\n this.type = type;\n yield* this.step();\n switch (type) {\n case 'newline':\n this.atNewLine = true;\n this.indent = 0;\n if (this.onNewLine)\n this.onNewLine(this.offset + source.length);\n break;\n case 'space':\n if (this.atNewLine && source[0] === ' ')\n this.indent += source.length;\n break;\n case 'explicit-key-ind':\n case 'map-value-ind':\n case 'seq-item-ind':\n if (this.atNewLine)\n this.indent += source.length;\n break;\n case 'doc-mode':\n case 'flow-error-end':\n return;\n default:\n this.atNewLine = false;\n }\n this.offset += source.length;\n }\n }\n /** Call at end of input to push out any remaining constructions */\n *end() {\n while (this.stack.length > 0)\n yield* this.pop();\n }\n get sourceToken() {\n const st = {\n type: this.type,\n offset: this.offset,\n indent: this.indent,\n source: this.source\n };\n return st;\n }\n *step() {\n const top = this.peek(1);\n if (this.type === 'doc-end' && (!top || top.type !== 'doc-end')) {\n while (this.stack.length > 0)\n yield* this.pop();\n this.stack.push({\n type: 'doc-end',\n offset: this.offset,\n source: this.source\n });\n return;\n }\n if (!top)\n return yield* this.stream();\n switch (top.type) {\n case 'document':\n return yield* this.document(top);\n case 'alias':\n case 'scalar':\n case 'single-quoted-scalar':\n case 'double-quoted-scalar':\n return yield* this.scalar(top);\n case 'block-scalar':\n return yield* this.blockScalar(top);\n case 'block-map':\n return yield* this.blockMap(top);\n case 'block-seq':\n return yield* this.blockSequence(top);\n case 'flow-collection':\n return yield* 
this.flowCollection(top);\n case 'doc-end':\n return yield* this.documentEnd(top);\n }\n /* istanbul ignore next should not happen */\n yield* this.pop();\n }\n peek(n) {\n return this.stack[this.stack.length - n];\n }\n *pop(error) {\n const token = error ?? this.stack.pop();\n /* istanbul ignore if should not happen */\n if (!token) {\n const message = 'Tried to pop an empty stack';\n yield { type: 'error', offset: this.offset, source: '', message };\n }\n else if (this.stack.length === 0) {\n yield token;\n }\n else {\n const top = this.peek(1);\n if (token.type === 'block-scalar') {\n // Block scalars use their parent rather than header indent\n token.indent = 'indent' in top ? top.indent : 0;\n }\n else if (token.type === 'flow-collection' && top.type === 'document') {\n // Ignore all indent for top-level flow collections\n token.indent = 0;\n }\n if (token.type === 'flow-collection')\n fixFlowSeqItems(token);\n switch (top.type) {\n case 'document':\n top.value = token;\n break;\n case 'block-scalar':\n top.props.push(token); // error\n break;\n case 'block-map': {\n const it = top.items[top.items.length - 1];\n if (it.value) {\n top.items.push({ start: [], key: token, sep: [] });\n this.onKeyLine = true;\n return;\n }\n else if (it.sep) {\n it.value = token;\n }\n else {\n Object.assign(it, { key: token, sep: [] });\n this.onKeyLine = !includesToken(it.start, 'explicit-key-ind');\n return;\n }\n break;\n }\n case 'block-seq': {\n const it = top.items[top.items.length - 1];\n if (it.value)\n top.items.push({ start: [], value: token });\n else\n it.value = token;\n break;\n }\n case 'flow-collection': {\n const it = top.items[top.items.length - 1];\n if (!it || it.value)\n top.items.push({ start: [], key: token, sep: [] });\n else if (it.sep)\n it.value = token;\n else\n Object.assign(it, { key: token, sep: [] });\n return;\n }\n /* istanbul ignore next should not happen */\n default:\n yield* this.pop();\n yield* this.pop(token);\n }\n if ((top.type === 'document' ||\n top.type === 'block-map' ||\n top.type === 'block-seq') &&\n (token.type === 'block-map' || token.type === 'block-seq')) {\n const last = token.items[token.items.length - 1];\n if (last &&\n !last.sep &&\n !last.value &&\n last.start.length > 0 &&\n findNonEmptyIndex(last.start) === -1 &&\n (token.indent === 0 ||\n last.start.every(st => st.type !== 'comment' || st.indent < token.indent))) {\n if (top.type === 'document')\n top.end = last.start;\n else\n top.items.push({ start: last.start });\n token.items.splice(-1, 1);\n }\n }\n }\n }\n *stream() {\n switch (this.type) {\n case 'directive-line':\n yield { type: 'directive', offset: this.offset, source: this.source };\n return;\n case 'byte-order-mark':\n case 'space':\n case 'comment':\n case 'newline':\n yield this.sourceToken;\n return;\n case 'doc-mode':\n case 'doc-start': {\n const doc = {\n type: 'document',\n offset: this.offset,\n start: []\n };\n if (this.type === 'doc-start')\n doc.start.push(this.sourceToken);\n this.stack.push(doc);\n return;\n }\n }\n yield {\n type: 'error',\n offset: this.offset,\n message: `Unexpected ${this.type} token in YAML stream`,\n source: this.source\n };\n }\n *document(doc) {\n if (doc.value)\n return yield* this.lineEnd(doc);\n switch (this.type) {\n case 'doc-start': {\n if (findNonEmptyIndex(doc.start) !== -1) {\n yield* this.pop();\n yield* this.step();\n }\n else\n doc.start.push(this.sourceToken);\n return;\n }\n case 'anchor':\n case 'tag':\n case 'space':\n case 'comment':\n case 'newline':\n 
doc.start.push(this.sourceToken);\n return;\n }\n const bv = this.startBlockValue(doc);\n if (bv)\n this.stack.push(bv);\n else {\n yield {\n type: 'error',\n offset: this.offset,\n message: `Unexpected ${this.type} token in YAML document`,\n source: this.source\n };\n }\n }\n *scalar(scalar) {\n if (this.type === 'map-value-ind') {\n const prev = getPrevProps(this.peek(2));\n const start = getFirstKeyStartProps(prev);\n let sep;\n if (scalar.end) {\n sep = scalar.end;\n sep.push(this.sourceToken);\n delete scalar.end;\n }\n else\n sep = [this.sourceToken];\n const map = {\n type: 'block-map',\n offset: scalar.offset,\n indent: scalar.indent,\n items: [{ start, key: scalar, sep }]\n };\n this.onKeyLine = true;\n this.stack[this.stack.length - 1] = map;\n }\n else\n yield* this.lineEnd(scalar);\n }\n *blockScalar(scalar) {\n switch (this.type) {\n case 'space':\n case 'comment':\n case 'newline':\n scalar.props.push(this.sourceToken);\n return;\n case 'scalar':\n scalar.source = this.source;\n // block-scalar source includes trailing newline\n this.atNewLine = true;\n this.indent = 0;\n if (this.onNewLine) {\n let nl = this.source.indexOf('\\n') + 1;\n while (nl !== 0) {\n this.onNewLine(this.offset + nl);\n nl = this.source.indexOf('\\n', nl) + 1;\n }\n }\n yield* this.pop();\n break;\n /* istanbul ignore next should not happen */\n default:\n yield* this.pop();\n yield* this.step();\n }\n }\n *blockMap(map) {\n const it = map.items[map.items.length - 1];\n // it.sep is true-ish if pair already has key or : separator\n switch (this.type) {\n case 'newline':\n this.onKeyLine = false;\n if (it.value) {\n const end = 'end' in it.value ? it.value.end : undefined;\n const last = Array.isArray(end) ? end[end.length - 1] : undefined;\n if (last?.type === 'comment')\n end?.push(this.sourceToken);\n else\n map.items.push({ start: [this.sourceToken] });\n }\n else if (it.sep) {\n it.sep.push(this.sourceToken);\n }\n else {\n it.start.push(this.sourceToken);\n }\n return;\n case 'space':\n case 'comment':\n if (it.value) {\n map.items.push({ start: [this.sourceToken] });\n }\n else if (it.sep) {\n it.sep.push(this.sourceToken);\n }\n else {\n if (this.atIndentedComment(it.start, map.indent)) {\n const prev = map.items[map.items.length - 2];\n const end = prev?.value?.end;\n if (Array.isArray(end)) {\n Array.prototype.push.apply(end, it.start);\n end.push(this.sourceToken);\n map.items.pop();\n return;\n }\n }\n it.start.push(this.sourceToken);\n }\n return;\n }\n if (this.indent >= map.indent) {\n const atNextItem = !this.onKeyLine && this.indent === map.indent && it.sep;\n // For empty nodes, assign newline-separated not indented empty tokens to following node\n let start = [];\n if (atNextItem && it.sep && !it.value) {\n const nl = [];\n for (let i = 0; i < it.sep.length; ++i) {\n const st = it.sep[i];\n switch (st.type) {\n case 'newline':\n nl.push(i);\n break;\n case 'space':\n break;\n case 'comment':\n if (st.indent > map.indent)\n nl.length = 0;\n break;\n default:\n nl.length = 0;\n }\n }\n if (nl.length >= 2)\n start = it.sep.splice(nl[1]);\n }\n switch (this.type) {\n case 'anchor':\n case 'tag':\n if (atNextItem || it.value) {\n start.push(this.sourceToken);\n map.items.push({ start });\n this.onKeyLine = true;\n }\n else if (it.sep) {\n it.sep.push(this.sourceToken);\n }\n else {\n it.start.push(this.sourceToken);\n }\n return;\n case 'explicit-key-ind':\n if (!it.sep && !includesToken(it.start, 'explicit-key-ind')) {\n it.start.push(this.sourceToken);\n }\n else if (atNextItem || 
it.value) {\n start.push(this.sourceToken);\n map.items.push({ start });\n }\n else {\n this.stack.push({\n type: 'block-map',\n offset: this.offset,\n indent: this.indent,\n items: [{ start: [this.sourceToken] }]\n });\n }\n this.onKeyLine = true;\n return;\n case 'map-value-ind':\n if (includesToken(it.start, 'explicit-key-ind')) {\n if (!it.sep) {\n if (includesToken(it.start, 'newline')) {\n Object.assign(it, { key: null, sep: [this.sourceToken] });\n }\n else {\n const start = getFirstKeyStartProps(it.start);\n this.stack.push({\n type: 'block-map',\n offset: this.offset,\n indent: this.indent,\n items: [{ start, key: null, sep: [this.sourceToken] }]\n });\n }\n }\n else if (it.value) {\n map.items.push({ start: [], key: null, sep: [this.sourceToken] });\n }\n else if (includesToken(it.sep, 'map-value-ind')) {\n this.stack.push({\n type: 'block-map',\n offset: this.offset,\n indent: this.indent,\n items: [{ start, key: null, sep: [this.sourceToken] }]\n });\n }\n else if (isFlowToken(it.key) &&\n !includesToken(it.sep, 'newline')) {\n const start = getFirstKeyStartProps(it.start);\n const key = it.key;\n const sep = it.sep;\n sep.push(this.sourceToken);\n // @ts-expect-error type guard is wrong here\n delete it.key, delete it.sep;\n this.stack.push({\n type: 'block-map',\n offset: this.offset,\n indent: this.indent,\n items: [{ start, key, sep }]\n });\n }\n else if (start.length > 0) {\n // Not actually at next item\n it.sep = it.sep.concat(start, this.sourceToken);\n }\n else {\n it.sep.push(this.sourceToken);\n }\n }\n else {\n if (!it.sep) {\n Object.assign(it, { key: null, sep: [this.sourceToken] });\n }\n else if (it.value || atNextItem) {\n map.items.push({ start, key: null, sep: [this.sourceToken] });\n }\n else if (includesToken(it.sep, 'map-value-ind')) {\n this.stack.push({\n type: 'block-map',\n offset: this.offset,\n indent: this.indent,\n items: [{ start: [], key: null, sep: [this.sourceToken] }]\n });\n }\n else {\n it.sep.push(this.sourceToken);\n }\n }\n this.onKeyLine = true;\n return;\n case 'alias':\n case 'scalar':\n case 'single-quoted-scalar':\n case 'double-quoted-scalar': {\n const fs = this.flowScalar(this.type);\n if (atNextItem || it.value) {\n map.items.push({ start, key: fs, sep: [] });\n this.onKeyLine = true;\n }\n else if (it.sep) {\n this.stack.push(fs);\n }\n else {\n Object.assign(it, { key: fs, sep: [] });\n this.onKeyLine = true;\n }\n return;\n }\n default: {\n const bv = this.startBlockValue(map);\n if (bv) {\n if (atNextItem &&\n bv.type !== 'block-seq' &&\n includesToken(it.start, 'explicit-key-ind')) {\n map.items.push({ start });\n }\n this.stack.push(bv);\n return;\n }\n }\n }\n }\n yield* this.pop();\n yield* this.step();\n }\n *blockSequence(seq) {\n const it = seq.items[seq.items.length - 1];\n switch (this.type) {\n case 'newline':\n if (it.value) {\n const end = 'end' in it.value ? it.value.end : undefined;\n const last = Array.isArray(end) ? 
end[end.length - 1] : undefined;\n if (last?.type === 'comment')\n end?.push(this.sourceToken);\n else\n seq.items.push({ start: [this.sourceToken] });\n }\n else\n it.start.push(this.sourceToken);\n return;\n case 'space':\n case 'comment':\n if (it.value)\n seq.items.push({ start: [this.sourceToken] });\n else {\n if (this.atIndentedComment(it.start, seq.indent)) {\n const prev = seq.items[seq.items.length - 2];\n const end = prev?.value?.end;\n if (Array.isArray(end)) {\n Array.prototype.push.apply(end, it.start);\n end.push(this.sourceToken);\n seq.items.pop();\n return;\n }\n }\n it.start.push(this.sourceToken);\n }\n return;\n case 'anchor':\n case 'tag':\n if (it.value || this.indent <= seq.indent)\n break;\n it.start.push(this.sourceToken);\n return;\n case 'seq-item-ind':\n if (this.indent !== seq.indent)\n break;\n if (it.value || includesToken(it.start, 'seq-item-ind'))\n seq.items.push({ start: [this.sourceToken] });\n else\n it.start.push(this.sourceToken);\n return;\n }\n if (this.indent > seq.indent) {\n const bv = this.startBlockValue(seq);\n if (bv) {\n this.stack.push(bv);\n return;\n }\n }\n yield* this.pop();\n yield* this.step();\n }\n *flowCollection(fc) {\n const it = fc.items[fc.items.length - 1];\n if (this.type === 'flow-error-end') {\n let top;\n do {\n yield* this.pop();\n top = this.peek(1);\n } while (top && top.type === 'flow-collection');\n }\n else if (fc.end.length === 0) {\n switch (this.type) {\n case 'comma':\n case 'explicit-key-ind':\n if (!it || it.sep)\n fc.items.push({ start: [this.sourceToken] });\n else\n it.start.push(this.sourceToken);\n return;\n case 'map-value-ind':\n if (!it || it.value)\n fc.items.push({ start: [], key: null, sep: [this.sourceToken] });\n else if (it.sep)\n it.sep.push(this.sourceToken);\n else\n Object.assign(it, { key: null, sep: [this.sourceToken] });\n return;\n case 'space':\n case 'comment':\n case 'newline':\n case 'anchor':\n case 'tag':\n if (!it || it.value)\n fc.items.push({ start: [this.sourceToken] });\n else if (it.sep)\n it.sep.push(this.sourceToken);\n else\n it.start.push(this.sourceToken);\n return;\n case 'alias':\n case 'scalar':\n case 'single-quoted-scalar':\n case 'double-quoted-scalar': {\n const fs = this.flowScalar(this.type);\n if (!it || it.value)\n fc.items.push({ start: [], key: fs, sep: [] });\n else if (it.sep)\n this.stack.push(fs);\n else\n Object.assign(it, { key: fs, sep: [] });\n return;\n }\n case 'flow-map-end':\n case 'flow-seq-end':\n fc.end.push(this.sourceToken);\n return;\n }\n const bv = this.startBlockValue(fc);\n /* istanbul ignore else should not happen */\n if (bv)\n this.stack.push(bv);\n else {\n yield* this.pop();\n yield* this.step();\n }\n }\n else {\n const parent = this.peek(2);\n if (parent.type === 'block-map' &&\n ((this.type === 'map-value-ind' && parent.indent === fc.indent) ||\n (this.type === 'newline' &&\n !parent.items[parent.items.length - 1].sep))) {\n yield* this.pop();\n yield* this.step();\n }\n else if (this.type === 'map-value-ind' &&\n parent.type !== 'flow-collection') {\n const prev = getPrevProps(parent);\n const start = getFirstKeyStartProps(prev);\n fixFlowSeqItems(fc);\n const sep = fc.end.splice(1, fc.end.length);\n sep.push(this.sourceToken);\n const map = {\n type: 'block-map',\n offset: fc.offset,\n indent: fc.indent,\n items: [{ start, key: fc, sep }]\n };\n this.onKeyLine = true;\n this.stack[this.stack.length - 1] = map;\n }\n else {\n yield* this.lineEnd(fc);\n }\n }\n }\n flowScalar(type) {\n if (this.onNewLine) {\n let nl = 
this.source.indexOf('\\n') + 1;\n while (nl !== 0) {\n this.onNewLine(this.offset + nl);\n nl = this.source.indexOf('\\n', nl) + 1;\n }\n }\n return {\n type,\n offset: this.offset,\n indent: this.indent,\n source: this.source\n };\n }\n startBlockValue(parent) {\n switch (this.type) {\n case 'alias':\n case 'scalar':\n case 'single-quoted-scalar':\n case 'double-quoted-scalar':\n return this.flowScalar(this.type);\n case 'block-scalar-header':\n return {\n type: 'block-scalar',\n offset: this.offset,\n indent: this.indent,\n props: [this.sourceToken],\n source: ''\n };\n case 'flow-map-start':\n case 'flow-seq-start':\n return {\n type: 'flow-collection',\n offset: this.offset,\n indent: this.indent,\n start: this.sourceToken,\n items: [],\n end: []\n };\n case 'seq-item-ind':\n return {\n type: 'block-seq',\n offset: this.offset,\n indent: this.indent,\n items: [{ start: [this.sourceToken] }]\n };\n case 'explicit-key-ind': {\n this.onKeyLine = true;\n const prev = getPrevProps(parent);\n const start = getFirstKeyStartProps(prev);\n start.push(this.sourceToken);\n return {\n type: 'block-map',\n offset: this.offset,\n indent: this.indent,\n items: [{ start }]\n };\n }\n case 'map-value-ind': {\n this.onKeyLine = true;\n const prev = getPrevProps(parent);\n const start = getFirstKeyStartProps(prev);\n return {\n type: 'block-map',\n offset: this.offset,\n indent: this.indent,\n items: [{ start, key: null, sep: [this.sourceToken] }]\n };\n }\n }\n return null;\n }\n atIndentedComment(start, indent) {\n if (this.type !== 'comment')\n return false;\n if (this.indent <= indent)\n return false;\n return start.every(st => st.type === 'newline' || st.type === 'space');\n }\n *documentEnd(docEnd) {\n if (this.type !== 'doc-mode') {\n if (docEnd.end)\n docEnd.end.push(this.sourceToken);\n else\n docEnd.end = [this.sourceToken];\n if (this.type === 'newline')\n yield* this.pop();\n }\n }\n *lineEnd(token) {\n switch (this.type) {\n case 'comma':\n case 'doc-start':\n case 'doc-end':\n case 'flow-seq-end':\n case 'flow-map-end':\n case 'map-value-ind':\n yield* this.pop();\n yield* this.step();\n break;\n case 'newline':\n this.onKeyLine = false;\n // fallthrough\n case 'space':\n case 'comment':\n default:\n // all other values are errors\n if (token.end)\n token.end.push(this.sourceToken);\n else\n token.end = [this.sourceToken];\n if (this.type === 'newline')\n yield* this.pop();\n }\n }\n}\n\nexports.Parser = Parser;\n","'use strict';\n\nvar composer = require('./compose/composer.js');\nvar Document = require('./doc/Document.js');\nvar errors = require('./errors.js');\nvar log = require('./log.js');\nvar lineCounter = require('./parse/line-counter.js');\nvar parser = require('./parse/parser.js');\n\nfunction parseOptions(options) {\n const prettyErrors = options.prettyErrors !== false;\n const lineCounter$1 = options.lineCounter || (prettyErrors && new lineCounter.LineCounter()) || null;\n return { lineCounter: lineCounter$1, prettyErrors };\n}\n/**\n * Parse the input as a stream of YAML documents.\n *\n * Documents should be separated from each other by `...` or `---` marker lines.\n *\n * @returns If an empty `docs` array is returned, it will be of type\n * EmptyStream and contain additional stream information. 
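A sketch of the streaming use of `Parser` described above, feeding the source in chunks with `incomplete = true` so a partial last line stays buffered until the next call:

const { Parser } = require('yaml');

const parser = new Parser();
const tokens = [];
for (const token of parser.parse('fruit:\n  - ap', true)) tokens.push(token);
for (const token of parser.parse('ple\n  - pear\n')) tokens.push(token);

// With no directives or document markers, everything nests under a single
// top-level token, so tokens.map(t => t.type) should be e.g. [ 'document' ].
console.log(tokens.map(t => t.type));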
In\n * TypeScript, you should use `'empty' in docs` as a type guard for it.\n */\nfunction parseAllDocuments(source, options = {}) {\n const { lineCounter, prettyErrors } = parseOptions(options);\n const parser$1 = new parser.Parser(lineCounter?.addNewLine);\n const composer$1 = new composer.Composer(options);\n const docs = Array.from(composer$1.compose(parser$1.parse(source)));\n if (prettyErrors && lineCounter)\n for (const doc of docs) {\n doc.errors.forEach(errors.prettifyError(source, lineCounter));\n doc.warnings.forEach(errors.prettifyError(source, lineCounter));\n }\n if (docs.length > 0)\n return docs;\n return Object.assign([], { empty: true }, composer$1.streamInfo());\n}\n/** Parse an input string into a single YAML.Document */\nfunction parseDocument(source, options = {}) {\n const { lineCounter, prettyErrors } = parseOptions(options);\n const parser$1 = new parser.Parser(lineCounter?.addNewLine);\n const composer$1 = new composer.Composer(options);\n // `doc` is always set by compose.end(true) at the very latest\n let doc = null;\n for (const _doc of composer$1.compose(parser$1.parse(source), true, source.length)) {\n if (!doc)\n doc = _doc;\n else if (doc.options.logLevel !== 'silent') {\n doc.errors.push(new errors.YAMLParseError(_doc.range.slice(0, 2), 'MULTIPLE_DOCS', 'Source contains multiple documents; please use YAML.parseAllDocuments()'));\n break;\n }\n }\n if (prettyErrors && lineCounter) {\n doc.errors.forEach(errors.prettifyError(source, lineCounter));\n doc.warnings.forEach(errors.prettifyError(source, lineCounter));\n }\n return doc;\n}\nfunction parse(src, reviver, options) {\n let _reviver = undefined;\n if (typeof reviver === 'function') {\n _reviver = reviver;\n }\n else if (options === undefined && reviver && typeof reviver === 'object') {\n options = reviver;\n }\n const doc = parseDocument(src, options);\n if (!doc)\n return null;\n doc.warnings.forEach(warning => log.warn(doc.options.logLevel, warning));\n if (doc.errors.length > 0) {\n if (doc.options.logLevel !== 'silent')\n throw doc.errors[0];\n else\n doc.errors = [];\n }\n return doc.toJS(Object.assign({ reviver: _reviver }, options));\n}\nfunction stringify(value, replacer, options) {\n let _replacer = null;\n if (typeof replacer === 'function' || Array.isArray(replacer)) {\n _replacer = replacer;\n }\n else if (options === undefined && replacer) {\n options = replacer;\n }\n if (typeof options === 'string')\n options = options.length;\n if (typeof options === 'number') {\n const indent = Math.round(options);\n options = indent < 1 ? undefined : indent > 8 ? { indent: 8 } : { indent };\n }\n if (value === undefined) {\n const { keepUndefined } = options ?? replacer ?? {};\n if (!keepUndefined)\n return undefined;\n }\n return new Document.Document(value, _replacer, options).toString(options);\n}\n\nexports.parse = parse;\nexports.parseAllDocuments = parseAllDocuments;\nexports.parseDocument = parseDocument;\nexports.stringify = stringify;\n","'use strict';\n\nvar Node = require('../nodes/Node.js');\nvar map = require('./common/map.js');\nvar seq = require('./common/seq.js');\nvar string = require('./common/string.js');\nvar tags = require('./tags.js');\n\nconst sortMapEntriesByKey = (a, b) => a.key < b.key ? -1 : a.key > b.key ? 1 : 0;\nclass Schema {\n constructor({ compat, customTags, merge, resolveKnownTags, schema, sortMapEntries, toStringDefaults }) {\n this.compat = Array.isArray(compat)\n ? tags.getTags(compat, 'compat')\n : compat\n ? 
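The four public entry points defined above can be exercised as follows (a usage sketch; the exact stringify layout depends on the default options):

const YAML = require('yaml');

YAML.parse('n: 42\nlist: [a, b]\n');         // -> { n: 42, list: [ 'a', 'b' ] }
YAML.stringify({ n: 42, list: ['a', 'b'] }); // -> 'n: 42\nlist:\n  - a\n  - b\n'

// parseAllDocuments handles `---` / `...` separated streams.
const docs = YAML.parseAllDocuments('a: 1\n---\nb: 2\n');
docs.map(doc => doc.toJS());                 // -> [ { a: 1 }, { b: 2 } ]

// parseDocument never throws; problems are collected on doc.errors
// (prettified with line/column info unless prettyErrors is disabled).
const doc = YAML.parseDocument('a: [1, 2\n');
console.log(doc.errors.map(err => err.message));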
tags.getTags(null, compat)\n : null;\n this.merge = !!merge;\n this.name = (typeof schema === 'string' && schema) || 'core';\n this.knownTags = resolveKnownTags ? tags.coreKnownTags : {};\n this.tags = tags.getTags(customTags, this.name);\n this.toStringOptions = toStringDefaults ?? null;\n Object.defineProperty(this, Node.MAP, { value: map.map });\n Object.defineProperty(this, Node.SCALAR, { value: string.string });\n Object.defineProperty(this, Node.SEQ, { value: seq.seq });\n // Used by createMap()\n this.sortMapEntries =\n typeof sortMapEntries === 'function'\n ? sortMapEntries\n : sortMapEntries === true\n ? sortMapEntriesByKey\n : null;\n }\n clone() {\n const copy = Object.create(Schema.prototype, Object.getOwnPropertyDescriptors(this));\n copy.tags = this.tags.slice();\n return copy;\n }\n}\n\nexports.Schema = Schema;\n","'use strict';\n\nvar Node = require('../../nodes/Node.js');\nvar Pair = require('../../nodes/Pair.js');\nvar YAMLMap = require('../../nodes/YAMLMap.js');\n\nfunction createMap(schema, obj, ctx) {\n const { keepUndefined, replacer } = ctx;\n const map = new YAMLMap.YAMLMap(schema);\n const add = (key, value) => {\n if (typeof replacer === 'function')\n value = replacer.call(obj, key, value);\n else if (Array.isArray(replacer) && !replacer.includes(key))\n return;\n if (value !== undefined || keepUndefined)\n map.items.push(Pair.createPair(key, value, ctx));\n };\n if (obj instanceof Map) {\n for (const [key, value] of obj)\n add(key, value);\n }\n else if (obj && typeof obj === 'object') {\n for (const key of Object.keys(obj))\n add(key, obj[key]);\n }\n if (typeof schema.sortMapEntries === 'function') {\n map.items.sort(schema.sortMapEntries);\n }\n return map;\n}\nconst map = {\n collection: 'map',\n createNode: createMap,\n default: true,\n nodeClass: YAMLMap.YAMLMap,\n tag: 'tag:yaml.org,2002:map',\n resolve(map, onError) {\n if (!Node.isMap(map))\n onError('Expected a mapping for this tag');\n return map;\n }\n};\n\nexports.map = map;\n","'use strict';\n\nvar Scalar = require('../../nodes/Scalar.js');\n\nconst nullTag = {\n identify: value => value == null,\n createNode: () => new Scalar.Scalar(null),\n default: true,\n tag: 'tag:yaml.org,2002:null',\n test: /^(?:~|[Nn]ull|NULL)?$/,\n resolve: () => new Scalar.Scalar(null),\n stringify: ({ source }, ctx) => typeof source === 'string' && nullTag.test.test(source)\n ? source\n : ctx.options.nullStr\n};\n\nexports.nullTag = nullTag;\n","'use strict';\n\nvar createNode = require('../../doc/createNode.js');\nvar Node = require('../../nodes/Node.js');\nvar YAMLSeq = require('../../nodes/YAMLSeq.js');\n\nfunction createSeq(schema, obj, ctx) {\n const { replacer } = ctx;\n const seq = new YAMLSeq.YAMLSeq(schema);\n if (obj && Symbol.iterator in Object(obj)) {\n let i = 0;\n for (let it of obj) {\n if (typeof replacer === 'function') {\n const key = obj instanceof Set ? 
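A sketch tying the `sortMapEntries` schema option and the JSON.stringify-style replacer handled by `createMap` above to the public `stringify` call:

const YAML = require('yaml');

// The replacer array keeps only the listed keys; sortMapEntries === true
// applies the default by-key comparator shown above.
YAML.stringify({ b: 2, a: 1, secret: 'x' }, ['a', 'b'], { sortMapEntries: true });
// -> 'a: 1\nb: 2\n'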
it : String(i++);\n it = replacer.call(obj, key, it);\n }\n seq.items.push(createNode.createNode(it, undefined, ctx));\n }\n }\n return seq;\n}\nconst seq = {\n collection: 'seq',\n createNode: createSeq,\n default: true,\n nodeClass: YAMLSeq.YAMLSeq,\n tag: 'tag:yaml.org,2002:seq',\n resolve(seq, onError) {\n if (!Node.isSeq(seq))\n onError('Expected a sequence for this tag');\n return seq;\n }\n};\n\nexports.seq = seq;\n","'use strict';\n\nvar stringifyString = require('../../stringify/stringifyString.js');\n\nconst string = {\n identify: value => typeof value === 'string',\n default: true,\n tag: 'tag:yaml.org,2002:str',\n resolve: str => str,\n stringify(item, ctx, onComment, onChompKeep) {\n ctx = Object.assign({ actualString: true }, ctx);\n return stringifyString.stringifyString(item, ctx, onComment, onChompKeep);\n }\n};\n\nexports.string = string;\n","'use strict';\n\nvar Scalar = require('../../nodes/Scalar.js');\n\nconst boolTag = {\n identify: value => typeof value === 'boolean',\n default: true,\n tag: 'tag:yaml.org,2002:bool',\n test: /^(?:[Tt]rue|TRUE|[Ff]alse|FALSE)$/,\n resolve: str => new Scalar.Scalar(str[0] === 't' || str[0] === 'T'),\n stringify({ source, value }, ctx) {\n if (source && boolTag.test.test(source)) {\n const sv = source[0] === 't' || source[0] === 'T';\n if (value === sv)\n return source;\n }\n return value ? ctx.options.trueStr : ctx.options.falseStr;\n }\n};\n\nexports.boolTag = boolTag;\n","'use strict';\n\nvar Scalar = require('../../nodes/Scalar.js');\nvar stringifyNumber = require('../../stringify/stringifyNumber.js');\n\nconst floatNaN = {\n identify: value => typeof value === 'number',\n default: true,\n tag: 'tag:yaml.org,2002:float',\n test: /^(?:[-+]?\\.(?:inf|Inf|INF|nan|NaN|NAN))$/,\n resolve: str => str.slice(-3).toLowerCase() === 'nan'\n ? NaN\n : str[0] === '-'\n ? Number.NEGATIVE_INFINITY\n : Number.POSITIVE_INFINITY,\n stringify: stringifyNumber.stringifyNumber\n};\nconst floatExp = {\n identify: value => typeof value === 'number',\n default: true,\n tag: 'tag:yaml.org,2002:float',\n format: 'EXP',\n test: /^[-+]?(?:\\.[0-9]+|[0-9]+(?:\\.[0-9]*)?)[eE][-+]?[0-9]+$/,\n resolve: str => parseFloat(str),\n stringify(node) {\n const num = Number(node.value);\n return isFinite(num) ? num.toExponential() : stringifyNumber.stringifyNumber(node);\n }\n};\nconst float = {\n identify: value => typeof value === 'number',\n default: true,\n tag: 'tag:yaml.org,2002:float',\n test: /^[-+]?(?:\\.[0-9]+|[0-9]+\\.[0-9]*)$/,\n resolve(str) {\n const node = new Scalar.Scalar(parseFloat(str));\n const dot = str.indexOf('.');\n if (dot !== -1 && str[str.length - 1] === '0')\n node.minFractionDigits = str.length - dot - 1;\n return node;\n },\n stringify: stringifyNumber.stringifyNumber\n};\n\nexports.float = float;\nexports.floatExp = floatExp;\nexports.floatNaN = floatNaN;\n","'use strict';\n\nvar stringifyNumber = require('../../stringify/stringifyNumber.js');\n\nconst intIdentify = (value) => typeof value === 'bigint' || Number.isInteger(value);\nconst intResolve = (str, offset, radix, { intAsBigInt }) => (intAsBigInt ? 
BigInt(str) : parseInt(str.substring(offset), radix));\nfunction intStringify(node, radix, prefix) {\n const { value } = node;\n if (intIdentify(value) && value >= 0)\n return prefix + value.toString(radix);\n return stringifyNumber.stringifyNumber(node);\n}\nconst intOct = {\n identify: value => intIdentify(value) && value >= 0,\n default: true,\n tag: 'tag:yaml.org,2002:int',\n format: 'OCT',\n test: /^0o[0-7]+$/,\n resolve: (str, _onError, opt) => intResolve(str, 2, 8, opt),\n stringify: node => intStringify(node, 8, '0o')\n};\nconst int = {\n identify: intIdentify,\n default: true,\n tag: 'tag:yaml.org,2002:int',\n test: /^[-+]?[0-9]+$/,\n resolve: (str, _onError, opt) => intResolve(str, 0, 10, opt),\n stringify: stringifyNumber.stringifyNumber\n};\nconst intHex = {\n identify: value => intIdentify(value) && value >= 0,\n default: true,\n tag: 'tag:yaml.org,2002:int',\n format: 'HEX',\n test: /^0x[0-9a-fA-F]+$/,\n resolve: (str, _onError, opt) => intResolve(str, 2, 16, opt),\n stringify: node => intStringify(node, 16, '0x')\n};\n\nexports.int = int;\nexports.intHex = intHex;\nexports.intOct = intOct;\n","'use strict';\n\nvar map = require('../common/map.js');\nvar _null = require('../common/null.js');\nvar seq = require('../common/seq.js');\nvar string = require('../common/string.js');\nvar bool = require('./bool.js');\nvar float = require('./float.js');\nvar int = require('./int.js');\n\nconst schema = [\n map.map,\n seq.seq,\n string.string,\n _null.nullTag,\n bool.boolTag,\n int.intOct,\n int.int,\n int.intHex,\n float.floatNaN,\n float.floatExp,\n float.float\n];\n\nexports.schema = schema;\n","'use strict';\n\nvar Scalar = require('../../nodes/Scalar.js');\nvar map = require('../common/map.js');\nvar seq = require('../common/seq.js');\n\nfunction intIdentify(value) {\n return typeof value === 'bigint' || Number.isInteger(value);\n}\nconst stringifyJSON = ({ value }) => JSON.stringify(value);\nconst jsonScalars = [\n {\n identify: value => typeof value === 'string',\n default: true,\n tag: 'tag:yaml.org,2002:str',\n resolve: str => str,\n stringify: stringifyJSON\n },\n {\n identify: value => value == null,\n createNode: () => new Scalar.Scalar(null),\n default: true,\n tag: 'tag:yaml.org,2002:null',\n test: /^null$/,\n resolve: () => null,\n stringify: stringifyJSON\n },\n {\n identify: value => typeof value === 'boolean',\n default: true,\n tag: 'tag:yaml.org,2002:bool',\n test: /^true|false$/,\n resolve: str => str === 'true',\n stringify: stringifyJSON\n },\n {\n identify: intIdentify,\n default: true,\n tag: 'tag:yaml.org,2002:int',\n test: /^-?(?:0|[1-9][0-9]*)$/,\n resolve: (str, _onError, { intAsBigInt }) => intAsBigInt ? BigInt(str) : parseInt(str, 10),\n stringify: ({ value }) => intIdentify(value) ? 
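A sketch of the core-schema integer tags defined above, including the `intAsBigInt` switch used by `intResolve`:

const YAML = require('yaml');

YAML.parse('0x2a');                        // intHex  -> 42
YAML.parse('0o17');                        // intOct  -> 15
YAML.parse('0x2a', { intAsBigInt: true }); // BigInt resolution -> 42n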
value.toString() : JSON.stringify(value)\n },\n {\n identify: value => typeof value === 'number',\n default: true,\n tag: 'tag:yaml.org,2002:float',\n test: /^-?(?:0|[1-9][0-9]*)(?:\\.[0-9]*)?(?:[eE][-+]?[0-9]+)?$/,\n resolve: str => parseFloat(str),\n stringify: stringifyJSON\n }\n];\nconst jsonError = {\n default: true,\n tag: '',\n test: /^/,\n resolve(str, onError) {\n onError(`Unresolved plain scalar ${JSON.stringify(str)}`);\n return str;\n }\n};\nconst schema = [map.map, seq.seq].concat(jsonScalars, jsonError);\n\nexports.schema = schema;\n","'use strict';\n\nvar map = require('./common/map.js');\nvar _null = require('./common/null.js');\nvar seq = require('./common/seq.js');\nvar string = require('./common/string.js');\nvar bool = require('./core/bool.js');\nvar float = require('./core/float.js');\nvar int = require('./core/int.js');\nvar schema = require('./core/schema.js');\nvar schema$1 = require('./json/schema.js');\nvar binary = require('./yaml-1.1/binary.js');\nvar omap = require('./yaml-1.1/omap.js');\nvar pairs = require('./yaml-1.1/pairs.js');\nvar schema$2 = require('./yaml-1.1/schema.js');\nvar set = require('./yaml-1.1/set.js');\nvar timestamp = require('./yaml-1.1/timestamp.js');\n\nconst schemas = new Map([\n ['core', schema.schema],\n ['failsafe', [map.map, seq.seq, string.string]],\n ['json', schema$1.schema],\n ['yaml11', schema$2.schema],\n ['yaml-1.1', schema$2.schema]\n]);\nconst tagsByName = {\n binary: binary.binary,\n bool: bool.boolTag,\n float: float.float,\n floatExp: float.floatExp,\n floatNaN: float.floatNaN,\n floatTime: timestamp.floatTime,\n int: int.int,\n intHex: int.intHex,\n intOct: int.intOct,\n intTime: timestamp.intTime,\n map: map.map,\n null: _null.nullTag,\n omap: omap.omap,\n pairs: pairs.pairs,\n seq: seq.seq,\n set: set.set,\n timestamp: timestamp.timestamp\n};\nconst coreKnownTags = {\n 'tag:yaml.org,2002:binary': binary.binary,\n 'tag:yaml.org,2002:omap': omap.omap,\n 'tag:yaml.org,2002:pairs': pairs.pairs,\n 'tag:yaml.org,2002:set': set.set,\n 'tag:yaml.org,2002:timestamp': timestamp.timestamp\n};\nfunction getTags(customTags, schemaName) {\n let tags = schemas.get(schemaName);\n if (!tags) {\n if (Array.isArray(customTags))\n tags = [];\n else {\n const keys = Array.from(schemas.keys())\n .filter(key => key !== 'yaml11')\n .map(key => JSON.stringify(key))\n .join(', ');\n throw new Error(`Unknown schema \"${schemaName}\"; use one of ${keys} or define customTags array`);\n }\n }\n if (Array.isArray(customTags)) {\n for (const tag of customTags)\n tags = tags.concat(tag);\n }\n else if (typeof customTags === 'function') {\n tags = customTags(tags.slice());\n }\n return tags.map(tag => {\n if (typeof tag !== 'string')\n return tag;\n const tagObj = tagsByName[tag];\n if (tagObj)\n return tagObj;\n const keys = Object.keys(tagsByName)\n .map(key => JSON.stringify(key))\n .join(', ');\n throw new Error(`Unknown custom tag \"${tag}\"; use one of ${keys}`);\n });\n}\n\nexports.coreKnownTags = coreKnownTags;\nexports.getTags = getTags;\n","'use strict';\n\nvar Scalar = require('../../nodes/Scalar.js');\nvar stringifyString = require('../../stringify/stringifyString.js');\n\nconst binary = {\n identify: value => value instanceof Uint8Array,\n default: false,\n tag: 'tag:yaml.org,2002:binary',\n /**\n * Returns a Buffer in node and an Uint8Array in browsers\n *\n * To use the resulting buffer as an image, you'll want to do something like:\n *\n * const blob = new Blob([buffer], { type: 'image/jpeg' })\n * document.querySelector('#photo').src = 
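A sketch of schema selection and the by-name custom tags resolved by `getTags` above (schema and tag names as listed in `schemas` and `tagsByName`):

const YAML = require('yaml');

YAML.parse('yes', { schema: 'core' });     // -> 'yes' (plain string in the core schema)
YAML.parse('yes', { schema: 'yaml-1.1' }); // -> true  (YAML 1.1 boolean)

// customTags entries may be tag names from tagsByName, e.g. adding the
// YAML 1.1 timestamp tag to the default core schema:
YAML.parse('2023-05-07', { customTags: ['timestamp'] }) instanceof Date; // -> true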
URL.createObjectURL(blob)\n */\n resolve(src, onError) {\n if (typeof Buffer === 'function') {\n return Buffer.from(src, 'base64');\n }\n else if (typeof atob === 'function') {\n // On IE 11, atob() can't handle newlines\n const str = atob(src.replace(/[\\n\\r]/g, ''));\n const buffer = new Uint8Array(str.length);\n for (let i = 0; i < str.length; ++i)\n buffer[i] = str.charCodeAt(i);\n return buffer;\n }\n else {\n onError('This environment does not support reading binary tags; either Buffer or atob is required');\n return src;\n }\n },\n stringify({ comment, type, value }, ctx, onComment, onChompKeep) {\n const buf = value; // checked earlier by binary.identify()\n let str;\n if (typeof Buffer === 'function') {\n str =\n buf instanceof Buffer\n ? buf.toString('base64')\n : Buffer.from(buf.buffer).toString('base64');\n }\n else if (typeof btoa === 'function') {\n let s = '';\n for (let i = 0; i < buf.length; ++i)\n s += String.fromCharCode(buf[i]);\n str = btoa(s);\n }\n else {\n throw new Error('This environment does not support writing binary tags; either Buffer or btoa is required');\n }\n if (!type)\n type = Scalar.Scalar.BLOCK_LITERAL;\n if (type !== Scalar.Scalar.QUOTE_DOUBLE) {\n const lineWidth = Math.max(ctx.options.lineWidth - ctx.indent.length, ctx.options.minContentWidth);\n const n = Math.ceil(str.length / lineWidth);\n const lines = new Array(n);\n for (let i = 0, o = 0; i < n; ++i, o += lineWidth) {\n lines[i] = str.substr(o, lineWidth);\n }\n str = lines.join(type === Scalar.Scalar.BLOCK_LITERAL ? '\\n' : ' ');\n }\n return stringifyString.stringifyString({ comment, type, value: str }, ctx, onComment, onChompKeep);\n }\n};\n\nexports.binary = binary;\n","'use strict';\n\nvar Scalar = require('../../nodes/Scalar.js');\n\nfunction boolStringify({ value, source }, ctx) {\n const boolObj = value ? trueTag : falseTag;\n if (source && boolObj.test.test(source))\n return source;\n return value ? ctx.options.trueStr : ctx.options.falseStr;\n}\nconst trueTag = {\n identify: value => value === true,\n default: true,\n tag: 'tag:yaml.org,2002:bool',\n test: /^(?:Y|y|[Yy]es|YES|[Tt]rue|TRUE|[Oo]n|ON)$/,\n resolve: () => new Scalar.Scalar(true),\n stringify: boolStringify\n};\nconst falseTag = {\n identify: value => value === false,\n default: true,\n tag: 'tag:yaml.org,2002:bool',\n test: /^(?:N|n|[Nn]o|NO|[Ff]alse|FALSE|[Oo]ff|OFF)$/i,\n resolve: () => new Scalar.Scalar(false),\n stringify: boolStringify\n};\n\nexports.falseTag = falseTag;\nexports.trueTag = trueTag;\n","'use strict';\n\nvar Scalar = require('../../nodes/Scalar.js');\nvar stringifyNumber = require('../../stringify/stringifyNumber.js');\n\nconst floatNaN = {\n identify: value => typeof value === 'number',\n default: true,\n tag: 'tag:yaml.org,2002:float',\n test: /^[-+]?\\.(?:inf|Inf|INF|nan|NaN|NAN)$/,\n resolve: (str) => str.slice(-3).toLowerCase() === 'nan'\n ? NaN\n : str[0] === '-'\n ? Number.NEGATIVE_INFINITY\n : Number.POSITIVE_INFINITY,\n stringify: stringifyNumber.stringifyNumber\n};\nconst floatExp = {\n identify: value => typeof value === 'number',\n default: true,\n tag: 'tag:yaml.org,2002:float',\n format: 'EXP',\n test: /^[-+]?(?:[0-9][0-9_]*)?(?:\\.[0-9_]*)?[eE][-+]?[0-9]+$/,\n resolve: (str) => parseFloat(str.replace(/_/g, '')),\n stringify(node) {\n const num = Number(node.value);\n return isFinite(num) ? 
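A sketch of the `!!binary` tag above, assuming the default `resolveKnownTags: true` so the explicit tag resolves even under the core schema, while stringifying bytes needs the tag added via `customTags`:

const YAML = require('yaml');

// 'aGVsbG8=' is base64 for 'hello'; Buffer is used in Node, atob in browsers.
const buf = YAML.parse('!!binary "aGVsbG8="');
console.log(Buffer.isBuffer(buf), buf.toString()); // -> true 'hello'

// The reverse direction should emit a base64 block scalar tagged !!binary.
console.log(YAML.stringify({ data: Buffer.from('hello') }, { customTags: ['binary'] }));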
num.toExponential() : stringifyNumber.stringifyNumber(node);\n }\n};\nconst float = {\n identify: value => typeof value === 'number',\n default: true,\n tag: 'tag:yaml.org,2002:float',\n test: /^[-+]?(?:[0-9][0-9_]*)?\\.[0-9_]*$/,\n resolve(str) {\n const node = new Scalar.Scalar(parseFloat(str.replace(/_/g, '')));\n const dot = str.indexOf('.');\n if (dot !== -1) {\n const f = str.substring(dot + 1).replace(/_/g, '');\n if (f[f.length - 1] === '0')\n node.minFractionDigits = f.length;\n }\n return node;\n },\n stringify: stringifyNumber.stringifyNumber\n};\n\nexports.float = float;\nexports.floatExp = floatExp;\nexports.floatNaN = floatNaN;\n","'use strict';\n\nvar stringifyNumber = require('../../stringify/stringifyNumber.js');\n\nconst intIdentify = (value) => typeof value === 'bigint' || Number.isInteger(value);\nfunction intResolve(str, offset, radix, { intAsBigInt }) {\n const sign = str[0];\n if (sign === '-' || sign === '+')\n offset += 1;\n str = str.substring(offset).replace(/_/g, '');\n if (intAsBigInt) {\n switch (radix) {\n case 2:\n str = `0b${str}`;\n break;\n case 8:\n str = `0o${str}`;\n break;\n case 16:\n str = `0x${str}`;\n break;\n }\n const n = BigInt(str);\n return sign === '-' ? BigInt(-1) * n : n;\n }\n const n = parseInt(str, radix);\n return sign === '-' ? -1 * n : n;\n}\nfunction intStringify(node, radix, prefix) {\n const { value } = node;\n if (intIdentify(value)) {\n const str = value.toString(radix);\n return value < 0 ? '-' + prefix + str.substr(1) : prefix + str;\n }\n return stringifyNumber.stringifyNumber(node);\n}\nconst intBin = {\n identify: intIdentify,\n default: true,\n tag: 'tag:yaml.org,2002:int',\n format: 'BIN',\n test: /^[-+]?0b[0-1_]+$/,\n resolve: (str, _onError, opt) => intResolve(str, 2, 2, opt),\n stringify: node => intStringify(node, 2, '0b')\n};\nconst intOct = {\n identify: intIdentify,\n default: true,\n tag: 'tag:yaml.org,2002:int',\n format: 'OCT',\n test: /^[-+]?0[0-7_]+$/,\n resolve: (str, _onError, opt) => intResolve(str, 1, 8, opt),\n stringify: node => intStringify(node, 8, '0')\n};\nconst int = {\n identify: intIdentify,\n default: true,\n tag: 'tag:yaml.org,2002:int',\n test: /^[-+]?[0-9][0-9_]*$/,\n resolve: (str, _onError, opt) => intResolve(str, 0, 10, opt),\n stringify: stringifyNumber.stringifyNumber\n};\nconst intHex = {\n identify: intIdentify,\n default: true,\n tag: 'tag:yaml.org,2002:int',\n format: 'HEX',\n test: /^[-+]?0x[0-9a-fA-F_]+$/,\n resolve: (str, _onError, opt) => intResolve(str, 2, 16, opt),\n stringify: node => intStringify(node, 16, '0x')\n};\n\nexports.int = int;\nexports.intBin = intBin;\nexports.intHex = intHex;\nexports.intOct = intOct;\n","'use strict';\n\nvar YAMLSeq = require('../../nodes/YAMLSeq.js');\nvar toJS = require('../../nodes/toJS.js');\nvar Node = require('../../nodes/Node.js');\nvar YAMLMap = require('../../nodes/YAMLMap.js');\nvar pairs = require('./pairs.js');\n\nclass YAMLOMap extends YAMLSeq.YAMLSeq {\n constructor() {\n super();\n this.add = YAMLMap.YAMLMap.prototype.add.bind(this);\n this.delete = YAMLMap.YAMLMap.prototype.delete.bind(this);\n this.get = YAMLMap.YAMLMap.prototype.get.bind(this);\n this.has = YAMLMap.YAMLMap.prototype.has.bind(this);\n this.set = YAMLMap.YAMLMap.prototype.set.bind(this);\n this.tag = YAMLOMap.tag;\n }\n /**\n * If `ctx` is given, the return type is actually `Map`,\n * but TypeScript won't allow widening the signature of a child method.\n */\n toJSON(_, ctx) {\n if (!ctx)\n return super.toJSON(_);\n const map = new Map();\n if (ctx?.onCreate)\n 
ctx.onCreate(map);\n for (const pair of this.items) {\n let key, value;\n if (Node.isPair(pair)) {\n key = toJS.toJS(pair.key, '', ctx);\n value = toJS.toJS(pair.value, key, ctx);\n }\n else {\n key = toJS.toJS(pair, '', ctx);\n }\n if (map.has(key))\n throw new Error('Ordered maps must not include duplicate keys');\n map.set(key, value);\n }\n return map;\n }\n}\nYAMLOMap.tag = 'tag:yaml.org,2002:omap';\nconst omap = {\n collection: 'seq',\n identify: value => value instanceof Map,\n nodeClass: YAMLOMap,\n default: false,\n tag: 'tag:yaml.org,2002:omap',\n resolve(seq, onError) {\n const pairs$1 = pairs.resolvePairs(seq, onError);\n const seenKeys = [];\n for (const { key } of pairs$1.items) {\n if (Node.isScalar(key)) {\n if (seenKeys.includes(key.value)) {\n onError(`Ordered maps must not include duplicate keys: ${key.value}`);\n }\n else {\n seenKeys.push(key.value);\n }\n }\n }\n return Object.assign(new YAMLOMap(), pairs$1);\n },\n createNode(schema, iterable, ctx) {\n const pairs$1 = pairs.createPairs(schema, iterable, ctx);\n const omap = new YAMLOMap();\n omap.items = pairs$1.items;\n return omap;\n }\n};\n\nexports.YAMLOMap = YAMLOMap;\nexports.omap = omap;\n","'use strict';\n\nvar Node = require('../../nodes/Node.js');\nvar Pair = require('../../nodes/Pair.js');\nvar Scalar = require('../../nodes/Scalar.js');\nvar YAMLSeq = require('../../nodes/YAMLSeq.js');\n\nfunction resolvePairs(seq, onError) {\n if (Node.isSeq(seq)) {\n for (let i = 0; i < seq.items.length; ++i) {\n let item = seq.items[i];\n if (Node.isPair(item))\n continue;\n else if (Node.isMap(item)) {\n if (item.items.length > 1)\n onError('Each pair must have its own sequence indicator');\n const pair = item.items[0] || new Pair.Pair(new Scalar.Scalar(null));\n if (item.commentBefore)\n pair.key.commentBefore = pair.key.commentBefore\n ? `${item.commentBefore}\\n${pair.key.commentBefore}`\n : item.commentBefore;\n if (item.comment) {\n const cn = pair.value ?? pair.key;\n cn.comment = cn.comment\n ? `${item.comment}\\n${cn.comment}`\n : item.comment;\n }\n item = pair;\n }\n seq.items[i] = Node.isPair(item) ? 
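A sketch of the ordered-map behaviour documented above: with the known-tag resolution listed in `coreKnownTags`, an explicit `!!omap` should resolve to a JS `Map`, keeping key order and rejecting duplicate keys:

const YAML = require('yaml');

const m = YAML.parse('!!omap\n- one: 1\n- two: 2\n');
console.log(m instanceof Map, [...m.keys()]); // -> true [ 'one', 'two' ]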
item : new Pair.Pair(item);\n }\n }\n else\n onError('Expected a sequence for this tag');\n return seq;\n}\nfunction createPairs(schema, iterable, ctx) {\n const { replacer } = ctx;\n const pairs = new YAMLSeq.YAMLSeq(schema);\n pairs.tag = 'tag:yaml.org,2002:pairs';\n let i = 0;\n if (iterable && Symbol.iterator in Object(iterable))\n for (let it of iterable) {\n if (typeof replacer === 'function')\n it = replacer.call(iterable, String(i++), it);\n let key, value;\n if (Array.isArray(it)) {\n if (it.length === 2) {\n key = it[0];\n value = it[1];\n }\n else\n throw new TypeError(`Expected [key, value] tuple: ${it}`);\n }\n else if (it && it instanceof Object) {\n const keys = Object.keys(it);\n if (keys.length === 1) {\n key = keys[0];\n value = it[key];\n }\n else\n throw new TypeError(`Expected { key: value } tuple: ${it}`);\n }\n else {\n key = it;\n }\n pairs.items.push(Pair.createPair(key, value, ctx));\n }\n return pairs;\n}\nconst pairs = {\n collection: 'seq',\n default: false,\n tag: 'tag:yaml.org,2002:pairs',\n resolve: resolvePairs,\n createNode: createPairs\n};\n\nexports.createPairs = createPairs;\nexports.pairs = pairs;\nexports.resolvePairs = resolvePairs;\n","'use strict';\n\nvar map = require('../common/map.js');\nvar _null = require('../common/null.js');\nvar seq = require('../common/seq.js');\nvar string = require('../common/string.js');\nvar binary = require('./binary.js');\nvar bool = require('./bool.js');\nvar float = require('./float.js');\nvar int = require('./int.js');\nvar omap = require('./omap.js');\nvar pairs = require('./pairs.js');\nvar set = require('./set.js');\nvar timestamp = require('./timestamp.js');\n\nconst schema = [\n map.map,\n seq.seq,\n string.string,\n _null.nullTag,\n bool.trueTag,\n bool.falseTag,\n int.intBin,\n int.intOct,\n int.int,\n int.intHex,\n float.floatNaN,\n float.floatExp,\n float.float,\n binary.binary,\n omap.omap,\n pairs.pairs,\n set.set,\n timestamp.intTime,\n timestamp.floatTime,\n timestamp.timestamp\n];\n\nexports.schema = schema;\n","'use strict';\n\nvar Node = require('../../nodes/Node.js');\nvar Pair = require('../../nodes/Pair.js');\nvar YAMLMap = require('../../nodes/YAMLMap.js');\n\nclass YAMLSet extends YAMLMap.YAMLMap {\n constructor(schema) {\n super(schema);\n this.tag = YAMLSet.tag;\n }\n add(key) {\n let pair;\n if (Node.isPair(key))\n pair = key;\n else if (key &&\n typeof key === 'object' &&\n 'key' in key &&\n 'value' in key &&\n key.value === null)\n pair = new Pair.Pair(key.key, null);\n else\n pair = new Pair.Pair(key, null);\n const prev = YAMLMap.findPair(this.items, pair.key);\n if (!prev)\n this.items.push(pair);\n }\n /**\n * If `keepPair` is `true`, returns the Pair matching `key`.\n * Otherwise, returns the value of that Pair's key.\n */\n get(key, keepPair) {\n const pair = YAMLMap.findPair(this.items, key);\n return !keepPair && Node.isPair(pair)\n ? Node.isScalar(pair.key)\n ? 
pair.key.value\n : pair.key\n : pair;\n }\n set(key, value) {\n if (typeof value !== 'boolean')\n throw new Error(`Expected boolean value for set(key, value) in a YAML set, not ${typeof value}`);\n const prev = YAMLMap.findPair(this.items, key);\n if (prev && !value) {\n this.items.splice(this.items.indexOf(prev), 1);\n }\n else if (!prev && value) {\n this.items.push(new Pair.Pair(key));\n }\n }\n toJSON(_, ctx) {\n return super.toJSON(_, ctx, Set);\n }\n toString(ctx, onComment, onChompKeep) {\n if (!ctx)\n return JSON.stringify(this);\n if (this.hasAllNullValues(true))\n return super.toString(Object.assign({}, ctx, { allNullValues: true }), onComment, onChompKeep);\n else\n throw new Error('Set items must all have null values');\n }\n}\nYAMLSet.tag = 'tag:yaml.org,2002:set';\nconst set = {\n collection: 'map',\n identify: value => value instanceof Set,\n nodeClass: YAMLSet,\n default: false,\n tag: 'tag:yaml.org,2002:set',\n resolve(map, onError) {\n if (Node.isMap(map)) {\n if (map.hasAllNullValues(true))\n return Object.assign(new YAMLSet(), map);\n else\n onError('Set items must all have null values');\n }\n else\n onError('Expected a mapping for this tag');\n return map;\n },\n createNode(schema, iterable, ctx) {\n const { replacer } = ctx;\n const set = new YAMLSet(schema);\n if (iterable && Symbol.iterator in Object(iterable))\n for (let value of iterable) {\n if (typeof replacer === 'function')\n value = replacer.call(iterable, value, value);\n set.items.push(Pair.createPair(value, null, ctx));\n }\n return set;\n }\n};\n\nexports.YAMLSet = YAMLSet;\nexports.set = set;\n","'use strict';\n\nvar stringifyNumber = require('../../stringify/stringifyNumber.js');\n\n/** Internal types handle bigint as number, because TS can't figure it out. */\nfunction parseSexagesimal(str, asBigInt) {\n const sign = str[0];\n const parts = sign === '-' || sign === '+' ? str.substring(1) : str;\n const num = (n) => asBigInt ? BigInt(n) : Number(n);\n const res = parts\n .replace(/_/g, '')\n .split(':')\n .reduce((res, p) => res * num(60) + num(p), num(0));\n return (sign === '-' ? num(-1) * res : res);\n}\n/**\n * hhhh:mm:ss.sss\n *\n * Internal types handle bigint as number, because TS can't figure it out.\n */\nfunction stringifySexagesimal(node) {\n let { value } = node;\n let num = (n) => n;\n if (typeof value === 'bigint')\n num = n => BigInt(n);\n else if (isNaN(value) || !isFinite(value))\n return stringifyNumber.stringifyNumber(node);\n let sign = '';\n if (value < 0) {\n sign = '-';\n value *= num(-1);\n }\n const _60 = num(60);\n const parts = [value % _60]; // seconds, including ms\n if (value < 60) {\n parts.unshift(0); // at least one : is required\n }\n else {\n value = (value - parts[0]) / _60;\n parts.unshift(value % _60); // minutes\n if (value >= 60) {\n value = (value - parts[0]) / _60;\n parts.unshift(value); // hours\n }\n }\n return (sign +\n parts\n .map(n => (n < 10 ? 
'0' + String(n) : String(n)))\n .join(':')\n .replace(/000000\\d*$/, '') // % 60 may introduce error\n );\n}\nconst intTime = {\n identify: value => typeof value === 'bigint' || Number.isInteger(value),\n default: true,\n tag: 'tag:yaml.org,2002:int',\n format: 'TIME',\n test: /^[-+]?[0-9][0-9_]*(?::[0-5]?[0-9])+$/,\n resolve: (str, _onError, { intAsBigInt }) => parseSexagesimal(str, intAsBigInt),\n stringify: stringifySexagesimal\n};\nconst floatTime = {\n identify: value => typeof value === 'number',\n default: true,\n tag: 'tag:yaml.org,2002:float',\n format: 'TIME',\n test: /^[-+]?[0-9][0-9_]*(?::[0-5]?[0-9])+\\.[0-9_]*$/,\n resolve: str => parseSexagesimal(str, false),\n stringify: stringifySexagesimal\n};\nconst timestamp = {\n identify: value => value instanceof Date,\n default: true,\n tag: 'tag:yaml.org,2002:timestamp',\n // If the time zone is omitted, the timestamp is assumed to be specified in UTC. The time part\n // may be omitted altogether, resulting in a date format. In such a case, the time part is\n // assumed to be 00:00:00Z (start of day, UTC).\n test: RegExp('^([0-9]{4})-([0-9]{1,2})-([0-9]{1,2})' + // YYYY-Mm-Dd\n '(?:' + // time is optional\n '(?:t|T|[ \\\\t]+)' + // t | T | whitespace\n '([0-9]{1,2}):([0-9]{1,2}):([0-9]{1,2}(\\\\.[0-9]+)?)' + // Hh:Mm:Ss(.ss)?\n '(?:[ \\\\t]*(Z|[-+][012]?[0-9](?::[0-9]{2})?))?' + // Z | +5 | -03:30\n ')?$'),\n resolve(str) {\n const match = str.match(timestamp.test);\n if (!match)\n throw new Error('!!timestamp expects a date, starting with yyyy-mm-dd');\n const [, year, month, day, hour, minute, second] = match.map(Number);\n const millisec = match[7] ? Number((match[7] + '00').substr(1, 3)) : 0;\n let date = Date.UTC(year, month - 1, day, hour || 0, minute || 0, second || 0, millisec);\n const tz = match[8];\n if (tz && tz !== 'Z') {\n let d = parseSexagesimal(tz, false);\n if (Math.abs(d) < 30)\n d *= 60;\n date -= 60000 * d;\n }\n return new Date(date);\n },\n stringify: ({ value }) => value.toISOString().replace(/((T00:00)?:00)?\\.000Z$/, '')\n};\n\nexports.floatTime = floatTime;\nexports.intTime = intTime;\nexports.timestamp = timestamp;\n","'use strict';\n\nconst FOLD_FLOW = 'flow';\nconst FOLD_BLOCK = 'block';\nconst FOLD_QUOTED = 'quoted';\n/**\n * Tries to keep input at up to `lineWidth` characters, splitting only on spaces\n * not followed by newlines or spaces unless `mode` is `'quoted'`. 
Lines are\n * terminated with `\\n` and started with `indent`.\n */\nfunction foldFlowLines(text, indent, mode = 'flow', { indentAtStart, lineWidth = 80, minContentWidth = 20, onFold, onOverflow } = {}) {\n if (!lineWidth || lineWidth < 0)\n return text;\n const endStep = Math.max(1 + minContentWidth, 1 + lineWidth - indent.length);\n if (text.length <= endStep)\n return text;\n const folds = [];\n const escapedFolds = {};\n let end = lineWidth - indent.length;\n if (typeof indentAtStart === 'number') {\n if (indentAtStart > lineWidth - Math.max(2, minContentWidth))\n folds.push(0);\n else\n end = lineWidth - indentAtStart;\n }\n let split = undefined;\n let prev = undefined;\n let overflow = false;\n let i = -1;\n let escStart = -1;\n let escEnd = -1;\n if (mode === FOLD_BLOCK) {\n i = consumeMoreIndentedLines(text, i);\n if (i !== -1)\n end = i + endStep;\n }\n for (let ch; (ch = text[(i += 1)]);) {\n if (mode === FOLD_QUOTED && ch === '\\\\') {\n escStart = i;\n switch (text[i + 1]) {\n case 'x':\n i += 3;\n break;\n case 'u':\n i += 5;\n break;\n case 'U':\n i += 9;\n break;\n default:\n i += 1;\n }\n escEnd = i;\n }\n if (ch === '\\n') {\n if (mode === FOLD_BLOCK)\n i = consumeMoreIndentedLines(text, i);\n end = i + endStep;\n split = undefined;\n }\n else {\n if (ch === ' ' &&\n prev &&\n prev !== ' ' &&\n prev !== '\\n' &&\n prev !== '\\t') {\n // space surrounded by non-space can be replaced with newline + indent\n const next = text[i + 1];\n if (next && next !== ' ' && next !== '\\n' && next !== '\\t')\n split = i;\n }\n if (i >= end) {\n if (split) {\n folds.push(split);\n end = split + endStep;\n split = undefined;\n }\n else if (mode === FOLD_QUOTED) {\n // white-space collected at end may stretch past lineWidth\n while (prev === ' ' || prev === '\\t') {\n prev = ch;\n ch = text[(i += 1)];\n overflow = true;\n }\n // Account for newline escape, but don't break preceding escape\n const j = i > escEnd + 1 ? 
i - 2 : escStart - 1;\n // Bail out if lineWidth & minContentWidth are shorter than an escape string\n if (escapedFolds[j])\n return text;\n folds.push(j);\n escapedFolds[j] = true;\n end = j + endStep;\n split = undefined;\n }\n else {\n overflow = true;\n }\n }\n }\n prev = ch;\n }\n if (overflow && onOverflow)\n onOverflow();\n if (folds.length === 0)\n return text;\n if (onFold)\n onFold();\n let res = text.slice(0, folds[0]);\n for (let i = 0; i < folds.length; ++i) {\n const fold = folds[i];\n const end = folds[i + 1] || text.length;\n if (fold === 0)\n res = `\\n${indent}${text.slice(0, end)}`;\n else {\n if (mode === FOLD_QUOTED && escapedFolds[fold])\n res += `${text[fold]}\\\\`;\n res += `\\n${indent}${text.slice(fold + 1, end)}`;\n }\n }\n return res;\n}\n/**\n * Presumes `i + 1` is at the start of a line\n * @returns index of last newline in more-indented block\n */\nfunction consumeMoreIndentedLines(text, i) {\n let ch = text[i + 1];\n while (ch === ' ' || ch === '\\t') {\n do {\n ch = text[(i += 1)];\n } while (ch && ch !== '\\n');\n ch = text[i + 1];\n }\n return i;\n}\n\nexports.FOLD_BLOCK = FOLD_BLOCK;\nexports.FOLD_FLOW = FOLD_FLOW;\nexports.FOLD_QUOTED = FOLD_QUOTED;\nexports.foldFlowLines = foldFlowLines;\n","'use strict';\n\nvar anchors = require('../doc/anchors.js');\nvar Node = require('../nodes/Node.js');\nvar stringifyComment = require('./stringifyComment.js');\nvar stringifyString = require('./stringifyString.js');\n\nfunction createStringifyContext(doc, options) {\n const opt = Object.assign({\n blockQuote: true,\n commentString: stringifyComment.stringifyComment,\n defaultKeyType: null,\n defaultStringType: 'PLAIN',\n directives: null,\n doubleQuotedAsJSON: false,\n doubleQuotedMinMultiLineLength: 40,\n falseStr: 'false',\n flowCollectionPadding: true,\n indentSeq: true,\n lineWidth: 80,\n minContentWidth: 20,\n nullStr: 'null',\n simpleKeys: false,\n singleQuote: null,\n trueStr: 'true',\n verifyAliasOrder: true\n }, doc.schema.toStringOptions, options);\n let inFlow;\n switch (opt.collectionStyle) {\n case 'block':\n inFlow = false;\n break;\n case 'flow':\n inFlow = true;\n break;\n default:\n inFlow = null;\n }\n return {\n anchors: new Set(),\n doc,\n flowCollectionPadding: opt.flowCollectionPadding ? ' ' : '',\n indent: '',\n indentStep: typeof opt.indent === 'number' ? ' '.repeat(opt.indent) : ' ',\n inFlow,\n options: opt\n };\n}\nfunction getTagObject(tags, item) {\n if (item.tag) {\n const match = tags.filter(t => t.tag === item.tag);\n if (match.length > 0)\n return match.find(t => t.format === item.format) ?? match[0];\n }\n let tagObj = undefined;\n let obj;\n if (Node.isScalar(item)) {\n obj = item.value;\n const match = tags.filter(t => t.identify?.(obj));\n tagObj =\n match.find(t => t.format === item.format) ?? match.find(t => !t.format);\n }\n else {\n obj = item;\n tagObj = tags.find(t => t.nodeClass && obj instanceof t.nodeClass);\n }\n if (!tagObj) {\n const name = obj?.constructor?.name ?? typeof obj;\n throw new Error(`Tag not resolved for ${name} value`);\n }\n return tagObj;\n}\n// needs to be called before value stringifier to allow for circular anchor refs\nfunction stringifyProps(node, tagObj, { anchors: anchors$1, doc }) {\n if (!doc.directives)\n return '';\n const props = [];\n const anchor = (Node.isScalar(node) || Node.isCollection(node)) && node.anchor;\n if (anchor && anchors.anchorIsValid(anchor)) {\n anchors$1.add(anchor);\n props.push(`&${anchor}`);\n }\n const tag = node.tag ? node.tag : tagObj.default ? 
null : tagObj.tag;\n if (tag)\n props.push(doc.directives.tagString(tag));\n return props.join(' ');\n}\nfunction stringify(item, ctx, onComment, onChompKeep) {\n if (Node.isPair(item))\n return item.toString(ctx, onComment, onChompKeep);\n if (Node.isAlias(item)) {\n if (ctx.doc.directives)\n return item.toString(ctx);\n if (ctx.resolvedAliases?.has(item)) {\n throw new TypeError(`Cannot stringify circular structure without alias nodes`);\n }\n else {\n if (ctx.resolvedAliases)\n ctx.resolvedAliases.add(item);\n else\n ctx.resolvedAliases = new Set([item]);\n item = item.resolve(ctx.doc);\n }\n }\n let tagObj = undefined;\n const node = Node.isNode(item)\n ? item\n : ctx.doc.createNode(item, { onTagObj: o => (tagObj = o) });\n if (!tagObj)\n tagObj = getTagObject(ctx.doc.schema.tags, node);\n const props = stringifyProps(node, tagObj, ctx);\n if (props.length > 0)\n ctx.indentAtStart = (ctx.indentAtStart ?? 0) + props.length + 1;\n const str = typeof tagObj.stringify === 'function'\n ? tagObj.stringify(node, ctx, onComment, onChompKeep)\n : Node.isScalar(node)\n ? stringifyString.stringifyString(node, ctx, onComment, onChompKeep)\n : node.toString(ctx, onComment, onChompKeep);\n if (!props)\n return str;\n return Node.isScalar(node) || str[0] === '{' || str[0] === '['\n ? `${props} ${str}`\n : `${props}\\n${ctx.indent}${str}`;\n}\n\nexports.createStringifyContext = createStringifyContext;\nexports.stringify = stringify;\n","'use strict';\n\nvar Collection = require('../nodes/Collection.js');\nvar Node = require('../nodes/Node.js');\nvar stringify = require('./stringify.js');\nvar stringifyComment = require('./stringifyComment.js');\n\nfunction stringifyCollection(collection, ctx, options) {\n const flow = ctx.inFlow ?? collection.flow;\n const stringify = flow ? stringifyFlowCollection : stringifyBlockCollection;\n return stringify(collection, ctx, options);\n}\nfunction stringifyBlockCollection({ comment, items }, ctx, { blockItemPrefix, flowChars, itemIndent, onChompKeep, onComment }) {\n const { indent, options: { commentString } } = ctx;\n const itemCtx = Object.assign({}, ctx, { indent: itemIndent, type: null });\n let chompKeep = false; // flag for the preceding node's status\n const lines = [];\n for (let i = 0; i < items.length; ++i) {\n const item = items[i];\n let comment = null;\n if (Node.isNode(item)) {\n if (!chompKeep && item.spaceBefore)\n lines.push('');\n addCommentBefore(ctx, lines, item.commentBefore, chompKeep);\n if (item.comment)\n comment = item.comment;\n }\n else if (Node.isPair(item)) {\n const ik = Node.isNode(item.key) ? item.key : null;\n if (ik) {\n if (!chompKeep && ik.spaceBefore)\n lines.push('');\n addCommentBefore(ctx, lines, ik.commentBefore, chompKeep);\n }\n }\n chompKeep = false;\n let str = stringify.stringify(item, itemCtx, () => (comment = null), () => (chompKeep = true));\n if (comment)\n str += stringifyComment.lineComment(str, itemIndent, commentString(comment));\n if (chompKeep && comment)\n chompKeep = false;\n lines.push(blockItemPrefix + str);\n }\n let str;\n if (lines.length === 0) {\n str = flowChars.start + flowChars.end;\n }\n else {\n str = lines[0];\n for (let i = 1; i < lines.length; ++i) {\n const line = lines[i];\n str += line ? 
`\\n${indent}${line}` : '\\n';\n }\n }\n if (comment) {\n str += '\\n' + stringifyComment.indentComment(commentString(comment), indent);\n if (onComment)\n onComment();\n }\n else if (chompKeep && onChompKeep)\n onChompKeep();\n return str;\n}\nfunction stringifyFlowCollection({ comment, items }, ctx, { flowChars, itemIndent, onComment }) {\n const { indent, indentStep, flowCollectionPadding: fcPadding, options: { commentString } } = ctx;\n itemIndent += indentStep;\n const itemCtx = Object.assign({}, ctx, {\n indent: itemIndent,\n inFlow: true,\n type: null\n });\n let reqNewline = false;\n let linesAtValue = 0;\n const lines = [];\n for (let i = 0; i < items.length; ++i) {\n const item = items[i];\n let comment = null;\n if (Node.isNode(item)) {\n if (item.spaceBefore)\n lines.push('');\n addCommentBefore(ctx, lines, item.commentBefore, false);\n if (item.comment)\n comment = item.comment;\n }\n else if (Node.isPair(item)) {\n const ik = Node.isNode(item.key) ? item.key : null;\n if (ik) {\n if (ik.spaceBefore)\n lines.push('');\n addCommentBefore(ctx, lines, ik.commentBefore, false);\n if (ik.comment)\n reqNewline = true;\n }\n const iv = Node.isNode(item.value) ? item.value : null;\n if (iv) {\n if (iv.comment)\n comment = iv.comment;\n if (iv.commentBefore)\n reqNewline = true;\n }\n else if (item.value == null && ik && ik.comment) {\n comment = ik.comment;\n }\n }\n if (comment)\n reqNewline = true;\n let str = stringify.stringify(item, itemCtx, () => (comment = null));\n if (i < items.length - 1)\n str += ',';\n if (comment)\n str += stringifyComment.lineComment(str, itemIndent, commentString(comment));\n if (!reqNewline && (lines.length > linesAtValue || str.includes('\\n')))\n reqNewline = true;\n lines.push(str);\n linesAtValue = lines.length;\n }\n let str;\n const { start, end } = flowChars;\n if (lines.length === 0) {\n str = start + end;\n }\n else {\n if (!reqNewline) {\n const len = lines.reduce((sum, line) => sum + line.length + 2, 2);\n reqNewline = len > Collection.Collection.maxFlowStringSingleLineLength;\n }\n if (reqNewline) {\n str = start;\n for (const line of lines)\n str += line ? `\\n${indentStep}${indent}${line}` : '\\n';\n str += `\\n${indent}${end}`;\n }\n else {\n str = `${start}${fcPadding}${lines.join(' ')}${fcPadding}${end}`;\n }\n }\n if (comment) {\n str += stringifyComment.lineComment(str, commentString(comment), indent);\n if (onComment)\n onComment();\n }\n return str;\n}\nfunction addCommentBefore({ indent, options: { commentString } }, lines, comment, chompKeep) {\n if (comment && chompKeep)\n comment = comment.replace(/^\\n+/, '');\n if (comment) {\n const ic = stringifyComment.indentComment(commentString(comment), indent);\n lines.push(ic.trimStart()); // Avoid double indent on first line\n }\n}\n\nexports.stringifyCollection = stringifyCollection;\n","'use strict';\n\n/**\n * Stringifies a comment.\n *\n * Empty comment lines are left empty,\n * lines consisting of a single space are replaced by `#`,\n * and all other lines are prefixed with a `#`.\n */\nconst stringifyComment = (str) => str.replace(/^(?!$)(?: $)?/gm, '#');\nfunction indentComment(comment, indent) {\n if (/^\\n+$/.test(comment))\n return comment.substring(1);\n return indent ? comment.replace(/^(?! *$)/gm, indent) : comment;\n}\nconst lineComment = (str, indent, comment) => str.endsWith('\\n')\n ? indentComment(comment, indent)\n : comment.includes('\\n')\n ? '\\n' + indentComment(comment, indent)\n : (str.endsWith(' ') ? 
'' : ' ') + comment;\n\nexports.indentComment = indentComment;\nexports.lineComment = lineComment;\nexports.stringifyComment = stringifyComment;\n","'use strict';\n\nvar Node = require('../nodes/Node.js');\nvar stringify = require('./stringify.js');\nvar stringifyComment = require('./stringifyComment.js');\n\nfunction stringifyDocument(doc, options) {\n const lines = [];\n let hasDirectives = options.directives === true;\n if (options.directives !== false && doc.directives) {\n const dir = doc.directives.toString(doc);\n if (dir) {\n lines.push(dir);\n hasDirectives = true;\n }\n else if (doc.directives.docStart)\n hasDirectives = true;\n }\n if (hasDirectives)\n lines.push('---');\n const ctx = stringify.createStringifyContext(doc, options);\n const { commentString } = ctx.options;\n if (doc.commentBefore) {\n if (lines.length !== 1)\n lines.unshift('');\n const cs = commentString(doc.commentBefore);\n lines.unshift(stringifyComment.indentComment(cs, ''));\n }\n let chompKeep = false;\n let contentComment = null;\n if (doc.contents) {\n if (Node.isNode(doc.contents)) {\n if (doc.contents.spaceBefore && hasDirectives)\n lines.push('');\n if (doc.contents.commentBefore) {\n const cs = commentString(doc.contents.commentBefore);\n lines.push(stringifyComment.indentComment(cs, ''));\n }\n // top-level block scalars need to be indented if followed by a comment\n ctx.forceBlockIndent = !!doc.comment;\n contentComment = doc.contents.comment;\n }\n const onChompKeep = contentComment ? undefined : () => (chompKeep = true);\n let body = stringify.stringify(doc.contents, ctx, () => (contentComment = null), onChompKeep);\n if (contentComment)\n body += stringifyComment.lineComment(body, '', commentString(contentComment));\n if ((body[0] === '|' || body[0] === '>') &&\n lines[lines.length - 1] === '---') {\n // Top-level block scalars with a preceding doc marker ought to use the\n // same line for their header.\n lines[lines.length - 1] = `--- ${body}`;\n }\n else\n lines.push(body);\n }\n else {\n lines.push(stringify.stringify(doc.contents, ctx));\n }\n if (doc.directives?.docEnd) {\n if (doc.comment) {\n const cs = commentString(doc.comment);\n if (cs.includes('\\n')) {\n lines.push('...');\n lines.push(stringifyComment.indentComment(cs, ''));\n }\n else {\n lines.push(`... ${cs}`);\n }\n }\n else {\n lines.push('...');\n }\n }\n else {\n let dc = doc.comment;\n if (dc && chompKeep)\n dc = dc.replace(/^\\n+/, '');\n if (dc) {\n if ((!chompKeep || contentComment) && lines[lines.length - 1] !== '')\n lines.push('');\n lines.push(stringifyComment.indentComment(commentString(dc), ''));\n }\n }\n return lines.join('\\n') + '\\n';\n}\n\nexports.stringifyDocument = stringifyDocument;\n","'use strict';\n\nfunction stringifyNumber({ format, minFractionDigits, tag, value }) {\n if (typeof value === 'bigint')\n return String(value);\n const num = typeof value === 'number' ? value : Number(value);\n if (!isFinite(num))\n return isNaN(num) ? '.nan' : num < 0 ? 
'-.inf' : '.inf';\n let n = JSON.stringify(value);\n if (!format &&\n minFractionDigits &&\n (!tag || tag === 'tag:yaml.org,2002:float') &&\n /^\\d/.test(n)) {\n let i = n.indexOf('.');\n if (i < 0) {\n i = n.length;\n n += '.';\n }\n let d = minFractionDigits - (n.length - i - 1);\n while (d-- > 0)\n n += '0';\n }\n return n;\n}\n\nexports.stringifyNumber = stringifyNumber;\n","'use strict';\n\nvar Node = require('../nodes/Node.js');\nvar Scalar = require('../nodes/Scalar.js');\nvar stringify = require('./stringify.js');\nvar stringifyComment = require('./stringifyComment.js');\n\nfunction stringifyPair({ key, value }, ctx, onComment, onChompKeep) {\n const { allNullValues, doc, indent, indentStep, options: { commentString, indentSeq, simpleKeys } } = ctx;\n let keyComment = (Node.isNode(key) && key.comment) || null;\n if (simpleKeys) {\n if (keyComment) {\n throw new Error('With simple keys, key nodes cannot have comments');\n }\n if (Node.isCollection(key)) {\n const msg = 'With simple keys, collection cannot be used as a key value';\n throw new Error(msg);\n }\n }\n let explicitKey = !simpleKeys &&\n (!key ||\n (keyComment && value == null && !ctx.inFlow) ||\n Node.isCollection(key) ||\n (Node.isScalar(key)\n ? key.type === Scalar.Scalar.BLOCK_FOLDED || key.type === Scalar.Scalar.BLOCK_LITERAL\n : typeof key === 'object'));\n ctx = Object.assign({}, ctx, {\n allNullValues: false,\n implicitKey: !explicitKey && (simpleKeys || !allNullValues),\n indent: indent + indentStep\n });\n let keyCommentDone = false;\n let chompKeep = false;\n let str = stringify.stringify(key, ctx, () => (keyCommentDone = true), () => (chompKeep = true));\n if (!explicitKey && !ctx.inFlow && str.length > 1024) {\n if (simpleKeys)\n throw new Error('With simple keys, single line scalar must not span more than 1024 characters');\n explicitKey = true;\n }\n if (ctx.inFlow) {\n if (allNullValues || value == null) {\n if (keyCommentDone && onComment)\n onComment();\n return str === '' ? '?' : explicitKey ? `? ${str}` : str;\n }\n }\n else if ((allNullValues && !simpleKeys) || (value == null && explicitKey)) {\n str = `? ${str}`;\n if (keyComment && !keyCommentDone) {\n str += stringifyComment.lineComment(str, ctx.indent, commentString(keyComment));\n }\n else if (chompKeep && onChompKeep)\n onChompKeep();\n return str;\n }\n if (keyCommentDone)\n keyComment = null;\n if (explicitKey) {\n if (keyComment)\n str += stringifyComment.lineComment(str, ctx.indent, commentString(keyComment));\n str = `? 
${str}\\n${indent}:`;\n }\n else {\n str = `${str}:`;\n if (keyComment)\n str += stringifyComment.lineComment(str, ctx.indent, commentString(keyComment));\n }\n let vsb, vcb, valueComment;\n if (Node.isNode(value)) {\n vsb = !!value.spaceBefore;\n vcb = value.commentBefore;\n valueComment = value.comment;\n }\n else {\n vsb = false;\n vcb = null;\n valueComment = null;\n if (value && typeof value === 'object')\n value = doc.createNode(value);\n }\n ctx.implicitKey = false;\n if (!explicitKey && !keyComment && Node.isScalar(value))\n ctx.indentAtStart = str.length + 1;\n chompKeep = false;\n if (!indentSeq &&\n indentStep.length >= 2 &&\n !ctx.inFlow &&\n !explicitKey &&\n Node.isSeq(value) &&\n !value.flow &&\n !value.tag &&\n !value.anchor) {\n // If indentSeq === false, consider '- ' as part of indentation where possible\n ctx.indent = ctx.indent.substring(2);\n }\n let valueCommentDone = false;\n const valueStr = stringify.stringify(value, ctx, () => (valueCommentDone = true), () => (chompKeep = true));\n let ws = ' ';\n if (keyComment || vsb || vcb) {\n ws = vsb ? '\\n' : '';\n if (vcb) {\n const cs = commentString(vcb);\n ws += `\\n${stringifyComment.indentComment(cs, ctx.indent)}`;\n }\n if (valueStr === '' && !ctx.inFlow) {\n if (ws === '\\n')\n ws = '\\n\\n';\n }\n else {\n ws += `\\n${ctx.indent}`;\n }\n }\n else if (!explicitKey && Node.isCollection(value)) {\n const vs0 = valueStr[0];\n const nl0 = valueStr.indexOf('\\n');\n const hasNewline = nl0 !== -1;\n const flow = ctx.inFlow ?? value.flow ?? value.items.length === 0;\n if (hasNewline || !flow) {\n let hasPropsLine = false;\n if (hasNewline && (vs0 === '&' || vs0 === '!')) {\n let sp0 = valueStr.indexOf(' ');\n if (vs0 === '&' &&\n sp0 !== -1 &&\n sp0 < nl0 &&\n valueStr[sp0 + 1] === '!') {\n sp0 = valueStr.indexOf(' ', sp0 + 1);\n }\n if (sp0 === -1 || nl0 < sp0)\n hasPropsLine = true;\n }\n if (!hasPropsLine)\n ws = `\\n${ctx.indent}`;\n }\n }\n else if (valueStr === '' || valueStr[0] === '\\n') {\n ws = '';\n }\n str += ws + valueStr;\n if (ctx.inFlow) {\n if (valueCommentDone && onComment)\n onComment();\n }\n else if (valueComment && !valueCommentDone) {\n str += stringifyComment.lineComment(str, ctx.indent, commentString(valueComment));\n }\n else if (chompKeep && onChompKeep) {\n onChompKeep();\n }\n return str;\n}\n\nexports.stringifyPair = stringifyPair;\n","'use strict';\n\nvar Scalar = require('../nodes/Scalar.js');\nvar foldFlowLines = require('./foldFlowLines.js');\n\nconst getFoldOptions = (ctx) => ({\n indentAtStart: ctx.indentAtStart,\n lineWidth: ctx.options.lineWidth,\n minContentWidth: ctx.options.minContentWidth\n});\n// Also checks for lines starting with %, as parsing the output as YAML 1.1 will\n// presume that's starting a new document.\nconst containsDocumentMarker = (str) => /^(%|---|\\.\\.\\.)/m.test(str);\nfunction lineLengthOverLimit(str, lineWidth, indentLength) {\n if (!lineWidth || lineWidth < 0)\n return false;\n const limit = lineWidth - indentLength;\n const strLen = str.length;\n if (strLen <= limit)\n return false;\n for (let i = 0, start = 0; i < strLen; ++i) {\n if (str[i] === '\\n') {\n if (i - start > limit)\n return true;\n start = i + 1;\n if (strLen - start <= limit)\n return false;\n }\n }\n return true;\n}\nfunction doubleQuotedString(value, ctx) {\n const json = JSON.stringify(value);\n if (ctx.options.doubleQuotedAsJSON)\n return json;\n const { implicitKey } = ctx;\n const minMultiLineLength = ctx.options.doubleQuotedMinMultiLineLength;\n const indent = ctx.indent || 
(containsDocumentMarker(value) ? ' ' : '');\n let str = '';\n let start = 0;\n for (let i = 0, ch = json[i]; ch; ch = json[++i]) {\n if (ch === ' ' && json[i + 1] === '\\\\' && json[i + 2] === 'n') {\n // space before newline needs to be escaped to not be folded\n str += json.slice(start, i) + '\\\\ ';\n i += 1;\n start = i;\n ch = '\\\\';\n }\n if (ch === '\\\\')\n switch (json[i + 1]) {\n case 'u':\n {\n str += json.slice(start, i);\n const code = json.substr(i + 2, 4);\n switch (code) {\n case '0000':\n str += '\\\\0';\n break;\n case '0007':\n str += '\\\\a';\n break;\n case '000b':\n str += '\\\\v';\n break;\n case '001b':\n str += '\\\\e';\n break;\n case '0085':\n str += '\\\\N';\n break;\n case '00a0':\n str += '\\\\_';\n break;\n case '2028':\n str += '\\\\L';\n break;\n case '2029':\n str += '\\\\P';\n break;\n default:\n if (code.substr(0, 2) === '00')\n str += '\\\\x' + code.substr(2);\n else\n str += json.substr(i, 6);\n }\n i += 5;\n start = i + 1;\n }\n break;\n case 'n':\n if (implicitKey ||\n json[i + 2] === '\"' ||\n json.length < minMultiLineLength) {\n i += 1;\n }\n else {\n // folding will eat first newline\n str += json.slice(start, i) + '\\n\\n';\n while (json[i + 2] === '\\\\' &&\n json[i + 3] === 'n' &&\n json[i + 4] !== '\"') {\n str += '\\n';\n i += 2;\n }\n str += indent;\n // space after newline needs to be escaped to not be folded\n if (json[i + 2] === ' ')\n str += '\\\\';\n i += 1;\n start = i + 1;\n }\n break;\n default:\n i += 1;\n }\n }\n str = start ? str + json.slice(start) : json;\n return implicitKey\n ? str\n : foldFlowLines.foldFlowLines(str, indent, foldFlowLines.FOLD_QUOTED, getFoldOptions(ctx));\n}\nfunction singleQuotedString(value, ctx) {\n if (ctx.options.singleQuote === false ||\n (ctx.implicitKey && value.includes('\\n')) ||\n /[ \\t]\\n|\\n[ \\t]/.test(value) // single quoted string can't have leading or trailing whitespace around newline\n )\n return doubleQuotedString(value, ctx);\n const indent = ctx.indent || (containsDocumentMarker(value) ? ' ' : '');\n const res = \"'\" + value.replace(/'/g, \"''\").replace(/\\n+/g, `$&\\n${indent}`) + \"'\";\n return ctx.implicitKey\n ? res\n : foldFlowLines.foldFlowLines(res, indent, foldFlowLines.FOLD_FLOW, getFoldOptions(ctx));\n}\nfunction quotedString(value, ctx) {\n const { singleQuote } = ctx.options;\n let qs;\n if (singleQuote === false)\n qs = doubleQuotedString;\n else {\n const hasDouble = value.includes('\"');\n const hasSingle = value.includes(\"'\");\n if (hasDouble && !hasSingle)\n qs = singleQuotedString;\n else if (hasSingle && !hasDouble)\n qs = doubleQuotedString;\n else\n qs = singleQuote ? singleQuotedString : doubleQuotedString;\n }\n return qs(value, ctx);\n}\nfunction blockString({ comment, type, value }, ctx, onComment, onChompKeep) {\n const { blockQuote, commentString, lineWidth } = ctx.options;\n // 1. Block can't end in whitespace unless the last line is non-empty.\n // 2. Strings consisting of only whitespace are best rendered explicitly.\n if (!blockQuote || /\\n[\\t ]+$/.test(value) || /^\\s*$/.test(value)) {\n return quotedString(value, ctx);\n }\n const indent = ctx.indent ||\n (ctx.forceBlockIndent || containsDocumentMarker(value) ? ' ' : '');\n const literal = blockQuote === 'literal'\n ? true\n : blockQuote === 'folded' || type === Scalar.Scalar.BLOCK_FOLDED\n ? false\n : type === Scalar.Scalar.BLOCK_LITERAL\n ? true\n : !lineLengthOverLimit(value, lineWidth, indent.length);\n if (!value)\n return literal ? 
'|\\n' : '>\\n';\n // determine chomping from whitespace at value end\n let chomp;\n let endStart;\n for (endStart = value.length; endStart > 0; --endStart) {\n const ch = value[endStart - 1];\n if (ch !== '\\n' && ch !== '\\t' && ch !== ' ')\n break;\n }\n let end = value.substring(endStart);\n const endNlPos = end.indexOf('\\n');\n if (endNlPos === -1) {\n chomp = '-'; // strip\n }\n else if (value === end || endNlPos !== end.length - 1) {\n chomp = '+'; // keep\n if (onChompKeep)\n onChompKeep();\n }\n else {\n chomp = ''; // clip\n }\n if (end) {\n value = value.slice(0, -end.length);\n if (end[end.length - 1] === '\\n')\n end = end.slice(0, -1);\n end = end.replace(/\\n+(?!\\n|$)/g, `$&${indent}`);\n }\n // determine indent indicator from whitespace at value start\n let startWithSpace = false;\n let startEnd;\n let startNlPos = -1;\n for (startEnd = 0; startEnd < value.length; ++startEnd) {\n const ch = value[startEnd];\n if (ch === ' ')\n startWithSpace = true;\n else if (ch === '\\n')\n startNlPos = startEnd;\n else\n break;\n }\n let start = value.substring(0, startNlPos < startEnd ? startNlPos + 1 : startEnd);\n if (start) {\n value = value.substring(start.length);\n start = start.replace(/\\n+/g, `$&${indent}`);\n }\n const indentSize = indent ? '2' : '1'; // root is at -1\n let header = (literal ? '|' : '>') + (startWithSpace ? indentSize : '') + chomp;\n if (comment) {\n header += ' ' + commentString(comment.replace(/ ?[\\r\\n]+/g, ' '));\n if (onComment)\n onComment();\n }\n if (literal) {\n value = value.replace(/\\n+/g, `$&${indent}`);\n return `${header}\\n${indent}${start}${value}${end}`;\n }\n value = value\n .replace(/\\n+/g, '\\n$&')\n .replace(/(?:^|\\n)([\\t ].*)(?:([\\n\\t ]*)\\n(?![\\n\\t ]))?/g, '$1$2') // more-indented lines aren't folded\n // ^ more-ind. ^ empty ^ capture next empty lines only at end of indent\n .replace(/\\n+/g, `$&${indent}`);\n const body = foldFlowLines.foldFlowLines(`${start}${value}${end}`, indent, foldFlowLines.FOLD_BLOCK, getFoldOptions(ctx));\n return `${header}\\n${indent}${body}`;\n}\nfunction plainString(item, ctx, onComment, onChompKeep) {\n const { type, value } = item;\n const { actualString, implicitKey, indent, indentStep, inFlow } = ctx;\n if ((implicitKey && /[\\n[\\]{},]/.test(value)) ||\n (inFlow && /[[\\]{},]/.test(value))) {\n return quotedString(value, ctx);\n }\n if (!value ||\n /^[\\n\\t ,[\\]{}#&*!|>'\"%@`]|^[?-]$|^[?-][ \\t]|[\\n:][ \\t]|[ \\t]\\n|[\\n\\t ]#|[\\n\\t :]$/.test(value)) {\n // not allowed:\n // - empty string, '-' or '?'\n // - start with an indicator character (except [?:-]) or /[?-] /\n // - '\\n ', ': ' or ' \\n' anywhere\n // - '#' not preceded by a non-space char\n // - end with ' ' or ':'\n return implicitKey || inFlow || !value.includes('\\n')\n ? quotedString(value, ctx)\n : blockString(item, ctx, onComment, onChompKeep);\n }\n if (!implicitKey &&\n !inFlow &&\n type !== Scalar.Scalar.PLAIN &&\n value.includes('\\n')) {\n // Where allowed & type not set explicitly, prefer block style for multiline strings\n return blockString(item, ctx, onComment, onChompKeep);\n }\n if (containsDocumentMarker(value)) {\n if (indent === '') {\n ctx.forceBlockIndent = true;\n return blockString(item, ctx, onComment, onChompKeep);\n }\n else if (implicitKey && indent === indentStep) {\n return quotedString(value, ctx);\n }\n }\n const str = value.replace(/\\n+/g, `$&\\n${indent}`);\n // Verify that output will be parsed as a string, as e.g. plain numbers and\n // booleans get parsed with those types in v1.2 (e.g. 
'42', 'true' & '0.9e-3'),\n // and others in v1.1.\n if (actualString) {\n const test = (tag) => tag.default && tag.tag !== 'tag:yaml.org,2002:str' && tag.test?.test(str);\n const { compat, tags } = ctx.doc.schema;\n if (tags.some(test) || compat?.some(test))\n return quotedString(value, ctx);\n }\n return implicitKey\n ? str\n : foldFlowLines.foldFlowLines(str, indent, foldFlowLines.FOLD_FLOW, getFoldOptions(ctx));\n}\nfunction stringifyString(item, ctx, onComment, onChompKeep) {\n const { implicitKey, inFlow } = ctx;\n const ss = typeof item.value === 'string'\n ? item\n : Object.assign({}, item, { value: String(item.value) });\n let { type } = item;\n if (type !== Scalar.Scalar.QUOTE_DOUBLE) {\n // force double quotes on control characters & unpaired surrogates\n if (/[\\x00-\\x08\\x0b-\\x1f\\x7f-\\x9f\\u{D800}-\\u{DFFF}]/u.test(ss.value))\n type = Scalar.Scalar.QUOTE_DOUBLE;\n }\n const _stringify = (_type) => {\n switch (_type) {\n case Scalar.Scalar.BLOCK_FOLDED:\n case Scalar.Scalar.BLOCK_LITERAL:\n return implicitKey || inFlow\n ? quotedString(ss.value, ctx) // blocks are not valid inside flow containers\n : blockString(ss, ctx, onComment, onChompKeep);\n case Scalar.Scalar.QUOTE_DOUBLE:\n return doubleQuotedString(ss.value, ctx);\n case Scalar.Scalar.QUOTE_SINGLE:\n return singleQuotedString(ss.value, ctx);\n case Scalar.Scalar.PLAIN:\n return plainString(ss, ctx, onComment, onChompKeep);\n default:\n return null;\n }\n };\n let res = _stringify(type);\n if (res === null) {\n const { defaultKeyType, defaultStringType } = ctx.options;\n const t = (implicitKey && defaultKeyType) || defaultStringType;\n res = _stringify(t);\n if (res === null)\n throw new Error(`Unsupported default string type ${t}`);\n }\n return res;\n}\n\nexports.stringifyString = stringifyString;\n","'use strict';\n\nvar Node = require('./nodes/Node.js');\n\nconst BREAK = Symbol('break visit');\nconst SKIP = Symbol('skip children');\nconst REMOVE = Symbol('remove node');\n/**\n * Apply a visitor to an AST node or document.\n *\n * Walks through the tree (depth-first) starting from `node`, calling a\n * `visitor` function with three arguments:\n * - `key`: For sequence values and map `Pair`, the node's index in the\n * collection. Within a `Pair`, `'key'` or `'value'`, correspondingly.\n * `null` for the root node.\n * - `node`: The current node.\n * - `path`: The ancestry of the current node.\n *\n * The return value of the visitor may be used to control the traversal:\n * - `undefined` (default): Do nothing and continue\n * - `visit.SKIP`: Do not visit the children of this node, continue with next\n * sibling\n * - `visit.BREAK`: Terminate traversal completely\n * - `visit.REMOVE`: Remove the current node, then continue with the next one\n * - `Node`: Replace the current node, then continue by visiting it\n * - `number`: While iterating the items of a sequence or map, set the index\n * of the next step. This is useful especially if the index of the current\n * node has changed.\n *\n * If `visitor` is a single function, it will be called with all values\n * encountered in the tree, including e.g. `null` values. Alternatively,\n * separate visitor functions may be defined for each `Map`, `Pair`, `Seq`,\n * `Alias` and `Scalar` node. To define the same visitor function for more than\n * one node type, use the `Collection` (map and seq), `Value` (map, seq & scalar)\n * and `Node` (alias, map, seq & scalar) targets. 
Of all these, only the most\n * specific defined one will be used for each node.\n */\nfunction visit(node, visitor) {\n const visitor_ = initVisitor(visitor);\n if (Node.isDocument(node)) {\n const cd = visit_(null, node.contents, visitor_, Object.freeze([node]));\n if (cd === REMOVE)\n node.contents = null;\n }\n else\n visit_(null, node, visitor_, Object.freeze([]));\n}\n// Without the `as symbol` casts, TS declares these in the `visit`\n// namespace using `var`, but then complains about that because\n// `unique symbol` must be `const`.\n/** Terminate visit traversal completely */\nvisit.BREAK = BREAK;\n/** Do not visit the children of the current node */\nvisit.SKIP = SKIP;\n/** Remove the current node */\nvisit.REMOVE = REMOVE;\nfunction visit_(key, node, visitor, path) {\n const ctrl = callVisitor(key, node, visitor, path);\n if (Node.isNode(ctrl) || Node.isPair(ctrl)) {\n replaceNode(key, path, ctrl);\n return visit_(key, ctrl, visitor, path);\n }\n if (typeof ctrl !== 'symbol') {\n if (Node.isCollection(node)) {\n path = Object.freeze(path.concat(node));\n for (let i = 0; i < node.items.length; ++i) {\n const ci = visit_(i, node.items[i], visitor, path);\n if (typeof ci === 'number')\n i = ci - 1;\n else if (ci === BREAK)\n return BREAK;\n else if (ci === REMOVE) {\n node.items.splice(i, 1);\n i -= 1;\n }\n }\n }\n else if (Node.isPair(node)) {\n path = Object.freeze(path.concat(node));\n const ck = visit_('key', node.key, visitor, path);\n if (ck === BREAK)\n return BREAK;\n else if (ck === REMOVE)\n node.key = null;\n const cv = visit_('value', node.value, visitor, path);\n if (cv === BREAK)\n return BREAK;\n else if (cv === REMOVE)\n node.value = null;\n }\n }\n return ctrl;\n}\n/**\n * Apply an async visitor to an AST node or document.\n *\n * Walks through the tree (depth-first) starting from `node`, calling a\n * `visitor` function with three arguments:\n * - `key`: For sequence values and map `Pair`, the node's index in the\n * collection. Within a `Pair`, `'key'` or `'value'`, correspondingly.\n * `null` for the root node.\n * - `node`: The current node.\n * - `path`: The ancestry of the current node.\n *\n * The return value of the visitor may be used to control the traversal:\n * - `Promise`: Must resolve to one of the following values\n * - `undefined` (default): Do nothing and continue\n * - `visit.SKIP`: Do not visit the children of this node, continue with next\n * sibling\n * - `visit.BREAK`: Terminate traversal completely\n * - `visit.REMOVE`: Remove the current node, then continue with the next one\n * - `Node`: Replace the current node, then continue by visiting it\n * - `number`: While iterating the items of a sequence or map, set the index\n * of the next step. This is useful especially if the index of the current\n * node has changed.\n *\n * If `visitor` is a single function, it will be called with all values\n * encountered in the tree, including e.g. `null` values. Alternatively,\n * separate visitor functions may be defined for each `Map`, `Pair`, `Seq`,\n * `Alias` and `Scalar` node. To define the same visitor function for more than\n * one node type, use the `Collection` (map and seq), `Value` (map, seq & scalar)\n * and `Node` (alias, map, seq & scalar) targets. 
Of all these, only the most\n * specific defined one will be used for each node.\n */\nasync function visitAsync(node, visitor) {\n const visitor_ = initVisitor(visitor);\n if (Node.isDocument(node)) {\n const cd = await visitAsync_(null, node.contents, visitor_, Object.freeze([node]));\n if (cd === REMOVE)\n node.contents = null;\n }\n else\n await visitAsync_(null, node, visitor_, Object.freeze([]));\n}\n// Without the `as symbol` casts, TS declares these in the `visit`\n// namespace using `var`, but then complains about that because\n// `unique symbol` must be `const`.\n/** Terminate visit traversal completely */\nvisitAsync.BREAK = BREAK;\n/** Do not visit the children of the current node */\nvisitAsync.SKIP = SKIP;\n/** Remove the current node */\nvisitAsync.REMOVE = REMOVE;\nasync function visitAsync_(key, node, visitor, path) {\n const ctrl = await callVisitor(key, node, visitor, path);\n if (Node.isNode(ctrl) || Node.isPair(ctrl)) {\n replaceNode(key, path, ctrl);\n return visitAsync_(key, ctrl, visitor, path);\n }\n if (typeof ctrl !== 'symbol') {\n if (Node.isCollection(node)) {\n path = Object.freeze(path.concat(node));\n for (let i = 0; i < node.items.length; ++i) {\n const ci = await visitAsync_(i, node.items[i], visitor, path);\n if (typeof ci === 'number')\n i = ci - 1;\n else if (ci === BREAK)\n return BREAK;\n else if (ci === REMOVE) {\n node.items.splice(i, 1);\n i -= 1;\n }\n }\n }\n else if (Node.isPair(node)) {\n path = Object.freeze(path.concat(node));\n const ck = await visitAsync_('key', node.key, visitor, path);\n if (ck === BREAK)\n return BREAK;\n else if (ck === REMOVE)\n node.key = null;\n const cv = await visitAsync_('value', node.value, visitor, path);\n if (cv === BREAK)\n return BREAK;\n else if (cv === REMOVE)\n node.value = null;\n }\n }\n return ctrl;\n}\nfunction initVisitor(visitor) {\n if (typeof visitor === 'object' &&\n (visitor.Collection || visitor.Node || visitor.Value)) {\n return Object.assign({\n Alias: visitor.Node,\n Map: visitor.Node,\n Scalar: visitor.Node,\n Seq: visitor.Node\n }, visitor.Value && {\n Map: visitor.Value,\n Scalar: visitor.Value,\n Seq: visitor.Value\n }, visitor.Collection && {\n Map: visitor.Collection,\n Seq: visitor.Collection\n }, visitor);\n }\n return visitor;\n}\nfunction callVisitor(key, node, visitor, path) {\n if (typeof visitor === 'function')\n return visitor(key, node, path);\n if (Node.isMap(node))\n return visitor.Map?.(key, node, path);\n if (Node.isSeq(node))\n return visitor.Seq?.(key, node, path);\n if (Node.isPair(node))\n return visitor.Pair?.(key, node, path);\n if (Node.isScalar(node))\n return visitor.Scalar?.(key, node, path);\n if (Node.isAlias(node))\n return visitor.Alias?.(key, node, path);\n return undefined;\n}\nfunction replaceNode(key, path, node) {\n const parent = path[path.length - 1];\n if (Node.isCollection(parent)) {\n parent.items[key] = node;\n }\n else if (Node.isPair(parent)) {\n if (key === 'key')\n parent.key = node;\n else\n parent.value = node;\n }\n else if (Node.isDocument(parent)) {\n parent.contents = node;\n }\n else {\n const pt = Node.isAlias(parent) ? 
'alias' : 'scalar';\n throw new Error(`Cannot replace node with ${pt} parent`);\n }\n}\n\nexports.visit = visit;\nexports.visitAsync = visitAsync;\n","// The module cache\nvar __webpack_module_cache__ = {};\n\n// The require function\nfunction __webpack_require__(moduleId) {\n\t// Check if module is in cache\n\tvar cachedModule = __webpack_module_cache__[moduleId];\n\tif (cachedModule !== undefined) {\n\t\treturn cachedModule.exports;\n\t}\n\t// Create a new module (and put it into the cache)\n\tvar module = __webpack_module_cache__[moduleId] = {\n\t\t// no module.id needed\n\t\t// no module.loaded needed\n\t\texports: {}\n\t};\n\n\t// Execute the module function\n\tvar threw = true;\n\ttry {\n\t\t__webpack_modules__[moduleId].call(module.exports, module, module.exports, __webpack_require__);\n\t\tthrew = false;\n\t} finally {\n\t\tif(threw) delete __webpack_module_cache__[moduleId];\n\t}\n\n\t// Return the exports of the module\n\treturn module.exports;\n}\n\n","\nif (typeof __webpack_require__ !== 'undefined') __webpack_require__.ab = __dirname + \"/\";","const core = require('@actions/core');\nconst fs = require('fs');\nconst glob = require('@actions/glob');\nconst path = require('path');\nconst yaml = require('yaml');\n\nconst sha1 = /\\b[a-f0-9]{40}\\b/i;\nconst sha256 = /\\b[A-Fa-f0-9]{64}\\b/i;\n\nasync function run() {\n try {\n const allowlist = core.getInput('allowlist');\n const isDryRun = core.getInput('dry_run') === 'true';\n const workflowsPath = process.env['ZG_WORKFLOWS_PATH'] || '.github/workflows';\n const globber = await glob.create([workflowsPath + '/*.yaml', workflowsPath + '/*.yml'].join('\\n'));\n let actionHasError = false;\n\n for await (const file of globber.globGenerator()) {\n const basename = path.basename(file);\n const fileContents = fs.readFileSync(file, 'utf8');\n const yamlContents = yaml.parse(fileContents);\n const jobs = yamlContents['jobs'];\n let fileHasError = false;\n\n if (jobs === undefined) {\n core.setFailed(`The \"${basename}\" workflow does not contain jobs.`);\n }\n\n core.startGroup(workflowsPath + '/' + basename);\n\n for (const job in jobs) {\n const uses = jobs[job]['uses'];\n const steps = jobs[job]['steps'];\n\n if (assertUsesVersion(uses)) {\n if (!assertUsesSha(uses) && !assertUsesAllowlist(uses, allowlist)) {\n actionHasError = true;\n fileHasError = true;\n\n reportError(`${uses} is not pinned to a full length commit SHA.`, isDryRun);\n }\n } else if (steps !== undefined) {\n for (const step of steps) {\n const uses = step['uses'];\n\n if (assertUsesVersion(uses) && !assertUsesSha(uses) && !assertUsesAllowlist(uses, allowlist)) {\n actionHasError = true;\n fileHasError = true;\n\n reportError(`${uses} is not pinned to a full length commit SHA.`, isDryRun);\n }\n }\n } else {\n core.warning(`The \"${job}\" job of the \"${basename}\" workflow does not contain uses or steps.`);\n }\n }\n\n if (!fileHasError) {\n core.info('No issues were found.')\n }\n\n core.endGroup();\n }\n\n if (!isDryRun && actionHasError) {\n throw new Error('At least one workflow contains an unpinned GitHub Action version.');\n }\n } catch (error) {\n core.setFailed(error.message);\n }\n}\n\nrun();\n\nfunction assertUsesVersion(uses) {\n return typeof uses === 'string' && uses.includes('@');\n}\n\nfunction assertUsesSha(uses) {\n if (uses.startsWith('docker://')) {\n return sha256.test(uses.substr(uses.indexOf('sha256:') + 7));\n }\n\n return sha1.test(uses.substr(uses.indexOf('@') + 1));\n}\n\nfunction assertUsesAllowlist(uses, allowlist) {\n if (!allowlist) 
{\n return false;\n }\n\n const action = uses.substr(0, uses.indexOf('@'));\n const isAllowed = allowlist.split(/\\r?\\n/).some((allow) => action.startsWith(allow));\n\n if(isAllowed) {\n core.info(`${action} matched allowlist — ignoring action.`)\n }\n\n return isAllowed;\n}\n\nfunction reportError(message, isDryRun) {\n if (isDryRun) {\n core.warning(message);\n } else {\n core.error(message);\n }\n}\n"],"names":[],"sourceRoot":""} \ No newline at end of file +{"version":3,"file":"index.js","mappings":";;;;;;;AAAA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC3FA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC/UA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACzDA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC5EA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;A
ACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACzDA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC1RA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACvCA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC7CA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACtDA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC1OA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAC
A;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AChGA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACjBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACrMA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AChHA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC7FA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAC
A;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC9PA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACVA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AChFA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;
AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC5lBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC5DA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC7DA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACxMA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACZA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;
AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACl7BA;;;;;;;;;ACAA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAC
A;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACvQA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC9EA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACtBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACPA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC5CA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACPA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACvBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACtBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACtCA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC1GA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACfA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC7EA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACpCA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACfA;AACA;AACA;AACA;AACA;AACA;AACA;AACA
;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AChBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACpBA;;;;;;;;ACAA;;;;;;;;ACAA;;;;;;;;ACAA;;;;;;;;ACAA;;;;;;;;ACAA;;;;;;;;ACAA;;;;;;;;ACAA;;;;;;;;ACAA;;;;;;;;ACAA;;;;;;;;ACAA;;;;;;;;ACAA;;;;;;;;ACAA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC5DA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACzCA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC9FA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACjFA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC5NA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC/GA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;
AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACnMA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC9CA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACtCA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACzMA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AChOA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;
AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACvIA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACnCA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC5BA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AChBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AClBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC7UA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC5EA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACvDA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAC
A;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACtFA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC1KA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC7DA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACjDA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AChBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC/FA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACtJA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACjEA;AACA;AACA;AACA;AACA;AACA;AACA;A
ACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACtCA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACzBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACtHA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AClGA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACzGA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACtCA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACzNA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAC
A;AACA;AACA;AACA;;;;;;;;;AC9DA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AClGA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC/GA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;A
ACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC9rBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACxCA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AA
CA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACz7BA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACvGA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACvCA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC3CA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;
AACA;;;;;;;;;AChBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AClCA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACfA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACpBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC9CA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACzCA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACxBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC/DA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACrFA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACnEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC5BA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACjDA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC3EA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC3EA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;
AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AChFA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACtCA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC9FA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACxGA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC3IA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC9HA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACxJA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AA
CA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACvBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACtFA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACzBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACvJA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AChUA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AA
CA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;AC3OA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;AC7BA;AACA;;;;;;ACDA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA","sources":["../webpack://github-actions-ensure-sha-pinned-actions/./node_modules/@actions/core/lib/command.js","../webpack://github-actions-ensure-sha-pinned-actions/./node_modules/@actions/core/lib/core.js","../webpack://github-actions-ensure-sha-pinned-actions/./node_modules/@actions/core/lib/file-command.js","../webpack://github-actions-ensure-sha-pinned-actions/./node_modules/@actions/core/lib/oidc-utils.js","../webpack://github-actions-ensure-sha-pinned-actions/./node_modules/@actions/core/lib/path-utils.js","../webpack://github-actions-ensure-sha-pinned-actions/./node_modules/@actions/core/lib/summary.js","../webpack://github-actions-ensure-sha-pinned-actions/./node_modules/@actions/core/lib/utils.js","../webpack://github-actions-ensure-sha-pinned-actions/./node_modules/@actions/glob/lib/glob.js","../webpack://github-actions-ensure-sha-pinned-actions/./node_modules/@actions/glob/lib/internal-glob-options-helper.js","../webpack://github-actions-ensure-sha-pinned-actions/./node_modules/@actions/glob/lib/internal-globber.js","../webpack://github-actions-ensure-sha-pinned-actions/./node_modules/@actions/glob/lib/internal-hash-files.js","../webpack://github-actions-ensure-sha-pinned-actions/./node_modules/@actions/glob/lib/internal-match-kind.js","../webpack://github-actions-ensure-sha-pinned-actions/./node_modules/@actions/glob/lib/internal-path-helper.js","../webpack://github-actions-ensure-sha-pinned-actions/./node_modules/@actions/glob/lib/internal-path.js","../webpack://github-actions-ensure-sha-pinned-actions/./node_modules/@actions/glob/lib/internal-pattern-helper.js","../webpack://github-actions-ensure-sha-pinned-actions/./node_modules/@actions/glob/lib/internal-pattern.js","../webpack://github-actions-ensure-sha-pinned-actions/./node_modules/@actions/glob/lib/internal-search-state.js","../webpack://github-actions-ensure-sha-pinned-actions/./node_modules/@actions/http-client/lib/auth.js","../webpack://github-actions-ensure-sha-pinned-actions/./node_modules/@actions/http-client/lib/index.js","../webpack://github-actions-ensure-sha-pinned-actions/./node_modules/@actions/http-client/lib/proxy.js","../webpack://github-
actions-ensure-sha-pinned-actions/./node_modules/balanced-match/index.js","../webpack://github-actions-ensure-sha-pinned-actions/./node_modules/brace-expansion/index.js","../webpack://github-actions-ensure-sha-pinned-actions/./node_modules/concat-map/index.js","../webpack://github-actions-ensure-sha-pinned-actions/./node_modules/minimatch/minimatch.js","../webpack://github-actions-ensure-sha-pinned-actions/./node_modules/tunnel/index.js","../webpack://github-actions-ensure-sha-pinned-actions/./node_modules/tunnel/lib/tunnel.js","../webpack://github-actions-ensure-sha-pinned-actions/./node_modules/uuid/dist/index.js","../webpack://github-actions-ensure-sha-pinned-actions/./node_modules/uuid/dist/md5.js","../webpack://github-actions-ensure-sha-pinned-actions/./node_modules/uuid/dist/nil.js","../webpack://github-actions-ensure-sha-pinned-actions/./node_modules/uuid/dist/parse.js","../webpack://github-actions-ensure-sha-pinned-actions/./node_modules/uuid/dist/regex.js","../webpack://github-actions-ensure-sha-pinned-actions/./node_modules/uuid/dist/rng.js","../webpack://github-actions-ensure-sha-pinned-actions/./node_modules/uuid/dist/sha1.js","../webpack://github-actions-ensure-sha-pinned-actions/./node_modules/uuid/dist/stringify.js","../webpack://github-actions-ensure-sha-pinned-actions/./node_modules/uuid/dist/v1.js","../webpack://github-actions-ensure-sha-pinned-actions/./node_modules/uuid/dist/v3.js","../webpack://github-actions-ensure-sha-pinned-actions/./node_modules/uuid/dist/v35.js","../webpack://github-actions-ensure-sha-pinned-actions/./node_modules/uuid/dist/v4.js","../webpack://github-actions-ensure-sha-pinned-actions/./node_modules/uuid/dist/v5.js","../webpack://github-actions-ensure-sha-pinned-actions/./node_modules/uuid/dist/validate.js","../webpack://github-actions-ensure-sha-pinned-actions/./node_modules/uuid/dist/version.js","../webpack://github-actions-ensure-sha-pinned-actions/external node-commonjs \"assert\"","../webpack://github-actions-ensure-sha-pinned-actions/external node-commonjs \"crypto\"","../webpack://github-actions-ensure-sha-pinned-actions/external node-commonjs \"events\"","../webpack://github-actions-ensure-sha-pinned-actions/external node-commonjs \"fs\"","../webpack://github-actions-ensure-sha-pinned-actions/external node-commonjs \"http\"","../webpack://github-actions-ensure-sha-pinned-actions/external node-commonjs \"https\"","../webpack://github-actions-ensure-sha-pinned-actions/external node-commonjs \"net\"","../webpack://github-actions-ensure-sha-pinned-actions/external node-commonjs \"os\"","../webpack://github-actions-ensure-sha-pinned-actions/external node-commonjs \"path\"","../webpack://github-actions-ensure-sha-pinned-actions/external node-commonjs \"stream\"","../webpack://github-actions-ensure-sha-pinned-actions/external node-commonjs \"tls\"","../webpack://github-actions-ensure-sha-pinned-actions/external node-commonjs 
\"util\"","../webpack://github-actions-ensure-sha-pinned-actions/./node_modules/yaml/dist/compose/compose-collection.js","../webpack://github-actions-ensure-sha-pinned-actions/./node_modules/yaml/dist/compose/compose-doc.js","../webpack://github-actions-ensure-sha-pinned-actions/./node_modules/yaml/dist/compose/compose-node.js","../webpack://github-actions-ensure-sha-pinned-actions/./node_modules/yaml/dist/compose/compose-scalar.js","../webpack://github-actions-ensure-sha-pinned-actions/./node_modules/yaml/dist/compose/composer.js","../webpack://github-actions-ensure-sha-pinned-actions/./node_modules/yaml/dist/compose/resolve-block-map.js","../webpack://github-actions-ensure-sha-pinned-actions/./node_modules/yaml/dist/compose/resolve-block-scalar.js","../webpack://github-actions-ensure-sha-pinned-actions/./node_modules/yaml/dist/compose/resolve-block-seq.js","../webpack://github-actions-ensure-sha-pinned-actions/./node_modules/yaml/dist/compose/resolve-end.js","../webpack://github-actions-ensure-sha-pinned-actions/./node_modules/yaml/dist/compose/resolve-flow-collection.js","../webpack://github-actions-ensure-sha-pinned-actions/./node_modules/yaml/dist/compose/resolve-flow-scalar.js","../webpack://github-actions-ensure-sha-pinned-actions/./node_modules/yaml/dist/compose/resolve-props.js","../webpack://github-actions-ensure-sha-pinned-actions/./node_modules/yaml/dist/compose/util-contains-newline.js","../webpack://github-actions-ensure-sha-pinned-actions/./node_modules/yaml/dist/compose/util-empty-scalar-position.js","../webpack://github-actions-ensure-sha-pinned-actions/./node_modules/yaml/dist/compose/util-flow-indent-check.js","../webpack://github-actions-ensure-sha-pinned-actions/./node_modules/yaml/dist/compose/util-map-includes.js","../webpack://github-actions-ensure-sha-pinned-actions/./node_modules/yaml/dist/doc/Document.js","../webpack://github-actions-ensure-sha-pinned-actions/./node_modules/yaml/dist/doc/anchors.js","../webpack://github-actions-ensure-sha-pinned-actions/./node_modules/yaml/dist/doc/applyReviver.js","../webpack://github-actions-ensure-sha-pinned-actions/./node_modules/yaml/dist/doc/createNode.js","../webpack://github-actions-ensure-sha-pinned-actions/./node_modules/yaml/dist/doc/directives.js","../webpack://github-actions-ensure-sha-pinned-actions/./node_modules/yaml/dist/errors.js","../webpack://github-actions-ensure-sha-pinned-actions/./node_modules/yaml/dist/index.js","../webpack://github-actions-ensure-sha-pinned-actions/./node_modules/yaml/dist/log.js","../webpack://github-actions-ensure-sha-pinned-actions/./node_modules/yaml/dist/nodes/Alias.js","../webpack://github-actions-ensure-sha-pinned-actions/./node_modules/yaml/dist/nodes/Collection.js","../webpack://github-actions-ensure-sha-pinned-actions/./node_modules/yaml/dist/nodes/Node.js","../webpack://github-actions-ensure-sha-pinned-actions/./node_modules/yaml/dist/nodes/Pair.js","../webpack://github-actions-ensure-sha-pinned-actions/./node_modules/yaml/dist/nodes/Scalar.js","../webpack://github-actions-ensure-sha-pinned-actions/./node_modules/yaml/dist/nodes/YAMLMap.js","../webpack://github-actions-ensure-sha-pinned-actions/./node_modules/yaml/dist/nodes/YAMLSeq.js","../webpack://github-actions-ensure-sha-pinned-actions/./node_modules/yaml/dist/nodes/addPairToJSMap.js","../webpack://github-actions-ensure-sha-pinned-actions/./node_modules/yaml/dist/nodes/toJS.js","../webpack://github-actions-ensure-sha-pinned-actions/./node_modules/yaml/dist/parse/cst-scalar.js","../webpack://github-actions-ensure-sha-pinned
-actions/./node_modules/yaml/dist/parse/cst-stringify.js","../webpack://github-actions-ensure-sha-pinned-actions/./node_modules/yaml/dist/parse/cst-visit.js","../webpack://github-actions-ensure-sha-pinned-actions/./node_modules/yaml/dist/parse/cst.js","../webpack://github-actions-ensure-sha-pinned-actions/./node_modules/yaml/dist/parse/lexer.js","../webpack://github-actions-ensure-sha-pinned-actions/./node_modules/yaml/dist/parse/line-counter.js","../webpack://github-actions-ensure-sha-pinned-actions/./node_modules/yaml/dist/parse/parser.js","../webpack://github-actions-ensure-sha-pinned-actions/./node_modules/yaml/dist/public-api.js","../webpack://github-actions-ensure-sha-pinned-actions/./node_modules/yaml/dist/schema/Schema.js","../webpack://github-actions-ensure-sha-pinned-actions/./node_modules/yaml/dist/schema/common/map.js","../webpack://github-actions-ensure-sha-pinned-actions/./node_modules/yaml/dist/schema/common/null.js","../webpack://github-actions-ensure-sha-pinned-actions/./node_modules/yaml/dist/schema/common/seq.js","../webpack://github-actions-ensure-sha-pinned-actions/./node_modules/yaml/dist/schema/common/string.js","../webpack://github-actions-ensure-sha-pinned-actions/./node_modules/yaml/dist/schema/core/bool.js","../webpack://github-actions-ensure-sha-pinned-actions/./node_modules/yaml/dist/schema/core/float.js","../webpack://github-actions-ensure-sha-pinned-actions/./node_modules/yaml/dist/schema/core/int.js","../webpack://github-actions-ensure-sha-pinned-actions/./node_modules/yaml/dist/schema/core/schema.js","../webpack://github-actions-ensure-sha-pinned-actions/./node_modules/yaml/dist/schema/json/schema.js","../webpack://github-actions-ensure-sha-pinned-actions/./node_modules/yaml/dist/schema/tags.js","../webpack://github-actions-ensure-sha-pinned-actions/./node_modules/yaml/dist/schema/yaml-1.1/binary.js","../webpack://github-actions-ensure-sha-pinned-actions/./node_modules/yaml/dist/schema/yaml-1.1/bool.js","../webpack://github-actions-ensure-sha-pinned-actions/./node_modules/yaml/dist/schema/yaml-1.1/float.js","../webpack://github-actions-ensure-sha-pinned-actions/./node_modules/yaml/dist/schema/yaml-1.1/int.js","../webpack://github-actions-ensure-sha-pinned-actions/./node_modules/yaml/dist/schema/yaml-1.1/omap.js","../webpack://github-actions-ensure-sha-pinned-actions/./node_modules/yaml/dist/schema/yaml-1.1/pairs.js","../webpack://github-actions-ensure-sha-pinned-actions/./node_modules/yaml/dist/schema/yaml-1.1/schema.js","../webpack://github-actions-ensure-sha-pinned-actions/./node_modules/yaml/dist/schema/yaml-1.1/set.js","../webpack://github-actions-ensure-sha-pinned-actions/./node_modules/yaml/dist/schema/yaml-1.1/timestamp.js","../webpack://github-actions-ensure-sha-pinned-actions/./node_modules/yaml/dist/stringify/foldFlowLines.js","../webpack://github-actions-ensure-sha-pinned-actions/./node_modules/yaml/dist/stringify/stringify.js","../webpack://github-actions-ensure-sha-pinned-actions/./node_modules/yaml/dist/stringify/stringifyCollection.js","../webpack://github-actions-ensure-sha-pinned-actions/./node_modules/yaml/dist/stringify/stringifyComment.js","../webpack://github-actions-ensure-sha-pinned-actions/./node_modules/yaml/dist/stringify/stringifyDocument.js","../webpack://github-actions-ensure-sha-pinned-actions/./node_modules/yaml/dist/stringify/stringifyNumber.js","../webpack://github-actions-ensure-sha-pinned-actions/./node_modules/yaml/dist/stringify/stringifyPair.js","../webpack://github-actions-ensure-sha-pinned-actions/./node_modules/yaml/d
ist/stringify/stringifyString.js","../webpack://github-actions-ensure-sha-pinned-actions/./node_modules/yaml/dist/visit.js","../webpack://github-actions-ensure-sha-pinned-actions/webpack/bootstrap","../webpack://github-actions-ensure-sha-pinned-actions/webpack/runtime/compat","../webpack://github-actions-ensure-sha-pinned-actions/./src/index.js"],"sourcesContent":["\"use strict\";\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n __setModuleDefault(result, mod);\n return result;\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.issue = exports.issueCommand = void 0;\nconst os = __importStar(require(\"os\"));\nconst utils_1 = require(\"./utils\");\n/**\n * Commands\n *\n * Command Format:\n * ::name key=value,key=value::message\n *\n * Examples:\n * ::warning::This is the message\n * ::set-env name=MY_VAR::some value\n */\nfunction issueCommand(command, properties, message) {\n const cmd = new Command(command, properties, message);\n process.stdout.write(cmd.toString() + os.EOL);\n}\nexports.issueCommand = issueCommand;\nfunction issue(name, message = '') {\n issueCommand(name, {}, message);\n}\nexports.issue = issue;\nconst CMD_STRING = '::';\nclass Command {\n constructor(command, properties, message) {\n if (!command) {\n command = 'missing.command';\n }\n this.command = command;\n this.properties = properties;\n this.message = message;\n }\n toString() {\n let cmdStr = CMD_STRING + this.command;\n if (this.properties && Object.keys(this.properties).length > 0) {\n cmdStr += ' ';\n let first = true;\n for (const key in this.properties) {\n if (this.properties.hasOwnProperty(key)) {\n const val = this.properties[key];\n if (val) {\n if (first) {\n first = false;\n }\n else {\n cmdStr += ',';\n }\n cmdStr += `${key}=${escapeProperty(val)}`;\n }\n }\n }\n }\n cmdStr += `${CMD_STRING}${escapeData(this.message)}`;\n return cmdStr;\n }\n}\nfunction escapeData(s) {\n return utils_1.toCommandValue(s)\n .replace(/%/g, '%25')\n .replace(/\\r/g, '%0D')\n .replace(/\\n/g, '%0A');\n}\nfunction escapeProperty(s) {\n return utils_1.toCommandValue(s)\n .replace(/%/g, '%25')\n .replace(/\\r/g, '%0D')\n .replace(/\\n/g, '%0A')\n .replace(/:/g, '%3A')\n .replace(/,/g, '%2C');\n}\n//# sourceMappingURL=command.js.map","\"use strict\";\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n __setModuleDefault(result, mod);\n return result;\n};\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.getIDToken = exports.getState = exports.saveState = exports.group = exports.endGroup = exports.startGroup = exports.info = exports.notice = exports.warning = exports.error = exports.debug = exports.isDebug = exports.setFailed = exports.setCommandEcho = exports.setOutput = exports.getBooleanInput = exports.getMultilineInput = exports.getInput = exports.addPath = exports.setSecret = exports.exportVariable = exports.ExitCode = void 0;\nconst command_1 = require(\"./command\");\nconst file_command_1 = require(\"./file-command\");\nconst utils_1 = require(\"./utils\");\nconst os = __importStar(require(\"os\"));\nconst path = __importStar(require(\"path\"));\nconst oidc_utils_1 = require(\"./oidc-utils\");\n/**\n * The code to exit an action\n */\nvar ExitCode;\n(function (ExitCode) {\n /**\n * A code indicating that the action was successful\n */\n ExitCode[ExitCode[\"Success\"] = 0] = \"Success\";\n /**\n * A code indicating that the action was a failure\n */\n ExitCode[ExitCode[\"Failure\"] = 1] = \"Failure\";\n})(ExitCode = exports.ExitCode || (exports.ExitCode = {}));\n//-----------------------------------------------------------------------\n// Variables\n//-----------------------------------------------------------------------\n/**\n * Sets env variable for this action and future actions in the job\n * @param name the name of the variable to set\n * @param val the value of the variable. 
Non-string values will be converted to a string via JSON.stringify\n */\n// eslint-disable-next-line @typescript-eslint/no-explicit-any\nfunction exportVariable(name, val) {\n const convertedVal = utils_1.toCommandValue(val);\n process.env[name] = convertedVal;\n const filePath = process.env['GITHUB_ENV'] || '';\n if (filePath) {\n return file_command_1.issueFileCommand('ENV', file_command_1.prepareKeyValueMessage(name, val));\n }\n command_1.issueCommand('set-env', { name }, convertedVal);\n}\nexports.exportVariable = exportVariable;\n/**\n * Registers a secret which will get masked from logs\n * @param secret value of the secret\n */\nfunction setSecret(secret) {\n command_1.issueCommand('add-mask', {}, secret);\n}\nexports.setSecret = setSecret;\n/**\n * Prepends inputPath to the PATH (for this action and future actions)\n * @param inputPath\n */\nfunction addPath(inputPath) {\n const filePath = process.env['GITHUB_PATH'] || '';\n if (filePath) {\n file_command_1.issueFileCommand('PATH', inputPath);\n }\n else {\n command_1.issueCommand('add-path', {}, inputPath);\n }\n process.env['PATH'] = `${inputPath}${path.delimiter}${process.env['PATH']}`;\n}\nexports.addPath = addPath;\n/**\n * Gets the value of an input.\n * Unless trimWhitespace is set to false in InputOptions, the value is also trimmed.\n * Returns an empty string if the value is not defined.\n *\n * @param name name of the input to get\n * @param options optional. See InputOptions.\n * @returns string\n */\nfunction getInput(name, options) {\n const val = process.env[`INPUT_${name.replace(/ /g, '_').toUpperCase()}`] || '';\n if (options && options.required && !val) {\n throw new Error(`Input required and not supplied: ${name}`);\n }\n if (options && options.trimWhitespace === false) {\n return val;\n }\n return val.trim();\n}\nexports.getInput = getInput;\n/**\n * Gets the values of an multiline input. Each value is also trimmed.\n *\n * @param name name of the input to get\n * @param options optional. See InputOptions.\n * @returns string[]\n *\n */\nfunction getMultilineInput(name, options) {\n const inputs = getInput(name, options)\n .split('\\n')\n .filter(x => x !== '');\n if (options && options.trimWhitespace === false) {\n return inputs;\n }\n return inputs.map(input => input.trim());\n}\nexports.getMultilineInput = getMultilineInput;\n/**\n * Gets the input value of the boolean type in the YAML 1.2 \"core schema\" specification.\n * Support boolean input list: `true | True | TRUE | false | False | FALSE` .\n * The return value is also in boolean type.\n * ref: https://yaml.org/spec/1.2/spec.html#id2804923\n *\n * @param name name of the input to get\n * @param options optional. See InputOptions.\n * @returns boolean\n */\nfunction getBooleanInput(name, options) {\n const trueValue = ['true', 'True', 'TRUE'];\n const falseValue = ['false', 'False', 'FALSE'];\n const val = getInput(name, options);\n if (trueValue.includes(val))\n return true;\n if (falseValue.includes(val))\n return false;\n throw new TypeError(`Input does not meet YAML 1.2 \"Core Schema\" specification: ${name}\\n` +\n `Support boolean input list: \\`true | True | TRUE | false | False | FALSE\\``);\n}\nexports.getBooleanInput = getBooleanInput;\n/**\n * Sets the value of an output.\n *\n * @param name name of the output to set\n * @param value value to store. 
Non-string values will be converted to a string via JSON.stringify\n */\n// eslint-disable-next-line @typescript-eslint/no-explicit-any\nfunction setOutput(name, value) {\n const filePath = process.env['GITHUB_OUTPUT'] || '';\n if (filePath) {\n return file_command_1.issueFileCommand('OUTPUT', file_command_1.prepareKeyValueMessage(name, value));\n }\n process.stdout.write(os.EOL);\n command_1.issueCommand('set-output', { name }, utils_1.toCommandValue(value));\n}\nexports.setOutput = setOutput;\n/**\n * Enables or disables the echoing of commands into stdout for the rest of the step.\n * Echoing is disabled by default if ACTIONS_STEP_DEBUG is not set.\n *\n */\nfunction setCommandEcho(enabled) {\n command_1.issue('echo', enabled ? 'on' : 'off');\n}\nexports.setCommandEcho = setCommandEcho;\n//-----------------------------------------------------------------------\n// Results\n//-----------------------------------------------------------------------\n/**\n * Sets the action status to failed.\n * When the action exits it will be with an exit code of 1\n * @param message add error issue message\n */\nfunction setFailed(message) {\n process.exitCode = ExitCode.Failure;\n error(message);\n}\nexports.setFailed = setFailed;\n//-----------------------------------------------------------------------\n// Logging Commands\n//-----------------------------------------------------------------------\n/**\n * Gets whether Actions Step Debug is on or not\n */\nfunction isDebug() {\n return process.env['RUNNER_DEBUG'] === '1';\n}\nexports.isDebug = isDebug;\n/**\n * Writes debug message to user log\n * @param message debug message\n */\nfunction debug(message) {\n command_1.issueCommand('debug', {}, message);\n}\nexports.debug = debug;\n/**\n * Adds an error issue\n * @param message error issue message. Errors will be converted to string via toString()\n * @param properties optional properties to add to the annotation.\n */\nfunction error(message, properties = {}) {\n command_1.issueCommand('error', utils_1.toCommandProperties(properties), message instanceof Error ? message.toString() : message);\n}\nexports.error = error;\n/**\n * Adds a warning issue\n * @param message warning issue message. Errors will be converted to string via toString()\n * @param properties optional properties to add to the annotation.\n */\nfunction warning(message, properties = {}) {\n command_1.issueCommand('warning', utils_1.toCommandProperties(properties), message instanceof Error ? message.toString() : message);\n}\nexports.warning = warning;\n/**\n * Adds a notice issue\n * @param message notice issue message. Errors will be converted to string via toString()\n * @param properties optional properties to add to the annotation.\n */\nfunction notice(message, properties = {}) {\n command_1.issueCommand('notice', utils_1.toCommandProperties(properties), message instanceof Error ? 
message.toString() : message);\n}\nexports.notice = notice;\n/**\n * Writes info to log with console.log.\n * @param message info message\n */\nfunction info(message) {\n process.stdout.write(message + os.EOL);\n}\nexports.info = info;\n/**\n * Begin an output group.\n *\n * Output until the next `groupEnd` will be foldable in this group\n *\n * @param name The name of the output group\n */\nfunction startGroup(name) {\n command_1.issue('group', name);\n}\nexports.startGroup = startGroup;\n/**\n * End an output group.\n */\nfunction endGroup() {\n command_1.issue('endgroup');\n}\nexports.endGroup = endGroup;\n/**\n * Wrap an asynchronous function call in a group.\n *\n * Returns the same type as the function itself.\n *\n * @param name The name of the group\n * @param fn The function to wrap in the group\n */\nfunction group(name, fn) {\n return __awaiter(this, void 0, void 0, function* () {\n startGroup(name);\n let result;\n try {\n result = yield fn();\n }\n finally {\n endGroup();\n }\n return result;\n });\n}\nexports.group = group;\n//-----------------------------------------------------------------------\n// Wrapper action state\n//-----------------------------------------------------------------------\n/**\n * Saves state for current action, the state can only be retrieved by this action's post job execution.\n *\n * @param name name of the state to store\n * @param value value to store. Non-string values will be converted to a string via JSON.stringify\n */\n// eslint-disable-next-line @typescript-eslint/no-explicit-any\nfunction saveState(name, value) {\n const filePath = process.env['GITHUB_STATE'] || '';\n if (filePath) {\n return file_command_1.issueFileCommand('STATE', file_command_1.prepareKeyValueMessage(name, value));\n }\n command_1.issueCommand('save-state', { name }, utils_1.toCommandValue(value));\n}\nexports.saveState = saveState;\n/**\n * Gets the value of an state set by this action's main execution.\n *\n * @param name name of the state to get\n * @returns string\n */\nfunction getState(name) {\n return process.env[`STATE_${name}`] || '';\n}\nexports.getState = getState;\nfunction getIDToken(aud) {\n return __awaiter(this, void 0, void 0, function* () {\n return yield oidc_utils_1.OidcClient.getIDToken(aud);\n });\n}\nexports.getIDToken = getIDToken;\n/**\n * Summary exports\n */\nvar summary_1 = require(\"./summary\");\nObject.defineProperty(exports, \"summary\", { enumerable: true, get: function () { return summary_1.summary; } });\n/**\n * @deprecated use core.summary\n */\nvar summary_2 = require(\"./summary\");\nObject.defineProperty(exports, \"markdownSummary\", { enumerable: true, get: function () { return summary_2.markdownSummary; } });\n/**\n * Path exports\n */\nvar path_utils_1 = require(\"./path-utils\");\nObject.defineProperty(exports, \"toPosixPath\", { enumerable: true, get: function () { return path_utils_1.toPosixPath; } });\nObject.defineProperty(exports, \"toWin32Path\", { enumerable: true, get: function () { return path_utils_1.toWin32Path; } });\nObject.defineProperty(exports, \"toPlatformPath\", { enumerable: true, get: function () { return path_utils_1.toPlatformPath; } });\n//# sourceMappingURL=core.js.map","\"use strict\";\n// For internal use, subject to change.\nvar __createBinding = (this && this.__createBinding) || (Object.create ? 
(function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n __setModuleDefault(result, mod);\n return result;\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.prepareKeyValueMessage = exports.issueFileCommand = void 0;\n// We use any as a valid input type\n/* eslint-disable @typescript-eslint/no-explicit-any */\nconst fs = __importStar(require(\"fs\"));\nconst os = __importStar(require(\"os\"));\nconst uuid_1 = require(\"uuid\");\nconst utils_1 = require(\"./utils\");\nfunction issueFileCommand(command, message) {\n const filePath = process.env[`GITHUB_${command}`];\n if (!filePath) {\n throw new Error(`Unable to find environment variable for file command ${command}`);\n }\n if (!fs.existsSync(filePath)) {\n throw new Error(`Missing file at path: ${filePath}`);\n }\n fs.appendFileSync(filePath, `${utils_1.toCommandValue(message)}${os.EOL}`, {\n encoding: 'utf8'\n });\n}\nexports.issueFileCommand = issueFileCommand;\nfunction prepareKeyValueMessage(key, value) {\n const delimiter = `ghadelimiter_${uuid_1.v4()}`;\n const convertedValue = utils_1.toCommandValue(value);\n // These should realistically never happen, but just in case someone finds a\n // way to exploit uuid generation let's not allow keys or values that contain\n // the delimiter.\n if (key.includes(delimiter)) {\n throw new Error(`Unexpected input: name should not contain the delimiter \"${delimiter}\"`);\n }\n if (convertedValue.includes(delimiter)) {\n throw new Error(`Unexpected input: value should not contain the delimiter \"${delimiter}\"`);\n }\n return `${key}<<${delimiter}${os.EOL}${convertedValue}${os.EOL}${delimiter}`;\n}\nexports.prepareKeyValueMessage = prepareKeyValueMessage;\n//# sourceMappingURL=file-command.js.map","\"use strict\";\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.OidcClient = void 0;\nconst http_client_1 = require(\"@actions/http-client\");\nconst auth_1 = require(\"@actions/http-client/lib/auth\");\nconst core_1 = require(\"./core\");\nclass OidcClient {\n static createHttpClient(allowRetry = true, maxRetry = 10) {\n const requestOptions = {\n allowRetries: allowRetry,\n maxRetries: maxRetry\n };\n return new http_client_1.HttpClient('actions/oidc-client', [new auth_1.BearerCredentialHandler(OidcClient.getRequestToken())], requestOptions);\n }\n static getRequestToken() {\n const token = process.env['ACTIONS_ID_TOKEN_REQUEST_TOKEN'];\n if (!token) {\n throw new Error('Unable to get ACTIONS_ID_TOKEN_REQUEST_TOKEN env variable');\n }\n return token;\n }\n static getIDTokenUrl() {\n const runtimeUrl = process.env['ACTIONS_ID_TOKEN_REQUEST_URL'];\n if (!runtimeUrl) {\n throw new Error('Unable to get ACTIONS_ID_TOKEN_REQUEST_URL env variable');\n }\n return runtimeUrl;\n }\n static getCall(id_token_url) {\n var _a;\n return __awaiter(this, void 0, void 0, function* () {\n const httpclient = OidcClient.createHttpClient();\n const res = yield httpclient\n .getJson(id_token_url)\n .catch(error => {\n throw new Error(`Failed to get ID Token. \\n \n Error Code : ${error.statusCode}\\n \n Error Message: ${error.result.message}`);\n });\n const id_token = (_a = res.result) === null || _a === void 0 ? void 0 : _a.value;\n if (!id_token) {\n throw new Error('Response json body do not have ID Token field');\n }\n return id_token;\n });\n }\n static getIDToken(audience) {\n return __awaiter(this, void 0, void 0, function* () {\n try {\n // New ID Token is requested from action service\n let id_token_url = OidcClient.getIDTokenUrl();\n if (audience) {\n const encodedAudience = encodeURIComponent(audience);\n id_token_url = `${id_token_url}&audience=${encodedAudience}`;\n }\n core_1.debug(`ID token url is ${id_token_url}`);\n const id_token = yield OidcClient.getCall(id_token_url);\n core_1.setSecret(id_token);\n return id_token;\n }\n catch (error) {\n throw new Error(`Error message: ${error.message}`);\n }\n });\n }\n}\nexports.OidcClient = OidcClient;\n//# sourceMappingURL=oidc-utils.js.map","\"use strict\";\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n __setModuleDefault(result, mod);\n return result;\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.toPlatformPath = exports.toWin32Path = exports.toPosixPath = void 0;\nconst path = __importStar(require(\"path\"));\n/**\n * toPosixPath converts the given path to the posix form. 
On Windows, \\\\ will be\n * replaced with /.\n *\n * @param pth. Path to transform.\n * @return string Posix path.\n */\nfunction toPosixPath(pth) {\n return pth.replace(/[\\\\]/g, '/');\n}\nexports.toPosixPath = toPosixPath;\n/**\n * toWin32Path converts the given path to the win32 form. On Linux, / will be\n * replaced with \\\\.\n *\n * @param pth. Path to transform.\n * @return string Win32 path.\n */\nfunction toWin32Path(pth) {\n return pth.replace(/[/]/g, '\\\\');\n}\nexports.toWin32Path = toWin32Path;\n/**\n * toPlatformPath converts the given path to a platform-specific path. It does\n * this by replacing instances of / and \\ with the platform-specific path\n * separator.\n *\n * @param pth The path to platformize.\n * @return string The platform-specific path.\n */\nfunction toPlatformPath(pth) {\n return pth.replace(/[/\\\\]/g, path.sep);\n}\nexports.toPlatformPath = toPlatformPath;\n//# sourceMappingURL=path-utils.js.map","\"use strict\";\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.summary = exports.markdownSummary = exports.SUMMARY_DOCS_URL = exports.SUMMARY_ENV_VAR = void 0;\nconst os_1 = require(\"os\");\nconst fs_1 = require(\"fs\");\nconst { access, appendFile, writeFile } = fs_1.promises;\nexports.SUMMARY_ENV_VAR = 'GITHUB_STEP_SUMMARY';\nexports.SUMMARY_DOCS_URL = 'https://docs.github.com/actions/using-workflows/workflow-commands-for-github-actions#adding-a-job-summary';\nclass Summary {\n constructor() {\n this._buffer = '';\n }\n /**\n * Finds the summary file path from the environment, rejects if env var is not found or file does not exist\n * Also checks r/w permissions.\n *\n * @returns step summary file path\n */\n filePath() {\n return __awaiter(this, void 0, void 0, function* () {\n if (this._filePath) {\n return this._filePath;\n }\n const pathFromEnv = process.env[exports.SUMMARY_ENV_VAR];\n if (!pathFromEnv) {\n throw new Error(`Unable to find environment variable for $${exports.SUMMARY_ENV_VAR}. Check if your runtime environment supports job summaries.`);\n }\n try {\n yield access(pathFromEnv, fs_1.constants.R_OK | fs_1.constants.W_OK);\n }\n catch (_a) {\n throw new Error(`Unable to access summary file: '${pathFromEnv}'. 
Check if the file has correct read/write permissions.`);\n }\n this._filePath = pathFromEnv;\n return this._filePath;\n });\n }\n /**\n * Wraps content in an HTML tag, adding any HTML attributes\n *\n * @param {string} tag HTML tag to wrap\n * @param {string | null} content content within the tag\n * @param {[attribute: string]: string} attrs key-value list of HTML attributes to add\n *\n * @returns {string} content wrapped in HTML element\n */\n wrap(tag, content, attrs = {}) {\n const htmlAttrs = Object.entries(attrs)\n .map(([key, value]) => ` ${key}=\"${value}\"`)\n .join('');\n if (!content) {\n return `<${tag}${htmlAttrs}>`;\n }\n return `<${tag}${htmlAttrs}>${content}`;\n }\n /**\n * Writes text in the buffer to the summary buffer file and empties buffer. Will append by default.\n *\n * @param {SummaryWriteOptions} [options] (optional) options for write operation\n *\n * @returns {Promise} summary instance\n */\n write(options) {\n return __awaiter(this, void 0, void 0, function* () {\n const overwrite = !!(options === null || options === void 0 ? void 0 : options.overwrite);\n const filePath = yield this.filePath();\n const writeFunc = overwrite ? writeFile : appendFile;\n yield writeFunc(filePath, this._buffer, { encoding: 'utf8' });\n return this.emptyBuffer();\n });\n }\n /**\n * Clears the summary buffer and wipes the summary file\n *\n * @returns {Summary} summary instance\n */\n clear() {\n return __awaiter(this, void 0, void 0, function* () {\n return this.emptyBuffer().write({ overwrite: true });\n });\n }\n /**\n * Returns the current summary buffer as a string\n *\n * @returns {string} string of summary buffer\n */\n stringify() {\n return this._buffer;\n }\n /**\n * If the summary buffer is empty\n *\n * @returns {boolen} true if the buffer is empty\n */\n isEmptyBuffer() {\n return this._buffer.length === 0;\n }\n /**\n * Resets the summary buffer without writing to summary file\n *\n * @returns {Summary} summary instance\n */\n emptyBuffer() {\n this._buffer = '';\n return this;\n }\n /**\n * Adds raw text to the summary buffer\n *\n * @param {string} text content to add\n * @param {boolean} [addEOL=false] (optional) append an EOL to the raw text (default: false)\n *\n * @returns {Summary} summary instance\n */\n addRaw(text, addEOL = false) {\n this._buffer += text;\n return addEOL ? this.addEOL() : this;\n }\n /**\n * Adds the operating system-specific end-of-line marker to the buffer\n *\n * @returns {Summary} summary instance\n */\n addEOL() {\n return this.addRaw(os_1.EOL);\n }\n /**\n * Adds an HTML codeblock to the summary buffer\n *\n * @param {string} code content to render within fenced code block\n * @param {string} lang (optional) language to syntax highlight code\n *\n * @returns {Summary} summary instance\n */\n addCodeBlock(code, lang) {\n const attrs = Object.assign({}, (lang && { lang }));\n const element = this.wrap('pre', this.wrap('code', code), attrs);\n return this.addRaw(element).addEOL();\n }\n /**\n * Adds an HTML list to the summary buffer\n *\n * @param {string[]} items list of items to render\n * @param {boolean} [ordered=false] (optional) if the rendered list should be ordered or not (default: false)\n *\n * @returns {Summary} summary instance\n */\n addList(items, ordered = false) {\n const tag = ordered ? 
'ol' : 'ul';\n const listItems = items.map(item => this.wrap('li', item)).join('');\n const element = this.wrap(tag, listItems);\n return this.addRaw(element).addEOL();\n }\n /**\n * Adds an HTML table to the summary buffer\n *\n * @param {SummaryTableCell[]} rows table rows\n *\n * @returns {Summary} summary instance\n */\n addTable(rows) {\n const tableBody = rows\n .map(row => {\n const cells = row\n .map(cell => {\n if (typeof cell === 'string') {\n return this.wrap('td', cell);\n }\n const { header, data, colspan, rowspan } = cell;\n const tag = header ? 'th' : 'td';\n const attrs = Object.assign(Object.assign({}, (colspan && { colspan })), (rowspan && { rowspan }));\n return this.wrap(tag, data, attrs);\n })\n .join('');\n return this.wrap('tr', cells);\n })\n .join('');\n const element = this.wrap('table', tableBody);\n return this.addRaw(element).addEOL();\n }\n /**\n * Adds a collapsable HTML details element to the summary buffer\n *\n * @param {string} label text for the closed state\n * @param {string} content collapsable content\n *\n * @returns {Summary} summary instance\n */\n addDetails(label, content) {\n const element = this.wrap('details', this.wrap('summary', label) + content);\n return this.addRaw(element).addEOL();\n }\n /**\n * Adds an HTML image tag to the summary buffer\n *\n * @param {string} src path to the image you to embed\n * @param {string} alt text description of the image\n * @param {SummaryImageOptions} options (optional) addition image attributes\n *\n * @returns {Summary} summary instance\n */\n addImage(src, alt, options) {\n const { width, height } = options || {};\n const attrs = Object.assign(Object.assign({}, (width && { width })), (height && { height }));\n const element = this.wrap('img', null, Object.assign({ src, alt }, attrs));\n return this.addRaw(element).addEOL();\n }\n /**\n * Adds an HTML section heading element\n *\n * @param {string} text heading text\n * @param {number | string} [level=1] (optional) the heading level, default: 1\n *\n * @returns {Summary} summary instance\n */\n addHeading(text, level) {\n const tag = `h${level}`;\n const allowedTag = ['h1', 'h2', 'h3', 'h4', 'h5', 'h6'].includes(tag)\n ? tag\n : 'h1';\n const element = this.wrap(allowedTag, text);\n return this.addRaw(element).addEOL();\n }\n /**\n * Adds an HTML thematic break (
) to the summary buffer\n *\n * @returns {Summary} summary instance\n */\n addSeparator() {\n const element = this.wrap('hr', null);\n return this.addRaw(element).addEOL();\n }\n /**\n * Adds an HTML line break (
) to the summary buffer\n *\n * @returns {Summary} summary instance\n */\n addBreak() {\n const element = this.wrap('br', null);\n return this.addRaw(element).addEOL();\n }\n /**\n * Adds an HTML blockquote to the summary buffer\n *\n * @param {string} text quote text\n * @param {string} cite (optional) citation url\n *\n * @returns {Summary} summary instance\n */\n addQuote(text, cite) {\n const attrs = Object.assign({}, (cite && { cite }));\n const element = this.wrap('blockquote', text, attrs);\n return this.addRaw(element).addEOL();\n }\n /**\n * Adds an HTML anchor tag to the summary buffer\n *\n * @param {string} text link text/content\n * @param {string} href hyperlink\n *\n * @returns {Summary} summary instance\n */\n addLink(text, href) {\n const element = this.wrap('a', text, { href });\n return this.addRaw(element).addEOL();\n }\n}\nconst _summary = new Summary();\n/**\n * @deprecated use `core.summary`\n */\nexports.markdownSummary = _summary;\nexports.summary = _summary;\n//# sourceMappingURL=summary.js.map","\"use strict\";\n// We use any as a valid input type\n/* eslint-disable @typescript-eslint/no-explicit-any */\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.toCommandProperties = exports.toCommandValue = void 0;\n/**\n * Sanitizes an input into a string so it can be passed into issueCommand safely\n * @param input input to sanitize into a string\n */\nfunction toCommandValue(input) {\n if (input === null || input === undefined) {\n return '';\n }\n else if (typeof input === 'string' || input instanceof String) {\n return input;\n }\n return JSON.stringify(input);\n}\nexports.toCommandValue = toCommandValue;\n/**\n *\n * @param annotationProperties\n * @returns The command properties to send with the actual annotation command\n * See IssueCommandProperties: https://github.com/actions/runner/blob/main/src/Runner.Worker/ActionCommandManager.cs#L646\n */\nfunction toCommandProperties(annotationProperties) {\n if (!Object.keys(annotationProperties).length) {\n return {};\n }\n return {\n title: annotationProperties.title,\n file: annotationProperties.file,\n line: annotationProperties.startLine,\n endLine: annotationProperties.endLine,\n col: annotationProperties.startColumn,\n endColumn: annotationProperties.endColumn\n };\n}\nexports.toCommandProperties = toCommandProperties;\n//# sourceMappingURL=utils.js.map","\"use strict\";\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.hashFiles = exports.create = void 0;\nconst internal_globber_1 = require(\"./internal-globber\");\nconst internal_hash_files_1 = require(\"./internal-hash-files\");\n/**\n * Constructs a globber\n *\n * @param patterns Patterns separated by newlines\n * @param options Glob options\n */\nfunction create(patterns, options) {\n return __awaiter(this, void 0, void 0, function* () {\n return yield internal_globber_1.DefaultGlobber.create(patterns, options);\n });\n}\nexports.create = create;\n/**\n * Computes the sha256 hash of a glob\n *\n * @param patterns Patterns separated by newlines\n * @param currentWorkspace Workspace used when matching files\n * @param options Glob options\n * @param verbose Enables verbose logging\n */\nfunction hashFiles(patterns, currentWorkspace = '', options, verbose = false) {\n return __awaiter(this, void 0, void 0, function* () {\n let followSymbolicLinks = true;\n if (options && typeof options.followSymbolicLinks === 'boolean') {\n followSymbolicLinks = options.followSymbolicLinks;\n }\n const globber = yield create(patterns, { followSymbolicLinks });\n return internal_hash_files_1.hashFiles(globber, currentWorkspace, verbose);\n });\n}\nexports.hashFiles = hashFiles;\n//# sourceMappingURL=glob.js.map","\"use strict\";\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n __setModuleDefault(result, mod);\n return result;\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.getOptions = void 0;\nconst core = __importStar(require(\"@actions/core\"));\n/**\n * Returns a copy with defaults filled in.\n */\nfunction getOptions(copy) {\n const result = {\n followSymbolicLinks: true,\n implicitDescendants: true,\n matchDirectories: true,\n omitBrokenSymbolicLinks: true\n };\n if (copy) {\n if (typeof copy.followSymbolicLinks === 'boolean') {\n result.followSymbolicLinks = copy.followSymbolicLinks;\n core.debug(`followSymbolicLinks '${result.followSymbolicLinks}'`);\n }\n if (typeof copy.implicitDescendants === 'boolean') {\n result.implicitDescendants = copy.implicitDescendants;\n core.debug(`implicitDescendants '${result.implicitDescendants}'`);\n }\n if (typeof copy.matchDirectories === 'boolean') {\n result.matchDirectories = copy.matchDirectories;\n core.debug(`matchDirectories '${result.matchDirectories}'`);\n }\n if (typeof copy.omitBrokenSymbolicLinks === 'boolean') {\n result.omitBrokenSymbolicLinks = copy.omitBrokenSymbolicLinks;\n core.debug(`omitBrokenSymbolicLinks '${result.omitBrokenSymbolicLinks}'`);\n }\n }\n return result;\n}\nexports.getOptions = getOptions;\n//# sourceMappingURL=internal-glob-options-helper.js.map","\"use strict\";\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n __setModuleDefault(result, mod);\n return result;\n};\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nvar __asyncValues = (this && this.__asyncValues) || function (o) {\n if (!Symbol.asyncIterator) throw new TypeError(\"Symbol.asyncIterator is not defined.\");\n var m = o[Symbol.asyncIterator], i;\n return m ? m.call(o) : (o = typeof __values === \"function\" ? 
__values(o) : o[Symbol.iterator](), i = {}, verb(\"next\"), verb(\"throw\"), verb(\"return\"), i[Symbol.asyncIterator] = function () { return this; }, i);\n function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; }\n function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); }\n};\nvar __await = (this && this.__await) || function (v) { return this instanceof __await ? (this.v = v, this) : new __await(v); }\nvar __asyncGenerator = (this && this.__asyncGenerator) || function (thisArg, _arguments, generator) {\n if (!Symbol.asyncIterator) throw new TypeError(\"Symbol.asyncIterator is not defined.\");\n var g = generator.apply(thisArg, _arguments || []), i, q = [];\n return i = {}, verb(\"next\"), verb(\"throw\"), verb(\"return\"), i[Symbol.asyncIterator] = function () { return this; }, i;\n function verb(n) { if (g[n]) i[n] = function (v) { return new Promise(function (a, b) { q.push([n, v, a, b]) > 1 || resume(n, v); }); }; }\n function resume(n, v) { try { step(g[n](v)); } catch (e) { settle(q[0][3], e); } }\n function step(r) { r.value instanceof __await ? Promise.resolve(r.value.v).then(fulfill, reject) : settle(q[0][2], r); }\n function fulfill(value) { resume(\"next\", value); }\n function reject(value) { resume(\"throw\", value); }\n function settle(f, v) { if (f(v), q.shift(), q.length) resume(q[0][0], q[0][1]); }\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.DefaultGlobber = void 0;\nconst core = __importStar(require(\"@actions/core\"));\nconst fs = __importStar(require(\"fs\"));\nconst globOptionsHelper = __importStar(require(\"./internal-glob-options-helper\"));\nconst path = __importStar(require(\"path\"));\nconst patternHelper = __importStar(require(\"./internal-pattern-helper\"));\nconst internal_match_kind_1 = require(\"./internal-match-kind\");\nconst internal_pattern_1 = require(\"./internal-pattern\");\nconst internal_search_state_1 = require(\"./internal-search-state\");\nconst IS_WINDOWS = process.platform === 'win32';\nclass DefaultGlobber {\n constructor(options) {\n this.patterns = [];\n this.searchPaths = [];\n this.options = globOptionsHelper.getOptions(options);\n }\n getSearchPaths() {\n // Return a copy\n return this.searchPaths.slice();\n }\n glob() {\n var e_1, _a;\n return __awaiter(this, void 0, void 0, function* () {\n const result = [];\n try {\n for (var _b = __asyncValues(this.globGenerator()), _c; _c = yield _b.next(), !_c.done;) {\n const itemPath = _c.value;\n result.push(itemPath);\n }\n }\n catch (e_1_1) { e_1 = { error: e_1_1 }; }\n finally {\n try {\n if (_c && !_c.done && (_a = _b.return)) yield _a.call(_b);\n }\n finally { if (e_1) throw e_1.error; }\n }\n return result;\n });\n }\n globGenerator() {\n return __asyncGenerator(this, arguments, function* globGenerator_1() {\n // Fill in defaults options\n const options = globOptionsHelper.getOptions(this.options);\n // Implicit descendants?\n const patterns = [];\n for (const pattern of this.patterns) {\n patterns.push(pattern);\n if (options.implicitDescendants &&\n (pattern.trailingSeparator ||\n pattern.segments[pattern.segments.length - 1] !== '**')) {\n patterns.push(new internal_pattern_1.Pattern(pattern.negate, true, pattern.segments.concat('**')));\n }\n }\n // Push the search paths\n const stack = [];\n for (const searchPath of patternHelper.getSearchPaths(patterns)) {\n core.debug(`Search path 
'${searchPath}'`);\n // Exists?\n try {\n // Intentionally using lstat. Detection for broken symlink\n // will be performed later (if following symlinks).\n yield __await(fs.promises.lstat(searchPath));\n }\n catch (err) {\n if (err.code === 'ENOENT') {\n continue;\n }\n throw err;\n }\n stack.unshift(new internal_search_state_1.SearchState(searchPath, 1));\n }\n // Search\n const traversalChain = []; // used to detect cycles\n while (stack.length) {\n // Pop\n const item = stack.pop();\n // Match?\n const match = patternHelper.match(patterns, item.path);\n const partialMatch = !!match || patternHelper.partialMatch(patterns, item.path);\n if (!match && !partialMatch) {\n continue;\n }\n // Stat\n const stats = yield __await(DefaultGlobber.stat(item, options, traversalChain)\n // Broken symlink, or symlink cycle detected, or no longer exists\n );\n // Broken symlink, or symlink cycle detected, or no longer exists\n if (!stats) {\n continue;\n }\n // Directory\n if (stats.isDirectory()) {\n // Matched\n if (match & internal_match_kind_1.MatchKind.Directory && options.matchDirectories) {\n yield yield __await(item.path);\n }\n // Descend?\n else if (!partialMatch) {\n continue;\n }\n // Push the child items in reverse\n const childLevel = item.level + 1;\n const childItems = (yield __await(fs.promises.readdir(item.path))).map(x => new internal_search_state_1.SearchState(path.join(item.path, x), childLevel));\n stack.push(...childItems.reverse());\n }\n // File\n else if (match & internal_match_kind_1.MatchKind.File) {\n yield yield __await(item.path);\n }\n }\n });\n }\n /**\n * Constructs a DefaultGlobber\n */\n static create(patterns, options) {\n return __awaiter(this, void 0, void 0, function* () {\n const result = new DefaultGlobber(options);\n if (IS_WINDOWS) {\n patterns = patterns.replace(/\\r\\n/g, '\\n');\n patterns = patterns.replace(/\\r/g, '\\n');\n }\n const lines = patterns.split('\\n').map(x => x.trim());\n for (const line of lines) {\n // Empty or comment\n if (!line || line.startsWith('#')) {\n continue;\n }\n // Pattern\n else {\n result.patterns.push(new internal_pattern_1.Pattern(line));\n }\n }\n result.searchPaths.push(...patternHelper.getSearchPaths(result.patterns));\n return result;\n });\n }\n static stat(item, options, traversalChain) {\n return __awaiter(this, void 0, void 0, function* () {\n // Note:\n // `stat` returns info about the target of a symlink (or symlink chain)\n // `lstat` returns info about a symlink itself\n let stats;\n if (options.followSymbolicLinks) {\n try {\n // Use `stat` (following symlinks)\n stats = yield fs.promises.stat(item.path);\n }\n catch (err) {\n if (err.code === 'ENOENT') {\n if (options.omitBrokenSymbolicLinks) {\n core.debug(`Broken symlink '${item.path}'`);\n return undefined;\n }\n throw new Error(`No information found for the path '${item.path}'. 
This may indicate a broken symbolic link.`);\n }\n throw err;\n }\n }\n else {\n // Use `lstat` (not following symlinks)\n stats = yield fs.promises.lstat(item.path);\n }\n // Note, isDirectory() returns false for the lstat of a symlink\n if (stats.isDirectory() && options.followSymbolicLinks) {\n // Get the realpath\n const realPath = yield fs.promises.realpath(item.path);\n // Fixup the traversal chain to match the item level\n while (traversalChain.length >= item.level) {\n traversalChain.pop();\n }\n // Test for a cycle\n if (traversalChain.some((x) => x === realPath)) {\n core.debug(`Symlink cycle detected for path '${item.path}' and realpath '${realPath}'`);\n return undefined;\n }\n // Update the traversal chain\n traversalChain.push(realPath);\n }\n return stats;\n });\n }\n}\nexports.DefaultGlobber = DefaultGlobber;\n//# sourceMappingURL=internal-globber.js.map","\"use strict\";\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n __setModuleDefault(result, mod);\n return result;\n};\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nvar __asyncValues = (this && this.__asyncValues) || function (o) {\n if (!Symbol.asyncIterator) throw new TypeError(\"Symbol.asyncIterator is not defined.\");\n var m = o[Symbol.asyncIterator], i;\n return m ? m.call(o) : (o = typeof __values === \"function\" ? 
__values(o) : o[Symbol.iterator](), i = {}, verb(\"next\"), verb(\"throw\"), verb(\"return\"), i[Symbol.asyncIterator] = function () { return this; }, i);\n function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; }\n function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); }\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.hashFiles = void 0;\nconst crypto = __importStar(require(\"crypto\"));\nconst core = __importStar(require(\"@actions/core\"));\nconst fs = __importStar(require(\"fs\"));\nconst stream = __importStar(require(\"stream\"));\nconst util = __importStar(require(\"util\"));\nconst path = __importStar(require(\"path\"));\nfunction hashFiles(globber, currentWorkspace, verbose = false) {\n var e_1, _a;\n var _b;\n return __awaiter(this, void 0, void 0, function* () {\n const writeDelegate = verbose ? core.info : core.debug;\n let hasMatch = false;\n const githubWorkspace = currentWorkspace\n ? currentWorkspace\n : (_b = process.env['GITHUB_WORKSPACE']) !== null && _b !== void 0 ? _b : process.cwd();\n const result = crypto.createHash('sha256');\n let count = 0;\n try {\n for (var _c = __asyncValues(globber.globGenerator()), _d; _d = yield _c.next(), !_d.done;) {\n const file = _d.value;\n writeDelegate(file);\n if (!file.startsWith(`${githubWorkspace}${path.sep}`)) {\n writeDelegate(`Ignore '${file}' since it is not under GITHUB_WORKSPACE.`);\n continue;\n }\n if (fs.statSync(file).isDirectory()) {\n writeDelegate(`Skip directory '${file}'.`);\n continue;\n }\n const hash = crypto.createHash('sha256');\n const pipeline = util.promisify(stream.pipeline);\n yield pipeline(fs.createReadStream(file), hash);\n result.write(hash.digest());\n count++;\n if (!hasMatch) {\n hasMatch = true;\n }\n }\n }\n catch (e_1_1) { e_1 = { error: e_1_1 }; }\n finally {\n try {\n if (_d && !_d.done && (_a = _c.return)) yield _a.call(_c);\n }\n finally { if (e_1) throw e_1.error; }\n }\n result.end();\n if (hasMatch) {\n writeDelegate(`Found ${count} files to hash.`);\n return result.digest('hex');\n }\n else {\n writeDelegate(`No matches found for glob`);\n return '';\n }\n });\n}\nexports.hashFiles = hashFiles;\n//# sourceMappingURL=internal-hash-files.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.MatchKind = void 0;\n/**\n * Indicates whether a pattern matches a path\n */\nvar MatchKind;\n(function (MatchKind) {\n /** Not matched */\n MatchKind[MatchKind[\"None\"] = 0] = \"None\";\n /** Matched if the path is a directory */\n MatchKind[MatchKind[\"Directory\"] = 1] = \"Directory\";\n /** Matched if the path is a regular file */\n MatchKind[MatchKind[\"File\"] = 2] = \"File\";\n /** Matched */\n MatchKind[MatchKind[\"All\"] = 3] = \"All\";\n})(MatchKind = exports.MatchKind || (exports.MatchKind = {}));\n//# sourceMappingURL=internal-match-kind.js.map","\"use strict\";\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n __setModuleDefault(result, mod);\n return result;\n};\nvar __importDefault = (this && this.__importDefault) || function (mod) {\n return (mod && mod.__esModule) ? mod : { \"default\": mod };\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.safeTrimTrailingSeparator = exports.normalizeSeparators = exports.hasRoot = exports.hasAbsoluteRoot = exports.ensureAbsoluteRoot = exports.dirname = void 0;\nconst path = __importStar(require(\"path\"));\nconst assert_1 = __importDefault(require(\"assert\"));\nconst IS_WINDOWS = process.platform === 'win32';\n/**\n * Similar to path.dirname except normalizes the path separators and slightly better handling for Windows UNC paths.\n *\n * For example, on Linux/macOS:\n * - `/ => /`\n * - `/hello => /`\n *\n * For example, on Windows:\n * - `C:\\ => C:\\`\n * - `C:\\hello => C:\\`\n * - `C: => C:`\n * - `C:hello => C:`\n * - `\\ => \\`\n * - `\\hello => \\`\n * - `\\\\hello => \\\\hello`\n * - `\\\\hello\\world => \\\\hello\\world`\n */\nfunction dirname(p) {\n // Normalize slashes and trim unnecessary trailing slash\n p = safeTrimTrailingSeparator(p);\n // Windows UNC root, e.g. \\\\hello or \\\\hello\\world\n if (IS_WINDOWS && /^\\\\\\\\[^\\\\]+(\\\\[^\\\\]+)?$/.test(p)) {\n return p;\n }\n // Get dirname\n let result = path.dirname(p);\n // Trim trailing slash for Windows UNC root, e.g. \\\\hello\\world\\\n if (IS_WINDOWS && /^\\\\\\\\[^\\\\]+\\\\[^\\\\]+\\\\$/.test(result)) {\n result = safeTrimTrailingSeparator(result);\n }\n return result;\n}\nexports.dirname = dirname;\n/**\n * Roots the path if not already rooted. On Windows, relative roots like `\\`\n * or `C:` are expanded based on the current working directory.\n */\nfunction ensureAbsoluteRoot(root, itemPath) {\n assert_1.default(root, `ensureAbsoluteRoot parameter 'root' must not be empty`);\n assert_1.default(itemPath, `ensureAbsoluteRoot parameter 'itemPath' must not be empty`);\n // Already rooted\n if (hasAbsoluteRoot(itemPath)) {\n return itemPath;\n }\n // Windows\n if (IS_WINDOWS) {\n // Check for itemPath like C: or C:foo\n if (itemPath.match(/^[A-Z]:[^\\\\/]|^[A-Z]:$/i)) {\n let cwd = process.cwd();\n assert_1.default(cwd.match(/^[A-Z]:\\\\/i), `Expected current directory to start with an absolute drive root. Actual '${cwd}'`);\n // Drive letter matches cwd? Expand to cwd\n if (itemPath[0].toUpperCase() === cwd[0].toUpperCase()) {\n // Drive only, e.g. C:\n if (itemPath.length === 2) {\n // Preserve specified drive letter case (upper or lower)\n return `${itemPath[0]}:\\\\${cwd.substr(3)}`;\n }\n // Drive + path, e.g. C:foo\n else {\n if (!cwd.endsWith('\\\\')) {\n cwd += '\\\\';\n }\n // Preserve specified drive letter case (upper or lower)\n return `${itemPath[0]}:\\\\${cwd.substr(3)}${itemPath.substr(2)}`;\n }\n }\n // Different drive\n else {\n return `${itemPath[0]}:\\\\${itemPath.substr(2)}`;\n }\n }\n // Check for itemPath like \\ or \\foo\n else if (normalizeSeparators(itemPath).match(/^\\\\$|^\\\\[^\\\\]/)) {\n const cwd = process.cwd();\n assert_1.default(cwd.match(/^[A-Z]:\\\\/i), `Expected current directory to start with an absolute drive root. 
Actual '${cwd}'`);\n return `${cwd[0]}:\\\\${itemPath.substr(1)}`;\n }\n }\n assert_1.default(hasAbsoluteRoot(root), `ensureAbsoluteRoot parameter 'root' must have an absolute root`);\n // Otherwise ensure root ends with a separator\n if (root.endsWith('/') || (IS_WINDOWS && root.endsWith('\\\\'))) {\n // Intentionally empty\n }\n else {\n // Append separator\n root += path.sep;\n }\n return root + itemPath;\n}\nexports.ensureAbsoluteRoot = ensureAbsoluteRoot;\n/**\n * On Linux/macOS, true if path starts with `/`. On Windows, true for paths like:\n * `\\\\hello\\share` and `C:\\hello` (and using alternate separator).\n */\nfunction hasAbsoluteRoot(itemPath) {\n assert_1.default(itemPath, `hasAbsoluteRoot parameter 'itemPath' must not be empty`);\n // Normalize separators\n itemPath = normalizeSeparators(itemPath);\n // Windows\n if (IS_WINDOWS) {\n // E.g. \\\\hello\\share or C:\\hello\n return itemPath.startsWith('\\\\\\\\') || /^[A-Z]:\\\\/i.test(itemPath);\n }\n // E.g. /hello\n return itemPath.startsWith('/');\n}\nexports.hasAbsoluteRoot = hasAbsoluteRoot;\n/**\n * On Linux/macOS, true if path starts with `/`. On Windows, true for paths like:\n * `\\`, `\\hello`, `\\\\hello\\share`, `C:`, and `C:\\hello` (and using alternate separator).\n */\nfunction hasRoot(itemPath) {\n assert_1.default(itemPath, `isRooted parameter 'itemPath' must not be empty`);\n // Normalize separators\n itemPath = normalizeSeparators(itemPath);\n // Windows\n if (IS_WINDOWS) {\n // E.g. \\ or \\hello or \\\\hello\n // E.g. C: or C:\\hello\n return itemPath.startsWith('\\\\') || /^[A-Z]:/i.test(itemPath);\n }\n // E.g. /hello\n return itemPath.startsWith('/');\n}\nexports.hasRoot = hasRoot;\n/**\n * Removes redundant slashes and converts `/` to `\\` on Windows\n */\nfunction normalizeSeparators(p) {\n p = p || '';\n // Windows\n if (IS_WINDOWS) {\n // Convert slashes on Windows\n p = p.replace(/\\//g, '\\\\');\n // Remove redundant slashes\n const isUnc = /^\\\\\\\\+[^\\\\]/.test(p); // e.g. \\\\hello\n return (isUnc ? '\\\\' : '') + p.replace(/\\\\\\\\+/g, '\\\\'); // preserve leading \\\\ for UNC\n }\n // Remove redundant slashes\n return p.replace(/\\/\\/+/g, '/');\n}\nexports.normalizeSeparators = normalizeSeparators;\n/**\n * Normalizes the path separators and trims the trailing separator (when safe).\n * For example, `/foo/ => /foo` but `/ => /`\n */\nfunction safeTrimTrailingSeparator(p) {\n // Short-circuit if empty\n if (!p) {\n return '';\n }\n // Normalize separators\n p = normalizeSeparators(p);\n // No trailing slash\n if (!p.endsWith(path.sep)) {\n return p;\n }\n // Check '/' on Linux/macOS and '\\' on Windows\n if (p === path.sep) {\n return p;\n }\n // On Windows check if drive root. E.g. C:\\\n if (IS_WINDOWS && /^[A-Z]:\\\\$/i.test(p)) {\n return p;\n }\n // Otherwise trim trailing slash\n return p.substr(0, p.length - 1);\n}\nexports.safeTrimTrailingSeparator = safeTrimTrailingSeparator;\n//# sourceMappingURL=internal-path-helper.js.map","\"use strict\";\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n __setModuleDefault(result, mod);\n return result;\n};\nvar __importDefault = (this && this.__importDefault) || function (mod) {\n return (mod && mod.__esModule) ? mod : { \"default\": mod };\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.Path = void 0;\nconst path = __importStar(require(\"path\"));\nconst pathHelper = __importStar(require(\"./internal-path-helper\"));\nconst assert_1 = __importDefault(require(\"assert\"));\nconst IS_WINDOWS = process.platform === 'win32';\n/**\n * Helper class for parsing paths into segments\n */\nclass Path {\n /**\n * Constructs a Path\n * @param itemPath Path or array of segments\n */\n constructor(itemPath) {\n this.segments = [];\n // String\n if (typeof itemPath === 'string') {\n assert_1.default(itemPath, `Parameter 'itemPath' must not be empty`);\n // Normalize slashes and trim unnecessary trailing slash\n itemPath = pathHelper.safeTrimTrailingSeparator(itemPath);\n // Not rooted\n if (!pathHelper.hasRoot(itemPath)) {\n this.segments = itemPath.split(path.sep);\n }\n // Rooted\n else {\n // Add all segments, while not at the root\n let remaining = itemPath;\n let dir = pathHelper.dirname(remaining);\n while (dir !== remaining) {\n // Add the segment\n const basename = path.basename(remaining);\n this.segments.unshift(basename);\n // Truncate the last segment\n remaining = dir;\n dir = pathHelper.dirname(remaining);\n }\n // Remainder is the root\n this.segments.unshift(remaining);\n }\n }\n // Array\n else {\n // Must not be empty\n assert_1.default(itemPath.length > 0, `Parameter 'itemPath' must not be an empty array`);\n // Each segment\n for (let i = 0; i < itemPath.length; i++) {\n let segment = itemPath[i];\n // Must not be empty\n assert_1.default(segment, `Parameter 'itemPath' must not contain any empty segments`);\n // Normalize slashes\n segment = pathHelper.normalizeSeparators(itemPath[i]);\n // Root segment\n if (i === 0 && pathHelper.hasRoot(segment)) {\n segment = pathHelper.safeTrimTrailingSeparator(segment);\n assert_1.default(segment === pathHelper.dirname(segment), `Parameter 'itemPath' root segment contains information for multiple segments`);\n this.segments.push(segment);\n }\n // All other segments\n else {\n // Must not contain slash\n assert_1.default(!segment.includes(path.sep), `Parameter 'itemPath' contains unexpected path separators`);\n this.segments.push(segment);\n }\n }\n }\n }\n /**\n * Converts the path to it's string representation\n */\n toString() {\n // First segment\n let result = this.segments[0];\n // All others\n let skipSlash = result.endsWith(path.sep) || (IS_WINDOWS && /^[A-Z]:$/i.test(result));\n for (let i = 1; i < this.segments.length; i++) {\n if (skipSlash) {\n skipSlash = false;\n }\n else {\n result += path.sep;\n }\n result += this.segments[i];\n }\n return result;\n }\n}\nexports.Path = Path;\n//# sourceMappingURL=internal-path.js.map","\"use strict\";\nvar __createBinding = (this && this.__createBinding) || (Object.create ? 
(function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n __setModuleDefault(result, mod);\n return result;\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.partialMatch = exports.match = exports.getSearchPaths = void 0;\nconst pathHelper = __importStar(require(\"./internal-path-helper\"));\nconst internal_match_kind_1 = require(\"./internal-match-kind\");\nconst IS_WINDOWS = process.platform === 'win32';\n/**\n * Given an array of patterns, returns an array of paths to search.\n * Duplicates and paths under other included paths are filtered out.\n */\nfunction getSearchPaths(patterns) {\n // Ignore negate patterns\n patterns = patterns.filter(x => !x.negate);\n // Create a map of all search paths\n const searchPathMap = {};\n for (const pattern of patterns) {\n const key = IS_WINDOWS\n ? pattern.searchPath.toUpperCase()\n : pattern.searchPath;\n searchPathMap[key] = 'candidate';\n }\n const result = [];\n for (const pattern of patterns) {\n // Check if already included\n const key = IS_WINDOWS\n ? pattern.searchPath.toUpperCase()\n : pattern.searchPath;\n if (searchPathMap[key] === 'included') {\n continue;\n }\n // Check for an ancestor search path\n let foundAncestor = false;\n let tempKey = key;\n let parent = pathHelper.dirname(tempKey);\n while (parent !== tempKey) {\n if (searchPathMap[parent]) {\n foundAncestor = true;\n break;\n }\n tempKey = parent;\n parent = pathHelper.dirname(tempKey);\n }\n // Include the search pattern in the result\n if (!foundAncestor) {\n result.push(pattern.searchPath);\n searchPathMap[key] = 'included';\n }\n }\n return result;\n}\nexports.getSearchPaths = getSearchPaths;\n/**\n * Matches the patterns against the path\n */\nfunction match(patterns, itemPath) {\n let result = internal_match_kind_1.MatchKind.None;\n for (const pattern of patterns) {\n if (pattern.negate) {\n result &= ~pattern.match(itemPath);\n }\n else {\n result |= pattern.match(itemPath);\n }\n }\n return result;\n}\nexports.match = match;\n/**\n * Checks whether to descend further into the directory\n */\nfunction partialMatch(patterns, itemPath) {\n return patterns.some(x => !x.negate && x.partialMatch(itemPath));\n}\nexports.partialMatch = partialMatch;\n//# sourceMappingURL=internal-pattern-helper.js.map","\"use strict\";\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n __setModuleDefault(result, mod);\n return result;\n};\nvar __importDefault = (this && this.__importDefault) || function (mod) {\n return (mod && mod.__esModule) ? mod : { \"default\": mod };\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.Pattern = void 0;\nconst os = __importStar(require(\"os\"));\nconst path = __importStar(require(\"path\"));\nconst pathHelper = __importStar(require(\"./internal-path-helper\"));\nconst assert_1 = __importDefault(require(\"assert\"));\nconst minimatch_1 = require(\"minimatch\");\nconst internal_match_kind_1 = require(\"./internal-match-kind\");\nconst internal_path_1 = require(\"./internal-path\");\nconst IS_WINDOWS = process.platform === 'win32';\nclass Pattern {\n constructor(patternOrNegate, isImplicitPattern = false, segments, homedir) {\n /**\n * Indicates whether matches should be excluded from the result set\n */\n this.negate = false;\n // Pattern overload\n let pattern;\n if (typeof patternOrNegate === 'string') {\n pattern = patternOrNegate.trim();\n }\n // Segments overload\n else {\n // Convert to pattern\n segments = segments || [];\n assert_1.default(segments.length, `Parameter 'segments' must not empty`);\n const root = Pattern.getLiteral(segments[0]);\n assert_1.default(root && pathHelper.hasAbsoluteRoot(root), `Parameter 'segments' first element must be a root path`);\n pattern = new internal_path_1.Path(segments).toString().trim();\n if (patternOrNegate) {\n pattern = `!${pattern}`;\n }\n }\n // Negate\n while (pattern.startsWith('!')) {\n this.negate = !this.negate;\n pattern = pattern.substr(1).trim();\n }\n // Normalize slashes and ensures absolute root\n pattern = Pattern.fixupPattern(pattern, homedir);\n // Segments\n this.segments = new internal_path_1.Path(pattern).segments;\n // Trailing slash indicates the pattern should only match directories, not regular files\n this.trailingSeparator = pathHelper\n .normalizeSeparators(pattern)\n .endsWith(path.sep);\n pattern = pathHelper.safeTrimTrailingSeparator(pattern);\n // Search path (literal path prior to the first glob segment)\n let foundGlob = false;\n const searchSegments = this.segments\n .map(x => Pattern.getLiteral(x))\n .filter(x => !foundGlob && !(foundGlob = x === ''));\n this.searchPath = new internal_path_1.Path(searchSegments).toString();\n // Root RegExp (required when determining partial match)\n this.rootRegExp = new RegExp(Pattern.regExpEscape(searchSegments[0]), IS_WINDOWS ? 'i' : '');\n this.isImplicitPattern = isImplicitPattern;\n // Create minimatch\n const minimatchOptions = {\n dot: true,\n nobrace: true,\n nocase: IS_WINDOWS,\n nocomment: true,\n noext: true,\n nonegate: true\n };\n pattern = IS_WINDOWS ? pattern.replace(/\\\\/g, '/') : pattern;\n this.minimatch = new minimatch_1.Minimatch(pattern, minimatchOptions);\n }\n /**\n * Matches the pattern against the specified path\n */\n match(itemPath) {\n // Last segment is globstar?\n if (this.segments[this.segments.length - 1] === '**') {\n // Normalize slashes\n itemPath = pathHelper.normalizeSeparators(itemPath);\n // Append a trailing slash. 
Otherwise Minimatch will not match the directory immediately\n // preceding the globstar. For example, given the pattern `/foo/**`, Minimatch returns\n // false for `/foo` but returns true for `/foo/`. Append a trailing slash to handle that quirk.\n if (!itemPath.endsWith(path.sep) && this.isImplicitPattern === false) {\n // Note, this is safe because the constructor ensures the pattern has an absolute root.\n // For example, formats like C: and C:foo on Windows are resolved to an absolute root.\n itemPath = `${itemPath}${path.sep}`;\n }\n }\n else {\n // Normalize slashes and trim unnecessary trailing slash\n itemPath = pathHelper.safeTrimTrailingSeparator(itemPath);\n }\n // Match\n if (this.minimatch.match(itemPath)) {\n return this.trailingSeparator ? internal_match_kind_1.MatchKind.Directory : internal_match_kind_1.MatchKind.All;\n }\n return internal_match_kind_1.MatchKind.None;\n }\n /**\n * Indicates whether the pattern may match descendants of the specified path\n */\n partialMatch(itemPath) {\n // Normalize slashes and trim unnecessary trailing slash\n itemPath = pathHelper.safeTrimTrailingSeparator(itemPath);\n // matchOne does not handle root path correctly\n if (pathHelper.dirname(itemPath) === itemPath) {\n return this.rootRegExp.test(itemPath);\n }\n return this.minimatch.matchOne(itemPath.split(IS_WINDOWS ? /\\\\+/ : /\\/+/), this.minimatch.set[0], true);\n }\n /**\n * Escapes glob patterns within a path\n */\n static globEscape(s) {\n return (IS_WINDOWS ? s : s.replace(/\\\\/g, '\\\\\\\\')) // escape '\\' on Linux/macOS\n .replace(/(\\[)(?=[^/]+\\])/g, '[[]') // escape '[' when ']' follows within the path segment\n .replace(/\\?/g, '[?]') // escape '?'\n .replace(/\\*/g, '[*]'); // escape '*'\n }\n /**\n * Normalizes slashes and ensures absolute root\n */\n static fixupPattern(pattern, homedir) {\n // Empty\n assert_1.default(pattern, 'pattern cannot be empty');\n // Must not contain `.` segment, unless first segment\n // Must not contain `..` segment\n const literalSegments = new internal_path_1.Path(pattern).segments.map(x => Pattern.getLiteral(x));\n assert_1.default(literalSegments.every((x, i) => (x !== '.' || i === 0) && x !== '..'), `Invalid pattern '${pattern}'. Relative pathing '.' and '..' is not allowed.`);\n // Must not contain globs in root, e.g. Windows UNC path \\\\foo\\b*r\n assert_1.default(!pathHelper.hasRoot(pattern) || literalSegments[0], `Invalid pattern '${pattern}'. Root segment must not contain globs.`);\n // Normalize slashes\n pattern = pathHelper.normalizeSeparators(pattern);\n // Replace leading `.` segment\n if (pattern === '.' || pattern.startsWith(`.${path.sep}`)) {\n pattern = Pattern.globEscape(process.cwd()) + pattern.substr(1);\n }\n // Replace leading `~` segment\n else if (pattern === '~' || pattern.startsWith(`~${path.sep}`)) {\n homedir = homedir || os.homedir();\n assert_1.default(homedir, 'Unable to determine HOME directory');\n assert_1.default(pathHelper.hasAbsoluteRoot(homedir), `Expected HOME directory to be a rooted path. Actual '${homedir}'`);\n pattern = Pattern.globEscape(homedir) + pattern.substr(1);\n }\n // Replace relative drive root, e.g. pattern is C: or C:foo\n else if (IS_WINDOWS &&\n (pattern.match(/^[A-Z]:$/i) || pattern.match(/^[A-Z]:[^\\\\]/i))) {\n let root = pathHelper.ensureAbsoluteRoot('C:\\\\dummy-root', pattern.substr(0, 2));\n if (pattern.length > 2 && !root.endsWith('\\\\')) {\n root += '\\\\';\n }\n pattern = Pattern.globEscape(root) + pattern.substr(2);\n }\n // Replace relative root, e.g. 
pattern is \\ or \\foo\n else if (IS_WINDOWS && (pattern === '\\\\' || pattern.match(/^\\\\[^\\\\]/))) {\n let root = pathHelper.ensureAbsoluteRoot('C:\\\\dummy-root', '\\\\');\n if (!root.endsWith('\\\\')) {\n root += '\\\\';\n }\n pattern = Pattern.globEscape(root) + pattern.substr(1);\n }\n // Otherwise ensure absolute root\n else {\n pattern = pathHelper.ensureAbsoluteRoot(Pattern.globEscape(process.cwd()), pattern);\n }\n return pathHelper.normalizeSeparators(pattern);\n }\n /**\n * Attempts to unescape a pattern segment to create a literal path segment.\n * Otherwise returns empty string.\n */\n static getLiteral(segment) {\n let literal = '';\n for (let i = 0; i < segment.length; i++) {\n const c = segment[i];\n // Escape\n if (c === '\\\\' && !IS_WINDOWS && i + 1 < segment.length) {\n literal += segment[++i];\n continue;\n }\n // Wildcard\n else if (c === '*' || c === '?') {\n return '';\n }\n // Character set\n else if (c === '[' && i + 1 < segment.length) {\n let set = '';\n let closed = -1;\n for (let i2 = i + 1; i2 < segment.length; i2++) {\n const c2 = segment[i2];\n // Escape\n if (c2 === '\\\\' && !IS_WINDOWS && i2 + 1 < segment.length) {\n set += segment[++i2];\n continue;\n }\n // Closed\n else if (c2 === ']') {\n closed = i2;\n break;\n }\n // Otherwise\n else {\n set += c2;\n }\n }\n // Closed?\n if (closed >= 0) {\n // Cannot convert\n if (set.length > 1) {\n return '';\n }\n // Convert to literal\n if (set) {\n literal += set;\n i = closed;\n continue;\n }\n }\n // Otherwise fall thru\n }\n // Append\n literal += c;\n }\n return literal;\n }\n /**\n * Escapes regexp special characters\n * https://javascript.info/regexp-escaping\n */\n static regExpEscape(s) {\n return s.replace(/[[\\\\^$.|?*+()]/g, '\\\\$&');\n }\n}\nexports.Pattern = Pattern;\n//# sourceMappingURL=internal-pattern.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.SearchState = void 0;\nclass SearchState {\n constructor(path, level) {\n this.path = path;\n this.level = level;\n }\n}\nexports.SearchState = SearchState;\n//# sourceMappingURL=internal-search-state.js.map","\"use strict\";\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.PersonalAccessTokenCredentialHandler = exports.BearerCredentialHandler = exports.BasicCredentialHandler = void 0;\nclass BasicCredentialHandler {\n constructor(username, password) {\n this.username = username;\n this.password = password;\n }\n prepareRequest(options) {\n if (!options.headers) {\n throw Error('The request has no headers');\n }\n options.headers['Authorization'] = `Basic ${Buffer.from(`${this.username}:${this.password}`).toString('base64')}`;\n }\n // This handler cannot handle 401\n canHandleAuthentication() {\n return false;\n }\n handleAuthentication() {\n return __awaiter(this, void 0, void 0, function* () {\n throw new Error('not implemented');\n });\n }\n}\nexports.BasicCredentialHandler = BasicCredentialHandler;\nclass BearerCredentialHandler {\n constructor(token) {\n this.token = token;\n }\n // currently implements pre-authorization\n // TODO: support preAuth = false where it hooks on 401\n prepareRequest(options) {\n if (!options.headers) {\n throw Error('The request has no headers');\n }\n options.headers['Authorization'] = `Bearer ${this.token}`;\n }\n // This handler cannot handle 401\n canHandleAuthentication() {\n return false;\n }\n handleAuthentication() {\n return __awaiter(this, void 0, void 0, function* () {\n throw new Error('not implemented');\n });\n }\n}\nexports.BearerCredentialHandler = BearerCredentialHandler;\nclass PersonalAccessTokenCredentialHandler {\n constructor(token) {\n this.token = token;\n }\n // currently implements pre-authorization\n // TODO: support preAuth = false where it hooks on 401\n prepareRequest(options) {\n if (!options.headers) {\n throw Error('The request has no headers');\n }\n options.headers['Authorization'] = `Basic ${Buffer.from(`PAT:${this.token}`).toString('base64')}`;\n }\n // This handler cannot handle 401\n canHandleAuthentication() {\n return false;\n }\n handleAuthentication() {\n return __awaiter(this, void 0, void 0, function* () {\n throw new Error('not implemented');\n });\n }\n}\nexports.PersonalAccessTokenCredentialHandler = PersonalAccessTokenCredentialHandler;\n//# sourceMappingURL=auth.js.map","\"use strict\";\n/* eslint-disable @typescript-eslint/no-explicit-any */\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n __setModuleDefault(result, mod);\n return result;\n};\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? 
value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.HttpClient = exports.isHttps = exports.HttpClientResponse = exports.HttpClientError = exports.getProxyUrl = exports.MediaTypes = exports.Headers = exports.HttpCodes = void 0;\nconst http = __importStar(require(\"http\"));\nconst https = __importStar(require(\"https\"));\nconst pm = __importStar(require(\"./proxy\"));\nconst tunnel = __importStar(require(\"tunnel\"));\nvar HttpCodes;\n(function (HttpCodes) {\n HttpCodes[HttpCodes[\"OK\"] = 200] = \"OK\";\n HttpCodes[HttpCodes[\"MultipleChoices\"] = 300] = \"MultipleChoices\";\n HttpCodes[HttpCodes[\"MovedPermanently\"] = 301] = \"MovedPermanently\";\n HttpCodes[HttpCodes[\"ResourceMoved\"] = 302] = \"ResourceMoved\";\n HttpCodes[HttpCodes[\"SeeOther\"] = 303] = \"SeeOther\";\n HttpCodes[HttpCodes[\"NotModified\"] = 304] = \"NotModified\";\n HttpCodes[HttpCodes[\"UseProxy\"] = 305] = \"UseProxy\";\n HttpCodes[HttpCodes[\"SwitchProxy\"] = 306] = \"SwitchProxy\";\n HttpCodes[HttpCodes[\"TemporaryRedirect\"] = 307] = \"TemporaryRedirect\";\n HttpCodes[HttpCodes[\"PermanentRedirect\"] = 308] = \"PermanentRedirect\";\n HttpCodes[HttpCodes[\"BadRequest\"] = 400] = \"BadRequest\";\n HttpCodes[HttpCodes[\"Unauthorized\"] = 401] = \"Unauthorized\";\n HttpCodes[HttpCodes[\"PaymentRequired\"] = 402] = \"PaymentRequired\";\n HttpCodes[HttpCodes[\"Forbidden\"] = 403] = \"Forbidden\";\n HttpCodes[HttpCodes[\"NotFound\"] = 404] = \"NotFound\";\n HttpCodes[HttpCodes[\"MethodNotAllowed\"] = 405] = \"MethodNotAllowed\";\n HttpCodes[HttpCodes[\"NotAcceptable\"] = 406] = \"NotAcceptable\";\n HttpCodes[HttpCodes[\"ProxyAuthenticationRequired\"] = 407] = \"ProxyAuthenticationRequired\";\n HttpCodes[HttpCodes[\"RequestTimeout\"] = 408] = \"RequestTimeout\";\n HttpCodes[HttpCodes[\"Conflict\"] = 409] = \"Conflict\";\n HttpCodes[HttpCodes[\"Gone\"] = 410] = \"Gone\";\n HttpCodes[HttpCodes[\"TooManyRequests\"] = 429] = \"TooManyRequests\";\n HttpCodes[HttpCodes[\"InternalServerError\"] = 500] = \"InternalServerError\";\n HttpCodes[HttpCodes[\"NotImplemented\"] = 501] = \"NotImplemented\";\n HttpCodes[HttpCodes[\"BadGateway\"] = 502] = \"BadGateway\";\n HttpCodes[HttpCodes[\"ServiceUnavailable\"] = 503] = \"ServiceUnavailable\";\n HttpCodes[HttpCodes[\"GatewayTimeout\"] = 504] = \"GatewayTimeout\";\n})(HttpCodes = exports.HttpCodes || (exports.HttpCodes = {}));\nvar Headers;\n(function (Headers) {\n Headers[\"Accept\"] = \"accept\";\n Headers[\"ContentType\"] = \"content-type\";\n})(Headers = exports.Headers || (exports.Headers = {}));\nvar MediaTypes;\n(function (MediaTypes) {\n MediaTypes[\"ApplicationJson\"] = \"application/json\";\n})(MediaTypes = exports.MediaTypes || (exports.MediaTypes = {}));\n/**\n * Returns the proxy URL, depending upon the supplied url and proxy environment variables.\n * @param serverUrl The server URL where the request will be sent. 
For example, https://api.github.com\n */\nfunction getProxyUrl(serverUrl) {\n const proxyUrl = pm.getProxyUrl(new URL(serverUrl));\n return proxyUrl ? proxyUrl.href : '';\n}\nexports.getProxyUrl = getProxyUrl;\nconst HttpRedirectCodes = [\n HttpCodes.MovedPermanently,\n HttpCodes.ResourceMoved,\n HttpCodes.SeeOther,\n HttpCodes.TemporaryRedirect,\n HttpCodes.PermanentRedirect\n];\nconst HttpResponseRetryCodes = [\n HttpCodes.BadGateway,\n HttpCodes.ServiceUnavailable,\n HttpCodes.GatewayTimeout\n];\nconst RetryableHttpVerbs = ['OPTIONS', 'GET', 'DELETE', 'HEAD'];\nconst ExponentialBackoffCeiling = 10;\nconst ExponentialBackoffTimeSlice = 5;\nclass HttpClientError extends Error {\n constructor(message, statusCode) {\n super(message);\n this.name = 'HttpClientError';\n this.statusCode = statusCode;\n Object.setPrototypeOf(this, HttpClientError.prototype);\n }\n}\nexports.HttpClientError = HttpClientError;\nclass HttpClientResponse {\n constructor(message) {\n this.message = message;\n }\n readBody() {\n return __awaiter(this, void 0, void 0, function* () {\n return new Promise((resolve) => __awaiter(this, void 0, void 0, function* () {\n let output = Buffer.alloc(0);\n this.message.on('data', (chunk) => {\n output = Buffer.concat([output, chunk]);\n });\n this.message.on('end', () => {\n resolve(output.toString());\n });\n }));\n });\n }\n}\nexports.HttpClientResponse = HttpClientResponse;\nfunction isHttps(requestUrl) {\n const parsedUrl = new URL(requestUrl);\n return parsedUrl.protocol === 'https:';\n}\nexports.isHttps = isHttps;\nclass HttpClient {\n constructor(userAgent, handlers, requestOptions) {\n this._ignoreSslError = false;\n this._allowRedirects = true;\n this._allowRedirectDowngrade = false;\n this._maxRedirects = 50;\n this._allowRetries = false;\n this._maxRetries = 1;\n this._keepAlive = false;\n this._disposed = false;\n this.userAgent = userAgent;\n this.handlers = handlers || [];\n this.requestOptions = requestOptions;\n if (requestOptions) {\n if (requestOptions.ignoreSslError != null) {\n this._ignoreSslError = requestOptions.ignoreSslError;\n }\n this._socketTimeout = requestOptions.socketTimeout;\n if (requestOptions.allowRedirects != null) {\n this._allowRedirects = requestOptions.allowRedirects;\n }\n if (requestOptions.allowRedirectDowngrade != null) {\n this._allowRedirectDowngrade = requestOptions.allowRedirectDowngrade;\n }\n if (requestOptions.maxRedirects != null) {\n this._maxRedirects = Math.max(requestOptions.maxRedirects, 0);\n }\n if (requestOptions.keepAlive != null) {\n this._keepAlive = requestOptions.keepAlive;\n }\n if (requestOptions.allowRetries != null) {\n this._allowRetries = requestOptions.allowRetries;\n }\n if (requestOptions.maxRetries != null) {\n this._maxRetries = requestOptions.maxRetries;\n }\n }\n }\n options(requestUrl, additionalHeaders) {\n return __awaiter(this, void 0, void 0, function* () {\n return this.request('OPTIONS', requestUrl, null, additionalHeaders || {});\n });\n }\n get(requestUrl, additionalHeaders) {\n return __awaiter(this, void 0, void 0, function* () {\n return this.request('GET', requestUrl, null, additionalHeaders || {});\n });\n }\n del(requestUrl, additionalHeaders) {\n return __awaiter(this, void 0, void 0, function* () {\n return this.request('DELETE', requestUrl, null, additionalHeaders || {});\n });\n }\n post(requestUrl, data, additionalHeaders) {\n return __awaiter(this, void 0, void 0, function* () {\n return this.request('POST', requestUrl, data, additionalHeaders || {});\n });\n }\n patch(requestUrl, 
data, additionalHeaders) {\n return __awaiter(this, void 0, void 0, function* () {\n return this.request('PATCH', requestUrl, data, additionalHeaders || {});\n });\n }\n put(requestUrl, data, additionalHeaders) {\n return __awaiter(this, void 0, void 0, function* () {\n return this.request('PUT', requestUrl, data, additionalHeaders || {});\n });\n }\n head(requestUrl, additionalHeaders) {\n return __awaiter(this, void 0, void 0, function* () {\n return this.request('HEAD', requestUrl, null, additionalHeaders || {});\n });\n }\n sendStream(verb, requestUrl, stream, additionalHeaders) {\n return __awaiter(this, void 0, void 0, function* () {\n return this.request(verb, requestUrl, stream, additionalHeaders);\n });\n }\n /**\n * Gets a typed object from an endpoint\n * Be aware that not found returns a null. Other errors (4xx, 5xx) reject the promise\n */\n getJson(requestUrl, additionalHeaders = {}) {\n return __awaiter(this, void 0, void 0, function* () {\n additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);\n const res = yield this.get(requestUrl, additionalHeaders);\n return this._processResponse(res, this.requestOptions);\n });\n }\n postJson(requestUrl, obj, additionalHeaders = {}) {\n return __awaiter(this, void 0, void 0, function* () {\n const data = JSON.stringify(obj, null, 2);\n additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);\n additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson);\n const res = yield this.post(requestUrl, data, additionalHeaders);\n return this._processResponse(res, this.requestOptions);\n });\n }\n putJson(requestUrl, obj, additionalHeaders = {}) {\n return __awaiter(this, void 0, void 0, function* () {\n const data = JSON.stringify(obj, null, 2);\n additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);\n additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson);\n const res = yield this.put(requestUrl, data, additionalHeaders);\n return this._processResponse(res, this.requestOptions);\n });\n }\n patchJson(requestUrl, obj, additionalHeaders = {}) {\n return __awaiter(this, void 0, void 0, function* () {\n const data = JSON.stringify(obj, null, 2);\n additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);\n additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson);\n const res = yield this.patch(requestUrl, data, additionalHeaders);\n return this._processResponse(res, this.requestOptions);\n });\n }\n /**\n * Makes a raw http request.\n * All other methods such as get, post, patch, and request ultimately call this.\n * Prefer get, del, post and patch\n */\n request(verb, requestUrl, data, headers) {\n return __awaiter(this, void 0, void 0, function* () {\n if (this._disposed) {\n throw new Error('Client has already been disposed.');\n }\n const parsedUrl = new URL(requestUrl);\n let info = this._prepareRequest(verb, parsedUrl, headers);\n // Only perform retries on reads since writes may not be idempotent.\n const maxTries = this._allowRetries && RetryableHttpVerbs.includes(verb)\n ? 
this._maxRetries + 1\n : 1;\n let numTries = 0;\n let response;\n do {\n response = yield this.requestRaw(info, data);\n // Check if it's an authentication challenge\n if (response &&\n response.message &&\n response.message.statusCode === HttpCodes.Unauthorized) {\n let authenticationHandler;\n for (const handler of this.handlers) {\n if (handler.canHandleAuthentication(response)) {\n authenticationHandler = handler;\n break;\n }\n }\n if (authenticationHandler) {\n return authenticationHandler.handleAuthentication(this, info, data);\n }\n else {\n // We have received an unauthorized response but have no handlers to handle it.\n // Let the response return to the caller.\n return response;\n }\n }\n let redirectsRemaining = this._maxRedirects;\n while (response.message.statusCode &&\n HttpRedirectCodes.includes(response.message.statusCode) &&\n this._allowRedirects &&\n redirectsRemaining > 0) {\n const redirectUrl = response.message.headers['location'];\n if (!redirectUrl) {\n // if there's no location to redirect to, we won't\n break;\n }\n const parsedRedirectUrl = new URL(redirectUrl);\n if (parsedUrl.protocol === 'https:' &&\n parsedUrl.protocol !== parsedRedirectUrl.protocol &&\n !this._allowRedirectDowngrade) {\n throw new Error('Redirect from HTTPS to HTTP protocol. This downgrade is not allowed for security reasons. If you want to allow this behavior, set the allowRedirectDowngrade option to true.');\n }\n // we need to finish reading the response before reassigning response\n // which will leak the open socket.\n yield response.readBody();\n // strip authorization header if redirected to a different hostname\n if (parsedRedirectUrl.hostname !== parsedUrl.hostname) {\n for (const header in headers) {\n // header names are case insensitive\n if (header.toLowerCase() === 'authorization') {\n delete headers[header];\n }\n }\n }\n // let's make the request with the new redirectUrl\n info = this._prepareRequest(verb, parsedRedirectUrl, headers);\n response = yield this.requestRaw(info, data);\n redirectsRemaining--;\n }\n if (!response.message.statusCode ||\n !HttpResponseRetryCodes.includes(response.message.statusCode)) {\n // If not a retry code, return immediately instead of retrying\n return response;\n }\n numTries += 1;\n if (numTries < maxTries) {\n yield response.readBody();\n yield this._performExponentialBackoff(numTries);\n }\n } while (numTries < maxTries);\n return response;\n });\n }\n /**\n * Needs to be called if keepAlive is set to true in request options.\n */\n dispose() {\n if (this._agent) {\n this._agent.destroy();\n }\n this._disposed = true;\n }\n /**\n * Raw request.\n * @param info\n * @param data\n */\n requestRaw(info, data) {\n return __awaiter(this, void 0, void 0, function* () {\n return new Promise((resolve, reject) => {\n function callbackForResult(err, res) {\n if (err) {\n reject(err);\n }\n else if (!res) {\n // If `err` is not passed, then `res` must be passed.\n reject(new Error('Unknown error'));\n }\n else {\n resolve(res);\n }\n }\n this.requestRawWithCallback(info, data, callbackForResult);\n });\n });\n }\n /**\n * Raw request with callback.\n * @param info\n * @param data\n * @param onResult\n */\n requestRawWithCallback(info, data, onResult) {\n if (typeof data === 'string') {\n if (!info.options.headers) {\n info.options.headers = {};\n }\n info.options.headers['Content-Length'] = Buffer.byteLength(data, 'utf8');\n }\n let callbackCalled = false;\n function handleResult(err, res) {\n if (!callbackCalled) {\n callbackCalled = true;\n 
onResult(err, res);\n }\n }\n const req = info.httpModule.request(info.options, (msg) => {\n const res = new HttpClientResponse(msg);\n handleResult(undefined, res);\n });\n let socket;\n req.on('socket', sock => {\n socket = sock;\n });\n // If we ever get disconnected, we want the socket to timeout eventually\n req.setTimeout(this._socketTimeout || 3 * 60000, () => {\n if (socket) {\n socket.end();\n }\n handleResult(new Error(`Request timeout: ${info.options.path}`));\n });\n req.on('error', function (err) {\n // err has statusCode property\n // res should have headers\n handleResult(err);\n });\n if (data && typeof data === 'string') {\n req.write(data, 'utf8');\n }\n if (data && typeof data !== 'string') {\n data.on('close', function () {\n req.end();\n });\n data.pipe(req);\n }\n else {\n req.end();\n }\n }\n /**\n * Gets an http agent. This function is useful when you need an http agent that handles\n * routing through a proxy server - depending upon the url and proxy environment variables.\n * @param serverUrl The server URL where the request will be sent. For example, https://api.github.com\n */\n getAgent(serverUrl) {\n const parsedUrl = new URL(serverUrl);\n return this._getAgent(parsedUrl);\n }\n _prepareRequest(method, requestUrl, headers) {\n const info = {};\n info.parsedUrl = requestUrl;\n const usingSsl = info.parsedUrl.protocol === 'https:';\n info.httpModule = usingSsl ? https : http;\n const defaultPort = usingSsl ? 443 : 80;\n info.options = {};\n info.options.host = info.parsedUrl.hostname;\n info.options.port = info.parsedUrl.port\n ? parseInt(info.parsedUrl.port)\n : defaultPort;\n info.options.path =\n (info.parsedUrl.pathname || '') + (info.parsedUrl.search || '');\n info.options.method = method;\n info.options.headers = this._mergeHeaders(headers);\n if (this.userAgent != null) {\n info.options.headers['user-agent'] = this.userAgent;\n }\n info.options.agent = this._getAgent(info.parsedUrl);\n // gives handlers an opportunity to participate\n if (this.handlers) {\n for (const handler of this.handlers) {\n handler.prepareRequest(info.options);\n }\n }\n return info;\n }\n _mergeHeaders(headers) {\n if (this.requestOptions && this.requestOptions.headers) {\n return Object.assign({}, lowercaseKeys(this.requestOptions.headers), lowercaseKeys(headers || {}));\n }\n return lowercaseKeys(headers || {});\n }\n _getExistingOrDefaultHeader(additionalHeaders, header, _default) {\n let clientHeader;\n if (this.requestOptions && this.requestOptions.headers) {\n clientHeader = lowercaseKeys(this.requestOptions.headers)[header];\n }\n return additionalHeaders[header] || clientHeader || _default;\n }\n _getAgent(parsedUrl) {\n let agent;\n const proxyUrl = pm.getProxyUrl(parsedUrl);\n const useProxy = proxyUrl && proxyUrl.hostname;\n if (this._keepAlive && useProxy) {\n agent = this._proxyAgent;\n }\n if (this._keepAlive && !useProxy) {\n agent = this._agent;\n }\n // if agent is already assigned use that agent.\n if (agent) {\n return agent;\n }\n const usingSsl = parsedUrl.protocol === 'https:';\n let maxSockets = 100;\n if (this.requestOptions) {\n maxSockets = this.requestOptions.maxSockets || http.globalAgent.maxSockets;\n }\n // This is `useProxy` again, but we need to check `proxyURl` directly for TypeScripts's flow analysis.\n if (proxyUrl && proxyUrl.hostname) {\n const agentOptions = {\n maxSockets,\n keepAlive: this._keepAlive,\n proxy: Object.assign(Object.assign({}, ((proxyUrl.username || proxyUrl.password) && {\n proxyAuth: 
`${proxyUrl.username}:${proxyUrl.password}`\n })), { host: proxyUrl.hostname, port: proxyUrl.port })\n };\n let tunnelAgent;\n const overHttps = proxyUrl.protocol === 'https:';\n if (usingSsl) {\n tunnelAgent = overHttps ? tunnel.httpsOverHttps : tunnel.httpsOverHttp;\n }\n else {\n tunnelAgent = overHttps ? tunnel.httpOverHttps : tunnel.httpOverHttp;\n }\n agent = tunnelAgent(agentOptions);\n this._proxyAgent = agent;\n }\n // if reusing agent across request and tunneling agent isn't assigned create a new agent\n if (this._keepAlive && !agent) {\n const options = { keepAlive: this._keepAlive, maxSockets };\n agent = usingSsl ? new https.Agent(options) : new http.Agent(options);\n this._agent = agent;\n }\n // if not using private agent and tunnel agent isn't setup then use global agent\n if (!agent) {\n agent = usingSsl ? https.globalAgent : http.globalAgent;\n }\n if (usingSsl && this._ignoreSslError) {\n // we don't want to set NODE_TLS_REJECT_UNAUTHORIZED=0 since that will affect request for entire process\n // http.RequestOptions doesn't expose a way to modify RequestOptions.agent.options\n // we have to cast it to any and change it directly\n agent.options = Object.assign(agent.options || {}, {\n rejectUnauthorized: false\n });\n }\n return agent;\n }\n _performExponentialBackoff(retryNumber) {\n return __awaiter(this, void 0, void 0, function* () {\n retryNumber = Math.min(ExponentialBackoffCeiling, retryNumber);\n const ms = ExponentialBackoffTimeSlice * Math.pow(2, retryNumber);\n return new Promise(resolve => setTimeout(() => resolve(), ms));\n });\n }\n _processResponse(res, options) {\n return __awaiter(this, void 0, void 0, function* () {\n return new Promise((resolve, reject) => __awaiter(this, void 0, void 0, function* () {\n const statusCode = res.message.statusCode || 0;\n const response = {\n statusCode,\n result: null,\n headers: {}\n };\n // not found leads to null obj returned\n if (statusCode === HttpCodes.NotFound) {\n resolve(response);\n }\n // get the result from the body\n function dateTimeDeserializer(key, value) {\n if (typeof value === 'string') {\n const a = new Date(value);\n if (!isNaN(a.valueOf())) {\n return a;\n }\n }\n return value;\n }\n let obj;\n let contents;\n try {\n contents = yield res.readBody();\n if (contents && contents.length > 0) {\n if (options && options.deserializeDates) {\n obj = JSON.parse(contents, dateTimeDeserializer);\n }\n else {\n obj = JSON.parse(contents);\n }\n response.result = obj;\n }\n response.headers = res.message.headers;\n }\n catch (err) {\n // Invalid resource (contents not json); leaving result obj null\n }\n // note that 3xx redirects are handled by the http layer.\n if (statusCode > 299) {\n let msg;\n // if exception/error in body, attempt to get better error\n if (obj && obj.message) {\n msg = obj.message;\n }\n else if (contents && contents.length > 0) {\n // it may be the case that the exception is in the body message as string\n msg = contents;\n }\n else {\n msg = `Failed request: (${statusCode})`;\n }\n const err = new HttpClientError(msg, statusCode);\n err.result = response.result;\n reject(err);\n }\n else {\n resolve(response);\n }\n }));\n });\n }\n}\nexports.HttpClient = HttpClient;\nconst lowercaseKeys = (obj) => Object.keys(obj).reduce((c, k) => ((c[k.toLowerCase()] = obj[k]), c), {});\n//# sourceMappingURL=index.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.checkBypass = exports.getProxyUrl = void 0;\nfunction getProxyUrl(reqUrl) {\n const 
usingSsl = reqUrl.protocol === 'https:';\n if (checkBypass(reqUrl)) {\n return undefined;\n }\n const proxyVar = (() => {\n if (usingSsl) {\n return process.env['https_proxy'] || process.env['HTTPS_PROXY'];\n }\n else {\n return process.env['http_proxy'] || process.env['HTTP_PROXY'];\n }\n })();\n if (proxyVar) {\n return new URL(proxyVar);\n }\n else {\n return undefined;\n }\n}\nexports.getProxyUrl = getProxyUrl;\nfunction checkBypass(reqUrl) {\n if (!reqUrl.hostname) {\n return false;\n }\n const noProxy = process.env['no_proxy'] || process.env['NO_PROXY'] || '';\n if (!noProxy) {\n return false;\n }\n // Determine the request port\n let reqPort;\n if (reqUrl.port) {\n reqPort = Number(reqUrl.port);\n }\n else if (reqUrl.protocol === 'http:') {\n reqPort = 80;\n }\n else if (reqUrl.protocol === 'https:') {\n reqPort = 443;\n }\n // Format the request hostname and hostname with port\n const upperReqHosts = [reqUrl.hostname.toUpperCase()];\n if (typeof reqPort === 'number') {\n upperReqHosts.push(`${upperReqHosts[0]}:${reqPort}`);\n }\n // Compare request host against noproxy\n for (const upperNoProxyItem of noProxy\n .split(',')\n .map(x => x.trim().toUpperCase())\n .filter(x => x)) {\n if (upperReqHosts.some(x => x === upperNoProxyItem)) {\n return true;\n }\n }\n return false;\n}\nexports.checkBypass = checkBypass;\n//# sourceMappingURL=proxy.js.map","'use strict';\nmodule.exports = balanced;\nfunction balanced(a, b, str) {\n if (a instanceof RegExp) a = maybeMatch(a, str);\n if (b instanceof RegExp) b = maybeMatch(b, str);\n\n var r = range(a, b, str);\n\n return r && {\n start: r[0],\n end: r[1],\n pre: str.slice(0, r[0]),\n body: str.slice(r[0] + a.length, r[1]),\n post: str.slice(r[1] + b.length)\n };\n}\n\nfunction maybeMatch(reg, str) {\n var m = str.match(reg);\n return m ? m[0] : null;\n}\n\nbalanced.range = range;\nfunction range(a, b, str) {\n var begs, beg, left, right, result;\n var ai = str.indexOf(a);\n var bi = str.indexOf(b, ai + 1);\n var i = ai;\n\n if (ai >= 0 && bi > 0) {\n if(a===b) {\n return [ai, bi];\n }\n begs = [];\n left = str.length;\n\n while (i >= 0 && !result) {\n if (i == ai) {\n begs.push(i);\n ai = str.indexOf(a, i + 1);\n } else if (begs.length == 1) {\n result = [ begs.pop(), bi ];\n } else {\n beg = begs.pop();\n if (beg < left) {\n left = beg;\n right = bi;\n }\n\n bi = str.indexOf(b, i + 1);\n }\n\n i = ai < bi && ai >= 0 ? ai : bi;\n }\n\n if (begs.length) {\n result = [ left, right ];\n }\n }\n\n return result;\n}\n","var concatMap = require('concat-map');\nvar balanced = require('balanced-match');\n\nmodule.exports = expandTop;\n\nvar escSlash = '\\0SLASH'+Math.random()+'\\0';\nvar escOpen = '\\0OPEN'+Math.random()+'\\0';\nvar escClose = '\\0CLOSE'+Math.random()+'\\0';\nvar escComma = '\\0COMMA'+Math.random()+'\\0';\nvar escPeriod = '\\0PERIOD'+Math.random()+'\\0';\n\nfunction numeric(str) {\n return parseInt(str, 10) == str\n ? 
parseInt(str, 10)\n : str.charCodeAt(0);\n}\n\nfunction escapeBraces(str) {\n return str.split('\\\\\\\\').join(escSlash)\n .split('\\\\{').join(escOpen)\n .split('\\\\}').join(escClose)\n .split('\\\\,').join(escComma)\n .split('\\\\.').join(escPeriod);\n}\n\nfunction unescapeBraces(str) {\n return str.split(escSlash).join('\\\\')\n .split(escOpen).join('{')\n .split(escClose).join('}')\n .split(escComma).join(',')\n .split(escPeriod).join('.');\n}\n\n\n// Basically just str.split(\",\"), but handling cases\n// where we have nested braced sections, which should be\n// treated as individual members, like {a,{b,c},d}\nfunction parseCommaParts(str) {\n if (!str)\n return [''];\n\n var parts = [];\n var m = balanced('{', '}', str);\n\n if (!m)\n return str.split(',');\n\n var pre = m.pre;\n var body = m.body;\n var post = m.post;\n var p = pre.split(',');\n\n p[p.length-1] += '{' + body + '}';\n var postParts = parseCommaParts(post);\n if (post.length) {\n p[p.length-1] += postParts.shift();\n p.push.apply(p, postParts);\n }\n\n parts.push.apply(parts, p);\n\n return parts;\n}\n\nfunction expandTop(str) {\n if (!str)\n return [];\n\n // I don't know why Bash 4.3 does this, but it does.\n // Anything starting with {} will have the first two bytes preserved\n // but *only* at the top level, so {},a}b will not expand to anything,\n // but a{},b}c will be expanded to [a}c,abc].\n // One could argue that this is a bug in Bash, but since the goal of\n // this module is to match Bash's rules, we escape a leading {}\n if (str.substr(0, 2) === '{}') {\n str = '\\\\{\\\\}' + str.substr(2);\n }\n\n return expand(escapeBraces(str), true).map(unescapeBraces);\n}\n\nfunction identity(e) {\n return e;\n}\n\nfunction embrace(str) {\n return '{' + str + '}';\n}\nfunction isPadded(el) {\n return /^-?0\\d/.test(el);\n}\n\nfunction lte(i, y) {\n return i <= y;\n}\nfunction gte(i, y) {\n return i >= y;\n}\n\nfunction expand(str, isTop) {\n var expansions = [];\n\n var m = balanced('{', '}', str);\n if (!m || /\\$$/.test(m.pre)) return [str];\n\n var isNumericSequence = /^-?\\d+\\.\\.-?\\d+(?:\\.\\.-?\\d+)?$/.test(m.body);\n var isAlphaSequence = /^[a-zA-Z]\\.\\.[a-zA-Z](?:\\.\\.-?\\d+)?$/.test(m.body);\n var isSequence = isNumericSequence || isAlphaSequence;\n var isOptions = m.body.indexOf(',') >= 0;\n if (!isSequence && !isOptions) {\n // {a},b}\n if (m.post.match(/,.*\\}/)) {\n str = m.pre + '{' + m.body + escClose + m.post;\n return expand(str);\n }\n return [str];\n }\n\n var n;\n if (isSequence) {\n n = m.body.split(/\\.\\./);\n } else {\n n = parseCommaParts(m.body);\n if (n.length === 1) {\n // x{{a,b}}y ==> x{a}y x{b}y\n n = expand(n[0], false).map(embrace);\n if (n.length === 1) {\n var post = m.post.length\n ? expand(m.post, false)\n : [''];\n return post.map(function(p) {\n return m.pre + n[0] + p;\n });\n }\n }\n }\n\n // at this point, n is the parts, and we know it's not a comma set\n // with a single entry.\n\n // no need to expand pre, since it is guaranteed to be free of brace-sets\n var pre = m.pre;\n var post = m.post.length\n ? expand(m.post, false)\n : [''];\n\n var N;\n\n if (isSequence) {\n var x = numeric(n[0]);\n var y = numeric(n[1]);\n var width = Math.max(n[0].length, n[1].length)\n var incr = n.length == 3\n ? 
Math.abs(numeric(n[2]))\n : 1;\n var test = lte;\n var reverse = y < x;\n if (reverse) {\n incr *= -1;\n test = gte;\n }\n var pad = n.some(isPadded);\n\n N = [];\n\n for (var i = x; test(i, y); i += incr) {\n var c;\n if (isAlphaSequence) {\n c = String.fromCharCode(i);\n if (c === '\\\\')\n c = '';\n } else {\n c = String(i);\n if (pad) {\n var need = width - c.length;\n if (need > 0) {\n var z = new Array(need + 1).join('0');\n if (i < 0)\n c = '-' + z + c.slice(1);\n else\n c = z + c;\n }\n }\n }\n N.push(c);\n }\n } else {\n N = concatMap(n, function(el) { return expand(el, false) });\n }\n\n for (var j = 0; j < N.length; j++) {\n for (var k = 0; k < post.length; k++) {\n var expansion = pre + N[j] + post[k];\n if (!isTop || isSequence || expansion)\n expansions.push(expansion);\n }\n }\n\n return expansions;\n}\n\n","module.exports = function (xs, fn) {\n var res = [];\n for (var i = 0; i < xs.length; i++) {\n var x = fn(xs[i], i);\n if (isArray(x)) res.push.apply(res, x);\n else res.push(x);\n }\n return res;\n};\n\nvar isArray = Array.isArray || function (xs) {\n return Object.prototype.toString.call(xs) === '[object Array]';\n};\n","module.exports = minimatch\nminimatch.Minimatch = Minimatch\n\nvar path = (function () { try { return require('path') } catch (e) {}}()) || {\n sep: '/'\n}\nminimatch.sep = path.sep\n\nvar GLOBSTAR = minimatch.GLOBSTAR = Minimatch.GLOBSTAR = {}\nvar expand = require('brace-expansion')\n\nvar plTypes = {\n '!': { open: '(?:(?!(?:', close: '))[^/]*?)'},\n '?': { open: '(?:', close: ')?' },\n '+': { open: '(?:', close: ')+' },\n '*': { open: '(?:', close: ')*' },\n '@': { open: '(?:', close: ')' }\n}\n\n// any single thing other than /\n// don't need to escape / when using new RegExp()\nvar qmark = '[^/]'\n\n// * => any number of characters\nvar star = qmark + '*?'\n\n// ** when dots are allowed. Anything goes, except .. 
and .\n// not (^ or / followed by one or two dots followed by $ or /),\n// followed by anything, any number of times.\nvar twoStarDot = '(?:(?!(?:\\\\\\/|^)(?:\\\\.{1,2})($|\\\\\\/)).)*?'\n\n// not a ^ or / followed by a dot,\n// followed by anything, any number of times.\nvar twoStarNoDot = '(?:(?!(?:\\\\\\/|^)\\\\.).)*?'\n\n// characters that need to be escaped in RegExp.\nvar reSpecials = charSet('().*{}+?[]^$\\\\!')\n\n// \"abc\" -> { a:true, b:true, c:true }\nfunction charSet (s) {\n return s.split('').reduce(function (set, c) {\n set[c] = true\n return set\n }, {})\n}\n\n// normalizes slashes.\nvar slashSplit = /\\/+/\n\nminimatch.filter = filter\nfunction filter (pattern, options) {\n options = options || {}\n return function (p, i, list) {\n return minimatch(p, pattern, options)\n }\n}\n\nfunction ext (a, b) {\n b = b || {}\n var t = {}\n Object.keys(a).forEach(function (k) {\n t[k] = a[k]\n })\n Object.keys(b).forEach(function (k) {\n t[k] = b[k]\n })\n return t\n}\n\nminimatch.defaults = function (def) {\n if (!def || typeof def !== 'object' || !Object.keys(def).length) {\n return minimatch\n }\n\n var orig = minimatch\n\n var m = function minimatch (p, pattern, options) {\n return orig(p, pattern, ext(def, options))\n }\n\n m.Minimatch = function Minimatch (pattern, options) {\n return new orig.Minimatch(pattern, ext(def, options))\n }\n m.Minimatch.defaults = function defaults (options) {\n return orig.defaults(ext(def, options)).Minimatch\n }\n\n m.filter = function filter (pattern, options) {\n return orig.filter(pattern, ext(def, options))\n }\n\n m.defaults = function defaults (options) {\n return orig.defaults(ext(def, options))\n }\n\n m.makeRe = function makeRe (pattern, options) {\n return orig.makeRe(pattern, ext(def, options))\n }\n\n m.braceExpand = function braceExpand (pattern, options) {\n return orig.braceExpand(pattern, ext(def, options))\n }\n\n m.match = function (list, pattern, options) {\n return orig.match(list, pattern, ext(def, options))\n }\n\n return m\n}\n\nMinimatch.defaults = function (def) {\n return minimatch.defaults(def).Minimatch\n}\n\nfunction minimatch (p, pattern, options) {\n assertValidPattern(pattern)\n\n if (!options) options = {}\n\n // shortcut: comments match nothing.\n if (!options.nocomment && pattern.charAt(0) === '#') {\n return false\n }\n\n return new Minimatch(pattern, options).match(p)\n}\n\nfunction Minimatch (pattern, options) {\n if (!(this instanceof Minimatch)) {\n return new Minimatch(pattern, options)\n }\n\n assertValidPattern(pattern)\n\n if (!options) options = {}\n\n pattern = pattern.trim()\n\n // windows support: need to use /, not \\\n if (!options.allowWindowsEscape && path.sep !== '/') {\n pattern = pattern.split(path.sep).join('/')\n }\n\n this.options = options\n this.set = []\n this.pattern = pattern\n this.regexp = null\n this.negate = false\n this.comment = false\n this.empty = false\n this.partial = !!options.partial\n\n // make the set of regexps etc.\n this.make()\n}\n\nMinimatch.prototype.debug = function () {}\n\nMinimatch.prototype.make = make\nfunction make () {\n var pattern = this.pattern\n var options = this.options\n\n // empty patterns and comments match nothing.\n if (!options.nocomment && pattern.charAt(0) === '#') {\n this.comment = true\n return\n }\n if (!pattern) {\n this.empty = true\n return\n }\n\n // step 1: figure out negation, etc.\n this.parseNegate()\n\n // step 2: expand braces\n var set = this.globSet = this.braceExpand()\n\n if (options.debug) this.debug = function debug() { 
console.error.apply(console, arguments) }\n\n this.debug(this.pattern, set)\n\n // step 3: now we have a set, so turn each one into a series of path-portion\n // matching patterns.\n // These will be regexps, except in the case of \"**\", which is\n // set to the GLOBSTAR object for globstar behavior,\n // and will not contain any / characters\n set = this.globParts = set.map(function (s) {\n return s.split(slashSplit)\n })\n\n this.debug(this.pattern, set)\n\n // glob --> regexps\n set = set.map(function (s, si, set) {\n return s.map(this.parse, this)\n }, this)\n\n this.debug(this.pattern, set)\n\n // filter out everything that didn't compile properly.\n set = set.filter(function (s) {\n return s.indexOf(false) === -1\n })\n\n this.debug(this.pattern, set)\n\n this.set = set\n}\n\nMinimatch.prototype.parseNegate = parseNegate\nfunction parseNegate () {\n var pattern = this.pattern\n var negate = false\n var options = this.options\n var negateOffset = 0\n\n if (options.nonegate) return\n\n for (var i = 0, l = pattern.length\n ; i < l && pattern.charAt(i) === '!'\n ; i++) {\n negate = !negate\n negateOffset++\n }\n\n if (negateOffset) this.pattern = pattern.substr(negateOffset)\n this.negate = negate\n}\n\n// Brace expansion:\n// a{b,c}d -> abd acd\n// a{b,}c -> abc ac\n// a{0..3}d -> a0d a1d a2d a3d\n// a{b,c{d,e}f}g -> abg acdfg acefg\n// a{b,c}d{e,f}g -> abdeg acdeg abdeg abdfg\n//\n// Invalid sets are not expanded.\n// a{2..}b -> a{2..}b\n// a{b}c -> a{b}c\nminimatch.braceExpand = function (pattern, options) {\n return braceExpand(pattern, options)\n}\n\nMinimatch.prototype.braceExpand = braceExpand\n\nfunction braceExpand (pattern, options) {\n if (!options) {\n if (this instanceof Minimatch) {\n options = this.options\n } else {\n options = {}\n }\n }\n\n pattern = typeof pattern === 'undefined'\n ? this.pattern : pattern\n\n assertValidPattern(pattern)\n\n // Thanks to Yeting Li for\n // improving this regexp to avoid a ReDOS vulnerability.\n if (options.nobrace || !/\\{(?:(?!\\{).)*\\}/.test(pattern)) {\n // shortcut. no need to expand.\n return [pattern]\n }\n\n return expand(pattern)\n}\n\nvar MAX_PATTERN_LENGTH = 1024 * 64\nvar assertValidPattern = function (pattern) {\n if (typeof pattern !== 'string') {\n throw new TypeError('invalid pattern')\n }\n\n if (pattern.length > MAX_PATTERN_LENGTH) {\n throw new TypeError('pattern is too long')\n }\n}\n\n// parse a component of the expanded set.\n// At this point, no pattern may contain \"/\" in it\n// so we're going to return a 2d array, where each entry is the full\n// pattern, split on '/', and then turned into a regular expression.\n// A regexp is made at the end which joins each array with an\n// escaped /, and another full one which joins each regexp with |.\n//\n// Following the lead of Bash 4.1, note that \"**\" only has special meaning\n// when it is the *only* thing in a path portion. Otherwise, any series\n// of * is equivalent to a single *. Globstar behavior is enabled by\n// default, and can be disabled by setting options.noglobstar.\nMinimatch.prototype.parse = parse\nvar SUBPARSE = {}\nfunction parse (pattern, isSub) {\n assertValidPattern(pattern)\n\n var options = this.options\n\n // shortcuts\n if (pattern === '**') {\n if (!options.noglobstar)\n return GLOBSTAR\n else\n pattern = '*'\n }\n if (pattern === '') return ''\n\n var re = ''\n var hasMagic = !!options.nocase\n var escaping = false\n // ? 
=> one single character\n var patternListStack = []\n var negativeLists = []\n var stateChar\n var inClass = false\n var reClassStart = -1\n var classStart = -1\n // . and .. never match anything that doesn't start with .,\n // even when options.dot is set.\n var patternStart = pattern.charAt(0) === '.' ? '' // anything\n // not (start or / followed by . or .. followed by / or end)\n : options.dot ? '(?!(?:^|\\\\\\/)\\\\.{1,2}(?:$|\\\\\\/))'\n : '(?!\\\\.)'\n var self = this\n\n function clearStateChar () {\n if (stateChar) {\n // we had some state-tracking character\n // that wasn't consumed by this pass.\n switch (stateChar) {\n case '*':\n re += star\n hasMagic = true\n break\n case '?':\n re += qmark\n hasMagic = true\n break\n default:\n re += '\\\\' + stateChar\n break\n }\n self.debug('clearStateChar %j %j', stateChar, re)\n stateChar = false\n }\n }\n\n for (var i = 0, len = pattern.length, c\n ; (i < len) && (c = pattern.charAt(i))\n ; i++) {\n this.debug('%s\\t%s %s %j', pattern, i, re, c)\n\n // skip over any that are escaped.\n if (escaping && reSpecials[c]) {\n re += '\\\\' + c\n escaping = false\n continue\n }\n\n switch (c) {\n /* istanbul ignore next */\n case '/': {\n // completely not allowed, even escaped.\n // Should already be path-split by now.\n return false\n }\n\n case '\\\\':\n clearStateChar()\n escaping = true\n continue\n\n // the various stateChar values\n // for the \"extglob\" stuff.\n case '?':\n case '*':\n case '+':\n case '@':\n case '!':\n this.debug('%s\\t%s %s %j <-- stateChar', pattern, i, re, c)\n\n // all of those are literals inside a class, except that\n // the glob [!a] means [^a] in regexp\n if (inClass) {\n this.debug(' in class')\n if (c === '!' && i === classStart + 1) c = '^'\n re += c\n continue\n }\n\n // if we already have a stateChar, then it means\n // that there was something like ** or +? in there.\n // Handle the stateChar, then proceed with this one.\n self.debug('call clearStateChar %j', stateChar)\n clearStateChar()\n stateChar = c\n // if extglob is disabled, then +(asdf|foo) isn't a thing.\n // just clear the statechar *now*, rather than even diving into\n // the patternList stuff.\n if (options.noext) clearStateChar()\n continue\n\n case '(':\n if (inClass) {\n re += '('\n continue\n }\n\n if (!stateChar) {\n re += '\\\\('\n continue\n }\n\n patternListStack.push({\n type: stateChar,\n start: i - 1,\n reStart: re.length,\n open: plTypes[stateChar].open,\n close: plTypes[stateChar].close\n })\n // negation is (?:(?!js)[^/]*)\n re += stateChar === '!' ? 
'(?:(?!(?:' : '(?:'\n this.debug('plType %j %j', stateChar, re)\n stateChar = false\n continue\n\n case ')':\n if (inClass || !patternListStack.length) {\n re += '\\\\)'\n continue\n }\n\n clearStateChar()\n hasMagic = true\n var pl = patternListStack.pop()\n // negation is (?:(?!js)[^/]*)\n // The others are (?:)\n re += pl.close\n if (pl.type === '!') {\n negativeLists.push(pl)\n }\n pl.reEnd = re.length\n continue\n\n case '|':\n if (inClass || !patternListStack.length || escaping) {\n re += '\\\\|'\n escaping = false\n continue\n }\n\n clearStateChar()\n re += '|'\n continue\n\n // these are mostly the same in regexp and glob\n case '[':\n // swallow any state-tracking char before the [\n clearStateChar()\n\n if (inClass) {\n re += '\\\\' + c\n continue\n }\n\n inClass = true\n classStart = i\n reClassStart = re.length\n re += c\n continue\n\n case ']':\n // a right bracket shall lose its special\n // meaning and represent itself in\n // a bracket expression if it occurs\n // first in the list. -- POSIX.2 2.8.3.2\n if (i === classStart + 1 || !inClass) {\n re += '\\\\' + c\n escaping = false\n continue\n }\n\n // handle the case where we left a class open.\n // \"[z-a]\" is valid, equivalent to \"\\[z-a\\]\"\n // split where the last [ was, make sure we don't have\n // an invalid re. if so, re-walk the contents of the\n // would-be class to re-translate any characters that\n // were passed through as-is\n // TODO: It would probably be faster to determine this\n // without a try/catch and a new RegExp, but it's tricky\n // to do safely. For now, this is safe and works.\n var cs = pattern.substring(classStart + 1, i)\n try {\n RegExp('[' + cs + ']')\n } catch (er) {\n // not a valid class!\n var sp = this.parse(cs, SUBPARSE)\n re = re.substr(0, reClassStart) + '\\\\[' + sp[0] + '\\\\]'\n hasMagic = hasMagic || sp[1]\n inClass = false\n continue\n }\n\n // finish up the class.\n hasMagic = true\n inClass = false\n re += c\n continue\n\n default:\n // swallow any state char that wasn't consumed\n clearStateChar()\n\n if (escaping) {\n // no need\n escaping = false\n } else if (reSpecials[c]\n && !(c === '^' && inClass)) {\n re += '\\\\'\n }\n\n re += c\n\n } // switch\n } // for\n\n // handle the case where we left a class open.\n // \"[abc\" is valid, equivalent to \"\\[abc\"\n if (inClass) {\n // split where the last [ was, and escape it\n // this is a huge pita. We now have to re-walk\n // the contents of the would-be class to re-translate\n // any characters that were passed through as-is\n cs = pattern.substr(classStart + 1)\n sp = this.parse(cs, SUBPARSE)\n re = re.substr(0, reClassStart) + '\\\\[' + sp[0]\n hasMagic = hasMagic || sp[1]\n }\n\n // handle the case where we had a +( thing at the *end*\n // of the pattern.\n // each pattern list stack adds 3 chars, and we need to go through\n // and escape any | chars that were passed through as-is for the regexp.\n // Go through and escape them, taking care not to double-escape any\n // | chars that were already escaped.\n for (pl = patternListStack.pop(); pl; pl = patternListStack.pop()) {\n var tail = re.slice(pl.reStart + pl.open.length)\n this.debug('setting tail', re, pl)\n // maybe some even number of \\, then maybe 1 \\, followed by a |\n tail = tail.replace(/((?:\\\\{2}){0,64})(\\\\?)\\|/g, function (_, $1, $2) {\n if (!$2) {\n // the | isn't already escaped, so escape it.\n $2 = '\\\\'\n }\n\n // need to escape all those slashes *again*, without escaping the\n // one that we need for escaping the | character. 
As it works out,\n // escaping an even number of slashes can be done by simply repeating\n // it exactly after itself. That's why this trick works.\n //\n // I am sorry that you have to see this.\n return $1 + $1 + $2 + '|'\n })\n\n this.debug('tail=%j\\n %s', tail, tail, pl, re)\n var t = pl.type === '*' ? star\n : pl.type === '?' ? qmark\n : '\\\\' + pl.type\n\n hasMagic = true\n re = re.slice(0, pl.reStart) + t + '\\\\(' + tail\n }\n\n // handle trailing things that only matter at the very end.\n clearStateChar()\n if (escaping) {\n // trailing \\\\\n re += '\\\\\\\\'\n }\n\n // only need to apply the nodot start if the re starts with\n // something that could conceivably capture a dot\n var addPatternStart = false\n switch (re.charAt(0)) {\n case '[': case '.': case '(': addPatternStart = true\n }\n\n // Hack to work around lack of negative lookbehind in JS\n // A pattern like: *.!(x).!(y|z) needs to ensure that a name\n // like 'a.xyz.yz' doesn't match. So, the first negative\n // lookahead, has to look ALL the way ahead, to the end of\n // the pattern.\n for (var n = negativeLists.length - 1; n > -1; n--) {\n var nl = negativeLists[n]\n\n var nlBefore = re.slice(0, nl.reStart)\n var nlFirst = re.slice(nl.reStart, nl.reEnd - 8)\n var nlLast = re.slice(nl.reEnd - 8, nl.reEnd)\n var nlAfter = re.slice(nl.reEnd)\n\n nlLast += nlAfter\n\n // Handle nested stuff like *(*.js|!(*.json)), where open parens\n // mean that we should *not* include the ) in the bit that is considered\n // \"after\" the negated section.\n var openParensBefore = nlBefore.split('(').length - 1\n var cleanAfter = nlAfter\n for (i = 0; i < openParensBefore; i++) {\n cleanAfter = cleanAfter.replace(/\\)[+*?]?/, '')\n }\n nlAfter = cleanAfter\n\n var dollar = ''\n if (nlAfter === '' && isSub !== SUBPARSE) {\n dollar = '$'\n }\n var newRe = nlBefore + nlFirst + nlAfter + dollar + nlLast\n re = newRe\n }\n\n // if the re is not \"\" at this point, then we need to make sure\n // it doesn't match against an empty path part.\n // Otherwise a/* will match a/, which it should not.\n if (re !== '' && hasMagic) {\n re = '(?=.)' + re\n }\n\n if (addPatternStart) {\n re = patternStart + re\n }\n\n // parsing just a piece of a larger pattern.\n if (isSub === SUBPARSE) {\n return [re, hasMagic]\n }\n\n // skip the regexp for non-magical patterns\n // unescape anything in it, though, so that it'll be\n // an exact match against a file etc.\n if (!hasMagic) {\n return globUnescape(pattern)\n }\n\n var flags = options.nocase ? 'i' : ''\n try {\n var regExp = new RegExp('^' + re + '$', flags)\n } catch (er) /* istanbul ignore next - should be impossible */ {\n // If it was an invalid regular expression, then it can't match\n // anything. This trick looks for a character after the end of\n // the string, which is of course impossible, except in multi-line\n // mode, but it's not a /m regex.\n return new RegExp('$.')\n }\n\n regExp._glob = pattern\n regExp._src = re\n\n return regExp\n}\n\nminimatch.makeRe = function (pattern, options) {\n return new Minimatch(pattern, options || {}).makeRe()\n}\n\nMinimatch.prototype.makeRe = makeRe\nfunction makeRe () {\n if (this.regexp || this.regexp === false) return this.regexp\n\n // at this point, this.set is a 2d array of partial\n // pattern strings, or \"**\".\n //\n // It's better to use .match(). 
This function shouldn't\n // be used, really, but it's pretty convenient sometimes,\n // when you just want to work with a regex.\n var set = this.set\n\n if (!set.length) {\n this.regexp = false\n return this.regexp\n }\n var options = this.options\n\n var twoStar = options.noglobstar ? star\n : options.dot ? twoStarDot\n : twoStarNoDot\n var flags = options.nocase ? 'i' : ''\n\n var re = set.map(function (pattern) {\n return pattern.map(function (p) {\n return (p === GLOBSTAR) ? twoStar\n : (typeof p === 'string') ? regExpEscape(p)\n : p._src\n }).join('\\\\\\/')\n }).join('|')\n\n // must match entire pattern\n // ending in a * or ** will make it less strict.\n re = '^(?:' + re + ')$'\n\n // can match anything, as long as it's not this.\n if (this.negate) re = '^(?!' + re + ').*$'\n\n try {\n this.regexp = new RegExp(re, flags)\n } catch (ex) /* istanbul ignore next - should be impossible */ {\n this.regexp = false\n }\n return this.regexp\n}\n\nminimatch.match = function (list, pattern, options) {\n options = options || {}\n var mm = new Minimatch(pattern, options)\n list = list.filter(function (f) {\n return mm.match(f)\n })\n if (mm.options.nonull && !list.length) {\n list.push(pattern)\n }\n return list\n}\n\nMinimatch.prototype.match = function match (f, partial) {\n if (typeof partial === 'undefined') partial = this.partial\n this.debug('match', f, this.pattern)\n // short-circuit in the case of busted things.\n // comments, etc.\n if (this.comment) return false\n if (this.empty) return f === ''\n\n if (f === '/' && partial) return true\n\n var options = this.options\n\n // windows: need to use /, not \\\n if (path.sep !== '/') {\n f = f.split(path.sep).join('/')\n }\n\n // treat the test path as a set of pathparts.\n f = f.split(slashSplit)\n this.debug(this.pattern, 'split', f)\n\n // just ONE of the pattern sets in this.set needs to match\n // in order for it to be valid. If negating, then just one\n // match means that we have failed.\n // Either way, return on the first hit.\n\n var set = this.set\n this.debug(this.pattern, 'set', set)\n\n // Find the basename of the path by looking for the last non-empty segment\n var filename\n var i\n for (i = f.length - 1; i >= 0; i--) {\n filename = f[i]\n if (filename) break\n }\n\n for (i = 0; i < set.length; i++) {\n var pattern = set[i]\n var file = f\n if (options.matchBase && pattern.length === 1) {\n file = [filename]\n }\n var hit = this.matchOne(file, pattern, partial)\n if (hit) {\n if (options.flipNegate) return true\n return !this.negate\n }\n }\n\n // didn't get any hits. 
this is success if it's a negative\n // pattern, failure otherwise.\n if (options.flipNegate) return false\n return this.negate\n}\n\n// set partial to true to test if, for example,\n// \"/a/b\" matches the start of \"/*/b/*/d\"\n// Partial means, if you run out of file before you run\n// out of pattern, then that's fine, as long as all\n// the parts match.\nMinimatch.prototype.matchOne = function (file, pattern, partial) {\n var options = this.options\n\n this.debug('matchOne',\n { 'this': this, file: file, pattern: pattern })\n\n this.debug('matchOne', file.length, pattern.length)\n\n for (var fi = 0,\n pi = 0,\n fl = file.length,\n pl = pattern.length\n ; (fi < fl) && (pi < pl)\n ; fi++, pi++) {\n this.debug('matchOne loop')\n var p = pattern[pi]\n var f = file[fi]\n\n this.debug(pattern, p, f)\n\n // should be impossible.\n // some invalid regexp stuff in the set.\n /* istanbul ignore if */\n if (p === false) return false\n\n if (p === GLOBSTAR) {\n this.debug('GLOBSTAR', [pattern, p, f])\n\n // \"**\"\n // a/**/b/**/c would match the following:\n // a/b/x/y/z/c\n // a/x/y/z/b/c\n // a/b/x/b/x/c\n // a/b/c\n // To do this, take the rest of the pattern after\n // the **, and see if it would match the file remainder.\n // If so, return success.\n // If not, the ** \"swallows\" a segment, and try again.\n // This is recursively awful.\n //\n // a/**/b/**/c matching a/b/x/y/z/c\n // - a matches a\n // - doublestar\n // - matchOne(b/x/y/z/c, b/**/c)\n // - b matches b\n // - doublestar\n // - matchOne(x/y/z/c, c) -> no\n // - matchOne(y/z/c, c) -> no\n // - matchOne(z/c, c) -> no\n // - matchOne(c, c) yes, hit\n var fr = fi\n var pr = pi + 1\n if (pr === pl) {\n this.debug('** at the end')\n // a ** at the end will just swallow the rest.\n // We have found a match.\n // however, it will not swallow /.x, unless\n // options.dot is set.\n // . and .. are *never* matched by **, for explosively\n // exponential reasons.\n for (; fi < fl; fi++) {\n if (file[fi] === '.' || file[fi] === '..' ||\n (!options.dot && file[fi].charAt(0) === '.')) return false\n }\n return true\n }\n\n // ok, let's see if we can swallow whatever we can.\n while (fr < fl) {\n var swallowee = file[fr]\n\n this.debug('\\nglobstar while', file, fr, pattern, pr, swallowee)\n\n // XXX remove this slice. Just pass the start index.\n if (this.matchOne(file.slice(fr), pattern.slice(pr), partial)) {\n this.debug('globstar found match!', fr, fl, swallowee)\n // found a match.\n return true\n } else {\n // can't swallow \".\" or \"..\" ever.\n // can only swallow \".foo\" when explicitly asked.\n if (swallowee === '.' || swallowee === '..' 
||\n (!options.dot && swallowee.charAt(0) === '.')) {\n this.debug('dot detected!', file, fr, pattern, pr)\n break\n }\n\n // ** swallows a segment, and continue.\n this.debug('globstar swallow a segment, and continue')\n fr++\n }\n }\n\n // no match was found.\n // However, in partial mode, we can't say this is necessarily over.\n // If there's more *pattern* left, then\n /* istanbul ignore if */\n if (partial) {\n // ran out of file\n this.debug('\\n>>> no match, partial?', file, fr, pattern, pr)\n if (fr === fl) return true\n }\n return false\n }\n\n // something other than **\n // non-magic patterns just have to match exactly\n // patterns with magic have been turned into regexps.\n var hit\n if (typeof p === 'string') {\n hit = f === p\n this.debug('string match', p, f, hit)\n } else {\n hit = f.match(p)\n this.debug('pattern match', p, f, hit)\n }\n\n if (!hit) return false\n }\n\n // Note: ending in / means that we'll get a final \"\"\n // at the end of the pattern. This can only match a\n // corresponding \"\" at the end of the file.\n // If the file ends in /, then it can only match a\n // a pattern that ends in /, unless the pattern just\n // doesn't have any more for it. But, a/b/ should *not*\n // match \"a/b/*\", even though \"\" matches against the\n // [^/]*? pattern, except in partial mode, where it might\n // simply not be reached yet.\n // However, a/b/ should still satisfy a/*\n\n // now either we fell off the end of the pattern, or we're done.\n if (fi === fl && pi === pl) {\n // ran out of pattern and filename at the same time.\n // an exact hit!\n return true\n } else if (fi === fl) {\n // ran out of file, but still had pattern left.\n // this is ok if we're doing the match as part of\n // a glob fs traversal.\n return partial\n } else /* istanbul ignore else */ if (pi === pl) {\n // ran out of pattern, still have file left.\n // this is only acceptable if we're on the very last\n // empty segment of a file with a trailing slash.\n // a/* should match a/b/\n return (fi === fl - 1) && (file[fi] === '')\n }\n\n // should be unreachable.\n /* istanbul ignore next */\n throw new Error('wtf?')\n}\n\n// replace stuff like \\* with *\nfunction globUnescape (s) {\n return s.replace(/\\\\(.)/g, '$1')\n}\n\nfunction regExpEscape (s) {\n return s.replace(/[-[\\]{}()*+?.,\\\\^$|#\\s]/g, '\\\\$&')\n}\n","module.exports = require('./lib/tunnel');\n","'use strict';\n\nvar net = require('net');\nvar tls = require('tls');\nvar http = require('http');\nvar https = require('https');\nvar events = require('events');\nvar assert = require('assert');\nvar util = require('util');\n\n\nexports.httpOverHttp = httpOverHttp;\nexports.httpsOverHttp = httpsOverHttp;\nexports.httpOverHttps = httpOverHttps;\nexports.httpsOverHttps = httpsOverHttps;\n\n\nfunction httpOverHttp(options) {\n var agent = new TunnelingAgent(options);\n agent.request = http.request;\n return agent;\n}\n\nfunction httpsOverHttp(options) {\n var agent = new TunnelingAgent(options);\n agent.request = http.request;\n agent.createSocket = createSecureSocket;\n agent.defaultPort = 443;\n return agent;\n}\n\nfunction httpOverHttps(options) {\n var agent = new TunnelingAgent(options);\n agent.request = https.request;\n return agent;\n}\n\nfunction httpsOverHttps(options) {\n var agent = new TunnelingAgent(options);\n agent.request = https.request;\n agent.createSocket = createSecureSocket;\n agent.defaultPort = 443;\n return agent;\n}\n\n\nfunction TunnelingAgent(options) {\n var self = this;\n self.options = options || {};\n 
self.proxyOptions = self.options.proxy || {};\n self.maxSockets = self.options.maxSockets || http.Agent.defaultMaxSockets;\n self.requests = [];\n self.sockets = [];\n\n self.on('free', function onFree(socket, host, port, localAddress) {\n var options = toOptions(host, port, localAddress);\n for (var i = 0, len = self.requests.length; i < len; ++i) {\n var pending = self.requests[i];\n if (pending.host === options.host && pending.port === options.port) {\n // Detect the request to connect same origin server,\n // reuse the connection.\n self.requests.splice(i, 1);\n pending.request.onSocket(socket);\n return;\n }\n }\n socket.destroy();\n self.removeSocket(socket);\n });\n}\nutil.inherits(TunnelingAgent, events.EventEmitter);\n\nTunnelingAgent.prototype.addRequest = function addRequest(req, host, port, localAddress) {\n var self = this;\n var options = mergeOptions({request: req}, self.options, toOptions(host, port, localAddress));\n\n if (self.sockets.length >= this.maxSockets) {\n // We are over limit so we'll add it to the queue.\n self.requests.push(options);\n return;\n }\n\n // If we are under maxSockets create a new one.\n self.createSocket(options, function(socket) {\n socket.on('free', onFree);\n socket.on('close', onCloseOrRemove);\n socket.on('agentRemove', onCloseOrRemove);\n req.onSocket(socket);\n\n function onFree() {\n self.emit('free', socket, options);\n }\n\n function onCloseOrRemove(err) {\n self.removeSocket(socket);\n socket.removeListener('free', onFree);\n socket.removeListener('close', onCloseOrRemove);\n socket.removeListener('agentRemove', onCloseOrRemove);\n }\n });\n};\n\nTunnelingAgent.prototype.createSocket = function createSocket(options, cb) {\n var self = this;\n var placeholder = {};\n self.sockets.push(placeholder);\n\n var connectOptions = mergeOptions({}, self.proxyOptions, {\n method: 'CONNECT',\n path: options.host + ':' + options.port,\n agent: false,\n headers: {\n host: options.host + ':' + options.port\n }\n });\n if (options.localAddress) {\n connectOptions.localAddress = options.localAddress;\n }\n if (connectOptions.proxyAuth) {\n connectOptions.headers = connectOptions.headers || {};\n connectOptions.headers['Proxy-Authorization'] = 'Basic ' +\n new Buffer(connectOptions.proxyAuth).toString('base64');\n }\n\n debug('making CONNECT request');\n var connectReq = self.request(connectOptions);\n connectReq.useChunkedEncodingByDefault = false; // for v0.6\n connectReq.once('response', onResponse); // for v0.6\n connectReq.once('upgrade', onUpgrade); // for v0.6\n connectReq.once('connect', onConnect); // for v0.7 or later\n connectReq.once('error', onError);\n connectReq.end();\n\n function onResponse(res) {\n // Very hacky. 
This is necessary to avoid http-parser leaks.\n res.upgrade = true;\n }\n\n function onUpgrade(res, socket, head) {\n // Hacky.\n process.nextTick(function() {\n onConnect(res, socket, head);\n });\n }\n\n function onConnect(res, socket, head) {\n connectReq.removeAllListeners();\n socket.removeAllListeners();\n\n if (res.statusCode !== 200) {\n debug('tunneling socket could not be established, statusCode=%d',\n res.statusCode);\n socket.destroy();\n var error = new Error('tunneling socket could not be established, ' +\n 'statusCode=' + res.statusCode);\n error.code = 'ECONNRESET';\n options.request.emit('error', error);\n self.removeSocket(placeholder);\n return;\n }\n if (head.length > 0) {\n debug('got illegal response body from proxy');\n socket.destroy();\n var error = new Error('got illegal response body from proxy');\n error.code = 'ECONNRESET';\n options.request.emit('error', error);\n self.removeSocket(placeholder);\n return;\n }\n debug('tunneling connection has established');\n self.sockets[self.sockets.indexOf(placeholder)] = socket;\n return cb(socket);\n }\n\n function onError(cause) {\n connectReq.removeAllListeners();\n\n debug('tunneling socket could not be established, cause=%s\\n',\n cause.message, cause.stack);\n var error = new Error('tunneling socket could not be established, ' +\n 'cause=' + cause.message);\n error.code = 'ECONNRESET';\n options.request.emit('error', error);\n self.removeSocket(placeholder);\n }\n};\n\nTunnelingAgent.prototype.removeSocket = function removeSocket(socket) {\n var pos = this.sockets.indexOf(socket)\n if (pos === -1) {\n return;\n }\n this.sockets.splice(pos, 1);\n\n var pending = this.requests.shift();\n if (pending) {\n // If we have pending requests and a socket gets closed a new one\n // needs to be created to take over in the pool for the one that closed.\n this.createSocket(pending, function(socket) {\n pending.request.onSocket(socket);\n });\n }\n};\n\nfunction createSecureSocket(options, cb) {\n var self = this;\n TunnelingAgent.prototype.createSocket.call(self, options, function(socket) {\n var hostHeader = options.request.getHeader('host');\n var tlsOptions = mergeOptions({}, self.options, {\n socket: socket,\n servername: hostHeader ? 
hostHeader.replace(/:.*$/, '') : options.host\n });\n\n // 0 is dummy port for v0.6\n var secureSocket = tls.connect(0, tlsOptions);\n self.sockets[self.sockets.indexOf(socket)] = secureSocket;\n cb(secureSocket);\n });\n}\n\n\nfunction toOptions(host, port, localAddress) {\n if (typeof host === 'string') { // since v0.10\n return {\n host: host,\n port: port,\n localAddress: localAddress\n };\n }\n return host; // for v0.11 or later\n}\n\nfunction mergeOptions(target) {\n for (var i = 1, len = arguments.length; i < len; ++i) {\n var overrides = arguments[i];\n if (typeof overrides === 'object') {\n var keys = Object.keys(overrides);\n for (var j = 0, keyLen = keys.length; j < keyLen; ++j) {\n var k = keys[j];\n if (overrides[k] !== undefined) {\n target[k] = overrides[k];\n }\n }\n }\n }\n return target;\n}\n\n\nvar debug;\nif (process.env.NODE_DEBUG && /\\btunnel\\b/.test(process.env.NODE_DEBUG)) {\n debug = function() {\n var args = Array.prototype.slice.call(arguments);\n if (typeof args[0] === 'string') {\n args[0] = 'TUNNEL: ' + args[0];\n } else {\n args.unshift('TUNNEL:');\n }\n console.error.apply(console, args);\n }\n} else {\n debug = function() {};\n}\nexports.debug = debug; // for test\n","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nObject.defineProperty(exports, \"v1\", {\n enumerable: true,\n get: function () {\n return _v.default;\n }\n});\nObject.defineProperty(exports, \"v3\", {\n enumerable: true,\n get: function () {\n return _v2.default;\n }\n});\nObject.defineProperty(exports, \"v4\", {\n enumerable: true,\n get: function () {\n return _v3.default;\n }\n});\nObject.defineProperty(exports, \"v5\", {\n enumerable: true,\n get: function () {\n return _v4.default;\n }\n});\nObject.defineProperty(exports, \"NIL\", {\n enumerable: true,\n get: function () {\n return _nil.default;\n }\n});\nObject.defineProperty(exports, \"version\", {\n enumerable: true,\n get: function () {\n return _version.default;\n }\n});\nObject.defineProperty(exports, \"validate\", {\n enumerable: true,\n get: function () {\n return _validate.default;\n }\n});\nObject.defineProperty(exports, \"stringify\", {\n enumerable: true,\n get: function () {\n return _stringify.default;\n }\n});\nObject.defineProperty(exports, \"parse\", {\n enumerable: true,\n get: function () {\n return _parse.default;\n }\n});\n\nvar _v = _interopRequireDefault(require(\"./v1.js\"));\n\nvar _v2 = _interopRequireDefault(require(\"./v3.js\"));\n\nvar _v3 = _interopRequireDefault(require(\"./v4.js\"));\n\nvar _v4 = _interopRequireDefault(require(\"./v5.js\"));\n\nvar _nil = _interopRequireDefault(require(\"./nil.js\"));\n\nvar _version = _interopRequireDefault(require(\"./version.js\"));\n\nvar _validate = _interopRequireDefault(require(\"./validate.js\"));\n\nvar _stringify = _interopRequireDefault(require(\"./stringify.js\"));\n\nvar _parse = _interopRequireDefault(require(\"./parse.js\"));\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = void 0;\n\nvar _crypto = _interopRequireDefault(require(\"crypto\"));\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; }\n\nfunction md5(bytes) {\n if (Array.isArray(bytes)) {\n bytes = Buffer.from(bytes);\n } else if (typeof bytes === 'string') {\n bytes = Buffer.from(bytes, 'utf8');\n }\n\n return _crypto.default.createHash('md5').update(bytes).digest();\n}\n\nvar _default = md5;\nexports.default = _default;","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = void 0;\nvar _default = '00000000-0000-0000-0000-000000000000';\nexports.default = _default;","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = void 0;\n\nvar _validate = _interopRequireDefault(require(\"./validate.js\"));\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }\n\nfunction parse(uuid) {\n if (!(0, _validate.default)(uuid)) {\n throw TypeError('Invalid UUID');\n }\n\n let v;\n const arr = new Uint8Array(16); // Parse ########-....-....-....-............\n\n arr[0] = (v = parseInt(uuid.slice(0, 8), 16)) >>> 24;\n arr[1] = v >>> 16 & 0xff;\n arr[2] = v >>> 8 & 0xff;\n arr[3] = v & 0xff; // Parse ........-####-....-....-............\n\n arr[4] = (v = parseInt(uuid.slice(9, 13), 16)) >>> 8;\n arr[5] = v & 0xff; // Parse ........-....-####-....-............\n\n arr[6] = (v = parseInt(uuid.slice(14, 18), 16)) >>> 8;\n arr[7] = v & 0xff; // Parse ........-....-....-####-............\n\n arr[8] = (v = parseInt(uuid.slice(19, 23), 16)) >>> 8;\n arr[9] = v & 0xff; // Parse ........-....-....-....-############\n // (Use \"/\" to avoid 32-bit truncation when bit-shifting high-order bytes)\n\n arr[10] = (v = parseInt(uuid.slice(24, 36), 16)) / 0x10000000000 & 0xff;\n arr[11] = v / 0x100000000 & 0xff;\n arr[12] = v >>> 24 & 0xff;\n arr[13] = v >>> 16 & 0xff;\n arr[14] = v >>> 8 & 0xff;\n arr[15] = v & 0xff;\n return arr;\n}\n\nvar _default = parse;\nexports.default = _default;","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = void 0;\nvar _default = /^(?:[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}|00000000-0000-0000-0000-000000000000)$/i;\nexports.default = _default;","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = rng;\n\nvar _crypto = _interopRequireDefault(require(\"crypto\"));\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }\n\nconst rnds8Pool = new Uint8Array(256); // # of random values to pre-allocate\n\nlet poolPtr = rnds8Pool.length;\n\nfunction rng() {\n if (poolPtr > rnds8Pool.length - 16) {\n _crypto.default.randomFillSync(rnds8Pool);\n\n poolPtr = 0;\n }\n\n return rnds8Pool.slice(poolPtr, poolPtr += 16);\n}","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = void 0;\n\nvar _crypto = _interopRequireDefault(require(\"crypto\"));\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; }\n\nfunction sha1(bytes) {\n if (Array.isArray(bytes)) {\n bytes = Buffer.from(bytes);\n } else if (typeof bytes === 'string') {\n bytes = Buffer.from(bytes, 'utf8');\n }\n\n return _crypto.default.createHash('sha1').update(bytes).digest();\n}\n\nvar _default = sha1;\nexports.default = _default;","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = void 0;\n\nvar _validate = _interopRequireDefault(require(\"./validate.js\"));\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }\n\n/**\n * Convert array of 16 byte values to UUID string format of the form:\n * XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX\n */\nconst byteToHex = [];\n\nfor (let i = 0; i < 256; ++i) {\n byteToHex.push((i + 0x100).toString(16).substr(1));\n}\n\nfunction stringify(arr, offset = 0) {\n // Note: Be careful editing this code! It's been tuned for performance\n // and works in ways you may not expect. See https://github.com/uuidjs/uuid/pull/434\n const uuid = (byteToHex[arr[offset + 0]] + byteToHex[arr[offset + 1]] + byteToHex[arr[offset + 2]] + byteToHex[arr[offset + 3]] + '-' + byteToHex[arr[offset + 4]] + byteToHex[arr[offset + 5]] + '-' + byteToHex[arr[offset + 6]] + byteToHex[arr[offset + 7]] + '-' + byteToHex[arr[offset + 8]] + byteToHex[arr[offset + 9]] + '-' + byteToHex[arr[offset + 10]] + byteToHex[arr[offset + 11]] + byteToHex[arr[offset + 12]] + byteToHex[arr[offset + 13]] + byteToHex[arr[offset + 14]] + byteToHex[arr[offset + 15]]).toLowerCase(); // Consistency check for valid UUID. If this throws, it's likely due to one\n // of the following:\n // - One or more input array values don't map to a hex octet (leading to\n // \"undefined\" in the uuid)\n // - Invalid input values for the RFC `version` or `variant` fields\n\n if (!(0, _validate.default)(uuid)) {\n throw TypeError('Stringified UUID is invalid');\n }\n\n return uuid;\n}\n\nvar _default = stringify;\nexports.default = _default;","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = void 0;\n\nvar _rng = _interopRequireDefault(require(\"./rng.js\"));\n\nvar _stringify = _interopRequireDefault(require(\"./stringify.js\"));\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }\n\n// **`v1()` - Generate time-based UUID**\n//\n// Inspired by https://github.com/LiosK/UUID.js\n// and http://docs.python.org/library/uuid.html\nlet _nodeId;\n\nlet _clockseq; // Previous uuid creation time\n\n\nlet _lastMSecs = 0;\nlet _lastNSecs = 0; // See https://github.com/uuidjs/uuid for API details\n\nfunction v1(options, buf, offset) {\n let i = buf && offset || 0;\n const b = buf || new Array(16);\n options = options || {};\n let node = options.node || _nodeId;\n let clockseq = options.clockseq !== undefined ? options.clockseq : _clockseq; // node and clockseq need to be initialized to random values if they're not\n // specified. We do this lazily to minimize issues related to insufficient\n // system entropy. 
See #189\n\n if (node == null || clockseq == null) {\n const seedBytes = options.random || (options.rng || _rng.default)();\n\n if (node == null) {\n // Per 4.5, create and 48-bit node id, (47 random bits + multicast bit = 1)\n node = _nodeId = [seedBytes[0] | 0x01, seedBytes[1], seedBytes[2], seedBytes[3], seedBytes[4], seedBytes[5]];\n }\n\n if (clockseq == null) {\n // Per 4.2.2, randomize (14 bit) clockseq\n clockseq = _clockseq = (seedBytes[6] << 8 | seedBytes[7]) & 0x3fff;\n }\n } // UUID timestamps are 100 nano-second units since the Gregorian epoch,\n // (1582-10-15 00:00). JSNumbers aren't precise enough for this, so\n // time is handled internally as 'msecs' (integer milliseconds) and 'nsecs'\n // (100-nanoseconds offset from msecs) since unix epoch, 1970-01-01 00:00.\n\n\n let msecs = options.msecs !== undefined ? options.msecs : Date.now(); // Per 4.2.1.2, use count of uuid's generated during the current clock\n // cycle to simulate higher resolution clock\n\n let nsecs = options.nsecs !== undefined ? options.nsecs : _lastNSecs + 1; // Time since last uuid creation (in msecs)\n\n const dt = msecs - _lastMSecs + (nsecs - _lastNSecs) / 10000; // Per 4.2.1.2, Bump clockseq on clock regression\n\n if (dt < 0 && options.clockseq === undefined) {\n clockseq = clockseq + 1 & 0x3fff;\n } // Reset nsecs if clock regresses (new clockseq) or we've moved onto a new\n // time interval\n\n\n if ((dt < 0 || msecs > _lastMSecs) && options.nsecs === undefined) {\n nsecs = 0;\n } // Per 4.2.1.2 Throw error if too many uuids are requested\n\n\n if (nsecs >= 10000) {\n throw new Error(\"uuid.v1(): Can't create more than 10M uuids/sec\");\n }\n\n _lastMSecs = msecs;\n _lastNSecs = nsecs;\n _clockseq = clockseq; // Per 4.1.4 - Convert from unix epoch to Gregorian epoch\n\n msecs += 12219292800000; // `time_low`\n\n const tl = ((msecs & 0xfffffff) * 10000 + nsecs) % 0x100000000;\n b[i++] = tl >>> 24 & 0xff;\n b[i++] = tl >>> 16 & 0xff;\n b[i++] = tl >>> 8 & 0xff;\n b[i++] = tl & 0xff; // `time_mid`\n\n const tmh = msecs / 0x100000000 * 10000 & 0xfffffff;\n b[i++] = tmh >>> 8 & 0xff;\n b[i++] = tmh & 0xff; // `time_high_and_version`\n\n b[i++] = tmh >>> 24 & 0xf | 0x10; // include version\n\n b[i++] = tmh >>> 16 & 0xff; // `clock_seq_hi_and_reserved` (Per 4.2.2 - include variant)\n\n b[i++] = clockseq >>> 8 | 0x80; // `clock_seq_low`\n\n b[i++] = clockseq & 0xff; // `node`\n\n for (let n = 0; n < 6; ++n) {\n b[i + n] = node[n];\n }\n\n return buf || (0, _stringify.default)(b);\n}\n\nvar _default = v1;\nexports.default = _default;","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = void 0;\n\nvar _v = _interopRequireDefault(require(\"./v35.js\"));\n\nvar _md = _interopRequireDefault(require(\"./md5.js\"));\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }\n\nconst v3 = (0, _v.default)('v3', 0x30, _md.default);\nvar _default = v3;\nexports.default = _default;","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = _default;\nexports.URL = exports.DNS = void 0;\n\nvar _stringify = _interopRequireDefault(require(\"./stringify.js\"));\n\nvar _parse = _interopRequireDefault(require(\"./parse.js\"));\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; }\n\nfunction stringToBytes(str) {\n str = unescape(encodeURIComponent(str)); // UTF8 escape\n\n const bytes = [];\n\n for (let i = 0; i < str.length; ++i) {\n bytes.push(str.charCodeAt(i));\n }\n\n return bytes;\n}\n\nconst DNS = '6ba7b810-9dad-11d1-80b4-00c04fd430c8';\nexports.DNS = DNS;\nconst URL = '6ba7b811-9dad-11d1-80b4-00c04fd430c8';\nexports.URL = URL;\n\nfunction _default(name, version, hashfunc) {\n function generateUUID(value, namespace, buf, offset) {\n if (typeof value === 'string') {\n value = stringToBytes(value);\n }\n\n if (typeof namespace === 'string') {\n namespace = (0, _parse.default)(namespace);\n }\n\n if (namespace.length !== 16) {\n throw TypeError('Namespace must be array-like (16 iterable integer values, 0-255)');\n } // Compute hash of namespace and value, Per 4.3\n // Future: Use spread syntax when supported on all platforms, e.g. `bytes =\n // hashfunc([...namespace, ... value])`\n\n\n let bytes = new Uint8Array(16 + value.length);\n bytes.set(namespace);\n bytes.set(value, namespace.length);\n bytes = hashfunc(bytes);\n bytes[6] = bytes[6] & 0x0f | version;\n bytes[8] = bytes[8] & 0x3f | 0x80;\n\n if (buf) {\n offset = offset || 0;\n\n for (let i = 0; i < 16; ++i) {\n buf[offset + i] = bytes[i];\n }\n\n return buf;\n }\n\n return (0, _stringify.default)(bytes);\n } // Function#name is not settable on some platforms (#270)\n\n\n try {\n generateUUID.name = name; // eslint-disable-next-line no-empty\n } catch (err) {} // For CommonJS default export support\n\n\n generateUUID.DNS = DNS;\n generateUUID.URL = URL;\n return generateUUID;\n}","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = void 0;\n\nvar _rng = _interopRequireDefault(require(\"./rng.js\"));\n\nvar _stringify = _interopRequireDefault(require(\"./stringify.js\"));\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }\n\nfunction v4(options, buf, offset) {\n options = options || {};\n\n const rnds = options.random || (options.rng || _rng.default)(); // Per 4.4, set bits for version and `clock_seq_hi_and_reserved`\n\n\n rnds[6] = rnds[6] & 0x0f | 0x40;\n rnds[8] = rnds[8] & 0x3f | 0x80; // Copy bytes to buffer, if provided\n\n if (buf) {\n offset = offset || 0;\n\n for (let i = 0; i < 16; ++i) {\n buf[offset + i] = rnds[i];\n }\n\n return buf;\n }\n\n return (0, _stringify.default)(rnds);\n}\n\nvar _default = v4;\nexports.default = _default;","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = void 0;\n\nvar _v = _interopRequireDefault(require(\"./v35.js\"));\n\nvar _sha = _interopRequireDefault(require(\"./sha1.js\"));\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }\n\nconst v5 = (0, _v.default)('v5', 0x50, _sha.default);\nvar _default = v5;\nexports.default = _default;","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = void 0;\n\nvar _regex = _interopRequireDefault(require(\"./regex.js\"));\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; }\n\nfunction validate(uuid) {\n return typeof uuid === 'string' && _regex.default.test(uuid);\n}\n\nvar _default = validate;\nexports.default = _default;","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = void 0;\n\nvar _validate = _interopRequireDefault(require(\"./validate.js\"));\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }\n\nfunction version(uuid) {\n if (!(0, _validate.default)(uuid)) {\n throw TypeError('Invalid UUID');\n }\n\n return parseInt(uuid.substr(14, 1), 16);\n}\n\nvar _default = version;\nexports.default = _default;","module.exports = require(\"assert\");","module.exports = require(\"crypto\");","module.exports = require(\"events\");","module.exports = require(\"fs\");","module.exports = require(\"http\");","module.exports = require(\"https\");","module.exports = require(\"net\");","module.exports = require(\"os\");","module.exports = require(\"path\");","module.exports = require(\"stream\");","module.exports = require(\"tls\");","module.exports = require(\"util\");","'use strict';\n\nvar Node = require('../nodes/Node.js');\nvar Scalar = require('../nodes/Scalar.js');\nvar resolveBlockMap = require('./resolve-block-map.js');\nvar resolveBlockSeq = require('./resolve-block-seq.js');\nvar resolveFlowCollection = require('./resolve-flow-collection.js');\n\nfunction composeCollection(CN, ctx, token, tagToken, onError) {\n let coll;\n switch (token.type) {\n case 'block-map': {\n coll = resolveBlockMap.resolveBlockMap(CN, ctx, token, onError);\n break;\n }\n case 'block-seq': {\n coll = resolveBlockSeq.resolveBlockSeq(CN, ctx, token, onError);\n break;\n }\n case 'flow-collection': {\n coll = resolveFlowCollection.resolveFlowCollection(CN, ctx, token, onError);\n break;\n }\n }\n if (!tagToken)\n return coll;\n const tagName = ctx.directives.tagName(tagToken.source, msg => onError(tagToken, 'TAG_RESOLVE_FAILED', msg));\n if (!tagName)\n return coll;\n // Cast needed due to: https://github.com/Microsoft/TypeScript/issues/3841\n const Coll = coll.constructor;\n if (tagName === '!' || tagName === Coll.tagName) {\n coll.tag = Coll.tagName;\n return coll;\n }\n const expType = Node.isMap(coll) ? 'map' : 'seq';\n let tag = ctx.schema.tags.find(t => t.collection === expType && t.tag === tagName);\n if (!tag) {\n const kt = ctx.schema.knownTags[tagName];\n if (kt && kt.collection === expType) {\n ctx.schema.tags.push(Object.assign({}, kt, { default: false }));\n tag = kt;\n }\n else {\n onError(tagToken, 'TAG_RESOLVE_FAILED', `Unresolved tag: ${tagName}`, true);\n coll.tag = tagName;\n return coll;\n }\n }\n const res = tag.resolve(coll, msg => onError(tagToken, 'TAG_RESOLVE_FAILED', msg), ctx.options);\n const node = Node.isNode(res)\n ? 
res\n : new Scalar.Scalar(res);\n node.range = coll.range;\n node.tag = tagName;\n if (tag?.format)\n node.format = tag.format;\n return node;\n}\n\nexports.composeCollection = composeCollection;\n","'use strict';\n\nvar Document = require('../doc/Document.js');\nvar composeNode = require('./compose-node.js');\nvar resolveEnd = require('./resolve-end.js');\nvar resolveProps = require('./resolve-props.js');\n\nfunction composeDoc(options, directives, { offset, start, value, end }, onError) {\n const opts = Object.assign({ _directives: directives }, options);\n const doc = new Document.Document(undefined, opts);\n const ctx = {\n atRoot: true,\n directives: doc.directives,\n options: doc.options,\n schema: doc.schema\n };\n const props = resolveProps.resolveProps(start, {\n indicator: 'doc-start',\n next: value ?? end?.[0],\n offset,\n onError,\n startOnNewline: true\n });\n if (props.found) {\n doc.directives.docStart = true;\n if (value &&\n (value.type === 'block-map' || value.type === 'block-seq') &&\n !props.hasNewline)\n onError(props.end, 'MISSING_CHAR', 'Block collection cannot start on same line with directives-end marker');\n }\n doc.contents = value\n ? composeNode.composeNode(ctx, value, props, onError)\n : composeNode.composeEmptyNode(ctx, props.end, start, null, props, onError);\n const contentEnd = doc.contents.range[2];\n const re = resolveEnd.resolveEnd(end, contentEnd, false, onError);\n if (re.comment)\n doc.comment = re.comment;\n doc.range = [offset, contentEnd, re.offset];\n return doc;\n}\n\nexports.composeDoc = composeDoc;\n","'use strict';\n\nvar Alias = require('../nodes/Alias.js');\nvar composeCollection = require('./compose-collection.js');\nvar composeScalar = require('./compose-scalar.js');\nvar resolveEnd = require('./resolve-end.js');\nvar utilEmptyScalarPosition = require('./util-empty-scalar-position.js');\n\nconst CN = { composeNode, composeEmptyNode };\nfunction composeNode(ctx, token, props, onError) {\n const { spaceBefore, comment, anchor, tag } = props;\n let node;\n let isSrcToken = true;\n switch (token.type) {\n case 'alias':\n node = composeAlias(ctx, token, onError);\n if (anchor || tag)\n onError(token, 'ALIAS_PROPS', 'An alias node must not specify any properties');\n break;\n case 'scalar':\n case 'single-quoted-scalar':\n case 'double-quoted-scalar':\n case 'block-scalar':\n node = composeScalar.composeScalar(ctx, token, tag, onError);\n if (anchor)\n node.anchor = anchor.source.substring(1);\n break;\n case 'block-map':\n case 'block-seq':\n case 'flow-collection':\n node = composeCollection.composeCollection(CN, ctx, token, tag, onError);\n if (anchor)\n node.anchor = anchor.source.substring(1);\n break;\n default: {\n const message = token.type === 'error'\n ? 
token.message\n : `Unsupported token (type: ${token.type})`;\n onError(token, 'UNEXPECTED_TOKEN', message);\n node = composeEmptyNode(ctx, token.offset, undefined, null, props, onError);\n isSrcToken = false;\n }\n }\n if (anchor && node.anchor === '')\n onError(anchor, 'BAD_ALIAS', 'Anchor cannot be an empty string');\n if (spaceBefore)\n node.spaceBefore = true;\n if (comment) {\n if (token.type === 'scalar' && token.source === '')\n node.comment = comment;\n else\n node.commentBefore = comment;\n }\n // @ts-expect-error Type checking misses meaning of isSrcToken\n if (ctx.options.keepSourceTokens && isSrcToken)\n node.srcToken = token;\n return node;\n}\nfunction composeEmptyNode(ctx, offset, before, pos, { spaceBefore, comment, anchor, tag, end }, onError) {\n const token = {\n type: 'scalar',\n offset: utilEmptyScalarPosition.emptyScalarPosition(offset, before, pos),\n indent: -1,\n source: ''\n };\n const node = composeScalar.composeScalar(ctx, token, tag, onError);\n if (anchor) {\n node.anchor = anchor.source.substring(1);\n if (node.anchor === '')\n onError(anchor, 'BAD_ALIAS', 'Anchor cannot be an empty string');\n }\n if (spaceBefore)\n node.spaceBefore = true;\n if (comment) {\n node.comment = comment;\n node.range[2] = end;\n }\n return node;\n}\nfunction composeAlias({ options }, { offset, source, end }, onError) {\n const alias = new Alias.Alias(source.substring(1));\n if (alias.source === '')\n onError(offset, 'BAD_ALIAS', 'Alias cannot be an empty string');\n if (alias.source.endsWith(':'))\n onError(offset + source.length - 1, 'BAD_ALIAS', 'Alias ending in : is ambiguous', true);\n const valueEnd = offset + source.length;\n const re = resolveEnd.resolveEnd(end, valueEnd, options.strict, onError);\n alias.range = [offset, valueEnd, re.offset];\n if (re.comment)\n alias.comment = re.comment;\n return alias;\n}\n\nexports.composeEmptyNode = composeEmptyNode;\nexports.composeNode = composeNode;\n","'use strict';\n\nvar Node = require('../nodes/Node.js');\nvar Scalar = require('../nodes/Scalar.js');\nvar resolveBlockScalar = require('./resolve-block-scalar.js');\nvar resolveFlowScalar = require('./resolve-flow-scalar.js');\n\nfunction composeScalar(ctx, token, tagToken, onError) {\n const { value, type, comment, range } = token.type === 'block-scalar'\n ? resolveBlockScalar.resolveBlockScalar(token, ctx.options.strict, onError)\n : resolveFlowScalar.resolveFlowScalar(token, ctx.options.strict, onError);\n const tagName = tagToken\n ? ctx.directives.tagName(tagToken.source, msg => onError(tagToken, 'TAG_RESOLVE_FAILED', msg))\n : null;\n const tag = tagToken && tagName\n ? findScalarTagByName(ctx.schema, value, tagName, tagToken, onError)\n : token.type === 'scalar'\n ? findScalarTagByTest(ctx, value, token, onError)\n : ctx.schema[Node.SCALAR];\n let scalar;\n try {\n const res = tag.resolve(value, msg => onError(tagToken ?? token, 'TAG_RESOLVE_FAILED', msg), ctx.options);\n scalar = Node.isScalar(res) ? res : new Scalar.Scalar(res);\n }\n catch (error) {\n const msg = error instanceof Error ? error.message : String(error);\n onError(tagToken ?? 
token, 'TAG_RESOLVE_FAILED', msg);\n scalar = new Scalar.Scalar(value);\n }\n scalar.range = range;\n scalar.source = value;\n if (type)\n scalar.type = type;\n if (tagName)\n scalar.tag = tagName;\n if (tag.format)\n scalar.format = tag.format;\n if (comment)\n scalar.comment = comment;\n return scalar;\n}\nfunction findScalarTagByName(schema, value, tagName, tagToken, onError) {\n if (tagName === '!')\n return schema[Node.SCALAR]; // non-specific tag\n const matchWithTest = [];\n for (const tag of schema.tags) {\n if (!tag.collection && tag.tag === tagName) {\n if (tag.default && tag.test)\n matchWithTest.push(tag);\n else\n return tag;\n }\n }\n for (const tag of matchWithTest)\n if (tag.test?.test(value))\n return tag;\n const kt = schema.knownTags[tagName];\n if (kt && !kt.collection) {\n // Ensure that the known tag is available for stringifying,\n // but does not get used by default.\n schema.tags.push(Object.assign({}, kt, { default: false, test: undefined }));\n return kt;\n }\n onError(tagToken, 'TAG_RESOLVE_FAILED', `Unresolved tag: ${tagName}`, tagName !== 'tag:yaml.org,2002:str');\n return schema[Node.SCALAR];\n}\nfunction findScalarTagByTest({ directives, schema }, value, token, onError) {\n const tag = schema.tags.find(tag => tag.default && tag.test?.test(value)) || schema[Node.SCALAR];\n if (schema.compat) {\n const compat = schema.compat.find(tag => tag.default && tag.test?.test(value)) ??\n schema[Node.SCALAR];\n if (tag.tag !== compat.tag) {\n const ts = directives.tagString(tag.tag);\n const cs = directives.tagString(compat.tag);\n const msg = `Value may be parsed as either ${ts} or ${cs}`;\n onError(token, 'TAG_RESOLVE_FAILED', msg, true);\n }\n }\n return tag;\n}\n\nexports.composeScalar = composeScalar;\n","'use strict';\n\nvar directives = require('../doc/directives.js');\nvar Document = require('../doc/Document.js');\nvar errors = require('../errors.js');\nvar Node = require('../nodes/Node.js');\nvar composeDoc = require('./compose-doc.js');\nvar resolveEnd = require('./resolve-end.js');\n\nfunction getErrorPos(src) {\n if (typeof src === 'number')\n return [src, src + 1];\n if (Array.isArray(src))\n return src.length === 2 ? src : [src[0], src[1]];\n const { offset, source } = src;\n return [offset, offset + (typeof source === 'string' ? source.length : 1)];\n}\nfunction parsePrelude(prelude) {\n let comment = '';\n let atComment = false;\n let afterEmptyLine = false;\n for (let i = 0; i < prelude.length; ++i) {\n const source = prelude[i];\n switch (source[0]) {\n case '#':\n comment +=\n (comment === '' ? '' : afterEmptyLine ? 
'\\n\\n' : '\\n') +\n (source.substring(1) || ' ');\n atComment = true;\n afterEmptyLine = false;\n break;\n case '%':\n if (prelude[i + 1]?.[0] !== '#')\n i += 1;\n atComment = false;\n break;\n default:\n // This may be wrong after doc-end, but in that case it doesn't matter\n if (!atComment)\n afterEmptyLine = true;\n atComment = false;\n }\n }\n return { comment, afterEmptyLine };\n}\n/**\n * Compose a stream of CST nodes into a stream of YAML Documents.\n *\n * ```ts\n * import { Composer, Parser } from 'yaml'\n *\n * const src: string = ...\n * const tokens = new Parser().parse(src)\n * const docs = new Composer().compose(tokens)\n * ```\n */\nclass Composer {\n constructor(options = {}) {\n this.doc = null;\n this.atDirectives = false;\n this.prelude = [];\n this.errors = [];\n this.warnings = [];\n this.onError = (source, code, message, warning) => {\n const pos = getErrorPos(source);\n if (warning)\n this.warnings.push(new errors.YAMLWarning(pos, code, message));\n else\n this.errors.push(new errors.YAMLParseError(pos, code, message));\n };\n // eslint-disable-next-line @typescript-eslint/prefer-nullish-coalescing\n this.directives = new directives.Directives({ version: options.version || '1.2' });\n this.options = options;\n }\n decorate(doc, afterDoc) {\n const { comment, afterEmptyLine } = parsePrelude(this.prelude);\n //console.log({ dc: doc.comment, prelude, comment })\n if (comment) {\n const dc = doc.contents;\n if (afterDoc) {\n doc.comment = doc.comment ? `${doc.comment}\\n${comment}` : comment;\n }\n else if (afterEmptyLine || doc.directives.docStart || !dc) {\n doc.commentBefore = comment;\n }\n else if (Node.isCollection(dc) && !dc.flow && dc.items.length > 0) {\n let it = dc.items[0];\n if (Node.isPair(it))\n it = it.key;\n const cb = it.commentBefore;\n it.commentBefore = cb ? `${comment}\\n${cb}` : comment;\n }\n else {\n const cb = dc.commentBefore;\n dc.commentBefore = cb ? `${comment}\\n${cb}` : comment;\n }\n }\n if (afterDoc) {\n Array.prototype.push.apply(doc.errors, this.errors);\n Array.prototype.push.apply(doc.warnings, this.warnings);\n }\n else {\n doc.errors = this.errors;\n doc.warnings = this.warnings;\n }\n this.prelude = [];\n this.errors = [];\n this.warnings = [];\n }\n /**\n * Current stream status information.\n *\n * Mostly useful at the end of input for an empty stream.\n */\n streamInfo() {\n return {\n comment: parsePrelude(this.prelude).comment,\n directives: this.directives,\n errors: this.errors,\n warnings: this.warnings\n };\n }\n /**\n * Compose tokens into documents.\n *\n * @param forceDoc - If the stream contains no document, still emit a final document including any comments and directives that would be applied to a subsequent document.\n * @param endOffset - Should be set if `forceDoc` is also set, to set the document range end and to indicate errors correctly.\n */\n *compose(tokens, forceDoc = false, endOffset = -1) {\n for (const token of tokens)\n yield* this.next(token);\n yield* this.end(forceDoc, endOffset);\n }\n /** Advance the composer by one CST token. 
*/\n *next(token) {\n if (process.env.LOG_STREAM)\n console.dir(token, { depth: null });\n switch (token.type) {\n case 'directive':\n this.directives.add(token.source, (offset, message, warning) => {\n const pos = getErrorPos(token);\n pos[0] += offset;\n this.onError(pos, 'BAD_DIRECTIVE', message, warning);\n });\n this.prelude.push(token.source);\n this.atDirectives = true;\n break;\n case 'document': {\n const doc = composeDoc.composeDoc(this.options, this.directives, token, this.onError);\n if (this.atDirectives && !doc.directives.docStart)\n this.onError(token, 'MISSING_CHAR', 'Missing directives-end/doc-start indicator line');\n this.decorate(doc, false);\n if (this.doc)\n yield this.doc;\n this.doc = doc;\n this.atDirectives = false;\n break;\n }\n case 'byte-order-mark':\n case 'space':\n break;\n case 'comment':\n case 'newline':\n this.prelude.push(token.source);\n break;\n case 'error': {\n const msg = token.source\n ? `${token.message}: ${JSON.stringify(token.source)}`\n : token.message;\n const error = new errors.YAMLParseError(getErrorPos(token), 'UNEXPECTED_TOKEN', msg);\n if (this.atDirectives || !this.doc)\n this.errors.push(error);\n else\n this.doc.errors.push(error);\n break;\n }\n case 'doc-end': {\n if (!this.doc) {\n const msg = 'Unexpected doc-end without preceding document';\n this.errors.push(new errors.YAMLParseError(getErrorPos(token), 'UNEXPECTED_TOKEN', msg));\n break;\n }\n this.doc.directives.docEnd = true;\n const end = resolveEnd.resolveEnd(token.end, token.offset + token.source.length, this.doc.options.strict, this.onError);\n this.decorate(this.doc, true);\n if (end.comment) {\n const dc = this.doc.comment;\n this.doc.comment = dc ? `${dc}\\n${end.comment}` : end.comment;\n }\n this.doc.range[2] = end.offset;\n break;\n }\n default:\n this.errors.push(new errors.YAMLParseError(getErrorPos(token), 'UNEXPECTED_TOKEN', `Unsupported token ${token.type}`));\n }\n }\n /**\n * Call at end of input to yield any remaining document.\n *\n * @param forceDoc - If the stream contains no document, still emit a final document including any comments and directives that would be applied to a subsequent document.\n * @param endOffset - Should be set if `forceDoc` is also set, to set the document range end and to indicate errors correctly.\n */\n *end(forceDoc = false, endOffset = -1) {\n if (this.doc) {\n this.decorate(this.doc, true);\n yield this.doc;\n this.doc = null;\n }\n else if (forceDoc) {\n const opts = Object.assign({ _directives: this.directives }, this.options);\n const doc = new Document.Document(undefined, opts);\n if (this.atDirectives)\n this.onError(endOffset, 'MISSING_CHAR', 'Missing directives-end indicator line');\n doc.range = [0, endOffset, endOffset];\n this.decorate(doc, false);\n yield doc;\n }\n }\n}\n\nexports.Composer = Composer;\n","'use strict';\n\nvar Pair = require('../nodes/Pair.js');\nvar YAMLMap = require('../nodes/YAMLMap.js');\nvar resolveProps = require('./resolve-props.js');\nvar utilContainsNewline = require('./util-contains-newline.js');\nvar utilFlowIndentCheck = require('./util-flow-indent-check.js');\nvar utilMapIncludes = require('./util-map-includes.js');\n\nconst startColMsg = 'All mapping items must start at the same column';\nfunction resolveBlockMap({ composeNode, composeEmptyNode }, ctx, bm, onError) {\n const map = new YAMLMap.YAMLMap(ctx.schema);\n if (ctx.atRoot)\n ctx.atRoot = false;\n let offset = bm.offset;\n let commentEnd = null;\n for (const collItem of bm.items) {\n const { start, key, sep, value } = 
collItem;\n // key properties\n const keyProps = resolveProps.resolveProps(start, {\n indicator: 'explicit-key-ind',\n next: key ?? sep?.[0],\n offset,\n onError,\n startOnNewline: true\n });\n const implicitKey = !keyProps.found;\n if (implicitKey) {\n if (key) {\n if (key.type === 'block-seq')\n onError(offset, 'BLOCK_AS_IMPLICIT_KEY', 'A block sequence may not be used as an implicit map key');\n else if ('indent' in key && key.indent !== bm.indent)\n onError(offset, 'BAD_INDENT', startColMsg);\n }\n if (!keyProps.anchor && !keyProps.tag && !sep) {\n commentEnd = keyProps.end;\n if (keyProps.comment) {\n if (map.comment)\n map.comment += '\\n' + keyProps.comment;\n else\n map.comment = keyProps.comment;\n }\n continue;\n }\n if (keyProps.hasNewlineAfterProp || utilContainsNewline.containsNewline(key)) {\n onError(key ?? start[start.length - 1], 'MULTILINE_IMPLICIT_KEY', 'Implicit keys need to be on a single line');\n }\n }\n else if (keyProps.found?.indent !== bm.indent) {\n onError(offset, 'BAD_INDENT', startColMsg);\n }\n // key value\n const keyStart = keyProps.end;\n const keyNode = key\n ? composeNode(ctx, key, keyProps, onError)\n : composeEmptyNode(ctx, keyStart, start, null, keyProps, onError);\n if (ctx.schema.compat)\n utilFlowIndentCheck.flowIndentCheck(bm.indent, key, onError);\n if (utilMapIncludes.mapIncludes(ctx, map.items, keyNode))\n onError(keyStart, 'DUPLICATE_KEY', 'Map keys must be unique');\n // value properties\n const valueProps = resolveProps.resolveProps(sep ?? [], {\n indicator: 'map-value-ind',\n next: value,\n offset: keyNode.range[2],\n onError,\n startOnNewline: !key || key.type === 'block-scalar'\n });\n offset = valueProps.end;\n if (valueProps.found) {\n if (implicitKey) {\n if (value?.type === 'block-map' && !valueProps.hasNewline)\n onError(offset, 'BLOCK_AS_IMPLICIT_KEY', 'Nested mappings are not allowed in compact mappings');\n if (ctx.options.strict &&\n keyProps.start < valueProps.found.offset - 1024)\n onError(keyNode.range, 'KEY_OVER_1024_CHARS', 'The : indicator must be at most 1024 chars after the start of an implicit block mapping key');\n }\n // value value\n const valueNode = value\n ? composeNode(ctx, value, valueProps, onError)\n : composeEmptyNode(ctx, offset, sep, null, valueProps, onError);\n if (ctx.schema.compat)\n utilFlowIndentCheck.flowIndentCheck(bm.indent, value, onError);\n offset = valueNode.range[2];\n const pair = new Pair.Pair(keyNode, valueNode);\n if (ctx.options.keepSourceTokens)\n pair.srcToken = collItem;\n map.items.push(pair);\n }\n else {\n // key with no value\n if (implicitKey)\n onError(keyNode.range, 'MISSING_CHAR', 'Implicit map keys need to be followed by map values');\n if (valueProps.comment) {\n if (keyNode.comment)\n keyNode.comment += '\\n' + valueProps.comment;\n else\n keyNode.comment = valueProps.comment;\n }\n const pair = new Pair.Pair(keyNode);\n if (ctx.options.keepSourceTokens)\n pair.srcToken = collItem;\n map.items.push(pair);\n }\n }\n if (commentEnd && commentEnd < offset)\n onError(commentEnd, 'IMPOSSIBLE', 'Map comment with trailing content');\n map.range = [bm.offset, offset, commentEnd ?? 
offset];\n return map;\n}\n\nexports.resolveBlockMap = resolveBlockMap;\n","'use strict';\n\nvar Scalar = require('../nodes/Scalar.js');\n\nfunction resolveBlockScalar(scalar, strict, onError) {\n const start = scalar.offset;\n const header = parseBlockScalarHeader(scalar, strict, onError);\n if (!header)\n return { value: '', type: null, comment: '', range: [start, start, start] };\n const type = header.mode === '>' ? Scalar.Scalar.BLOCK_FOLDED : Scalar.Scalar.BLOCK_LITERAL;\n const lines = scalar.source ? splitLines(scalar.source) : [];\n // determine the end of content & start of chomping\n let chompStart = lines.length;\n for (let i = lines.length - 1; i >= 0; --i) {\n const content = lines[i][1];\n if (content === '' || content === '\\r')\n chompStart = i;\n else\n break;\n }\n // shortcut for empty contents\n if (chompStart === 0) {\n const value = header.chomp === '+' && lines.length > 0\n ? '\\n'.repeat(Math.max(1, lines.length - 1))\n : '';\n let end = start + header.length;\n if (scalar.source)\n end += scalar.source.length;\n return { value, type, comment: header.comment, range: [start, end, end] };\n }\n // find the indentation level to trim from start\n let trimIndent = scalar.indent + header.indent;\n let offset = scalar.offset + header.length;\n let contentStart = 0;\n for (let i = 0; i < chompStart; ++i) {\n const [indent, content] = lines[i];\n if (content === '' || content === '\\r') {\n if (header.indent === 0 && indent.length > trimIndent)\n trimIndent = indent.length;\n }\n else {\n if (indent.length < trimIndent) {\n const message = 'Block scalars with more-indented leading empty lines must use an explicit indentation indicator';\n onError(offset + indent.length, 'MISSING_CHAR', message);\n }\n if (header.indent === 0)\n trimIndent = indent.length;\n contentStart = i;\n break;\n }\n offset += indent.length + content.length + 1;\n }\n // include trailing more-indented empty lines in content\n for (let i = lines.length - 1; i >= chompStart; --i) {\n if (lines[i][0].length > trimIndent)\n chompStart = i + 1;\n }\n let value = '';\n let sep = '';\n let prevMoreIndented = false;\n // leading whitespace is kept intact\n for (let i = 0; i < contentStart; ++i)\n value += lines[i][0].slice(trimIndent) + '\\n';\n for (let i = contentStart; i < chompStart; ++i) {\n let [indent, content] = lines[i];\n offset += indent.length + content.length + 1;\n const crlf = content[content.length - 1] === '\\r';\n if (crlf)\n content = content.slice(0, -1);\n /* istanbul ignore if already caught in lexer */\n if (content && indent.length < trimIndent) {\n const src = header.indent\n ? 'explicit indentation indicator'\n : 'first line';\n const message = `Block scalar lines must not be less indented than their ${src}`;\n onError(offset - content.length - (crlf ? 
2 : 1), 'BAD_INDENT', message);\n indent = '';\n }\n if (type === Scalar.Scalar.BLOCK_LITERAL) {\n value += sep + indent.slice(trimIndent) + content;\n sep = '\\n';\n }\n else if (indent.length > trimIndent || content[0] === '\\t') {\n // more-indented content within a folded block\n if (sep === ' ')\n sep = '\\n';\n else if (!prevMoreIndented && sep === '\\n')\n sep = '\\n\\n';\n value += sep + indent.slice(trimIndent) + content;\n sep = '\\n';\n prevMoreIndented = true;\n }\n else if (content === '') {\n // empty line\n if (sep === '\\n')\n value += '\\n';\n else\n sep = '\\n';\n }\n else {\n value += sep + content;\n sep = ' ';\n prevMoreIndented = false;\n }\n }\n switch (header.chomp) {\n case '-':\n break;\n case '+':\n for (let i = chompStart; i < lines.length; ++i)\n value += '\\n' + lines[i][0].slice(trimIndent);\n if (value[value.length - 1] !== '\\n')\n value += '\\n';\n break;\n default:\n value += '\\n';\n }\n const end = start + header.length + scalar.source.length;\n return { value, type, comment: header.comment, range: [start, end, end] };\n}\nfunction parseBlockScalarHeader({ offset, props }, strict, onError) {\n /* istanbul ignore if should not happen */\n if (props[0].type !== 'block-scalar-header') {\n onError(props[0], 'IMPOSSIBLE', 'Block scalar header not found');\n return null;\n }\n const { source } = props[0];\n const mode = source[0];\n let indent = 0;\n let chomp = '';\n let error = -1;\n for (let i = 1; i < source.length; ++i) {\n const ch = source[i];\n if (!chomp && (ch === '-' || ch === '+'))\n chomp = ch;\n else {\n const n = Number(ch);\n if (!indent && n)\n indent = n;\n else if (error === -1)\n error = offset + i;\n }\n }\n if (error !== -1)\n onError(error, 'UNEXPECTED_TOKEN', `Block scalar header includes extra characters: ${source}`);\n let hasSpace = false;\n let comment = '';\n let length = source.length;\n for (let i = 1; i < props.length; ++i) {\n const token = props[i];\n switch (token.type) {\n case 'space':\n hasSpace = true;\n // fallthrough\n case 'newline':\n length += token.source.length;\n break;\n case 'comment':\n if (strict && !hasSpace) {\n const message = 'Comments must be separated from other tokens by white space characters';\n onError(token, 'MISSING_CHAR', message);\n }\n length += token.source.length;\n comment = token.source.substring(1);\n break;\n case 'error':\n onError(token, 'UNEXPECTED_TOKEN', token.message);\n length += token.source.length;\n break;\n /* istanbul ignore next should not happen */\n default: {\n const message = `Unexpected token in block scalar header: ${token.type}`;\n onError(token, 'UNEXPECTED_TOKEN', message);\n const ts = token.source;\n if (ts && typeof ts === 'string')\n length += ts.length;\n }\n }\n }\n return { mode, indent, chomp, comment, length };\n}\n/** @returns Array of lines split up as `[indent, content]` */\nfunction splitLines(source) {\n const split = source.split(/\\n( *)/);\n const first = split[0];\n const m = first.match(/^( *)/);\n const line0 = m?.[1]\n ? 
[m[1], first.slice(m[1].length)]\n : ['', first];\n const lines = [line0];\n for (let i = 1; i < split.length; i += 2)\n lines.push([split[i], split[i + 1]]);\n return lines;\n}\n\nexports.resolveBlockScalar = resolveBlockScalar;\n","'use strict';\n\nvar YAMLSeq = require('../nodes/YAMLSeq.js');\nvar resolveProps = require('./resolve-props.js');\nvar utilFlowIndentCheck = require('./util-flow-indent-check.js');\n\nfunction resolveBlockSeq({ composeNode, composeEmptyNode }, ctx, bs, onError) {\n const seq = new YAMLSeq.YAMLSeq(ctx.schema);\n if (ctx.atRoot)\n ctx.atRoot = false;\n let offset = bs.offset;\n let commentEnd = null;\n for (const { start, value } of bs.items) {\n const props = resolveProps.resolveProps(start, {\n indicator: 'seq-item-ind',\n next: value,\n offset,\n onError,\n startOnNewline: true\n });\n if (!props.found) {\n if (props.anchor || props.tag || value) {\n if (value && value.type === 'block-seq')\n onError(props.end, 'BAD_INDENT', 'All sequence items must start at the same column');\n else\n onError(offset, 'MISSING_CHAR', 'Sequence item without - indicator');\n }\n else {\n commentEnd = props.end;\n if (props.comment)\n seq.comment = props.comment;\n continue;\n }\n }\n const node = value\n ? composeNode(ctx, value, props, onError)\n : composeEmptyNode(ctx, props.end, start, null, props, onError);\n if (ctx.schema.compat)\n utilFlowIndentCheck.flowIndentCheck(bs.indent, value, onError);\n offset = node.range[2];\n seq.items.push(node);\n }\n seq.range = [bs.offset, offset, commentEnd ?? offset];\n return seq;\n}\n\nexports.resolveBlockSeq = resolveBlockSeq;\n","'use strict';\n\nfunction resolveEnd(end, offset, reqSpace, onError) {\n let comment = '';\n if (end) {\n let hasSpace = false;\n let sep = '';\n for (const token of end) {\n const { source, type } = token;\n switch (type) {\n case 'space':\n hasSpace = true;\n break;\n case 'comment': {\n if (reqSpace && !hasSpace)\n onError(token, 'MISSING_CHAR', 'Comments must be separated from other tokens by white space characters');\n const cb = source.substring(1) || ' ';\n if (!comment)\n comment = cb;\n else\n comment += sep + cb;\n sep = '';\n break;\n }\n case 'newline':\n if (comment)\n sep += source;\n hasSpace = true;\n break;\n default:\n onError(token, 'UNEXPECTED_TOKEN', `Unexpected ${type} at node end`);\n }\n offset += source.length;\n }\n }\n return { comment, offset };\n}\n\nexports.resolveEnd = resolveEnd;\n","'use strict';\n\nvar Node = require('../nodes/Node.js');\nvar Pair = require('../nodes/Pair.js');\nvar YAMLMap = require('../nodes/YAMLMap.js');\nvar YAMLSeq = require('../nodes/YAMLSeq.js');\nvar resolveEnd = require('./resolve-end.js');\nvar resolveProps = require('./resolve-props.js');\nvar utilContainsNewline = require('./util-contains-newline.js');\nvar utilMapIncludes = require('./util-map-includes.js');\n\nconst blockMsg = 'Block collections are not allowed within flow collections';\nconst isBlock = (token) => token && (token.type === 'block-map' || token.type === 'block-seq');\nfunction resolveFlowCollection({ composeNode, composeEmptyNode }, ctx, fc, onError) {\n const isMap = fc.start.source === '{';\n const fcName = isMap ? 'flow map' : 'flow sequence';\n const coll = isMap\n ? 
new YAMLMap.YAMLMap(ctx.schema)\n : new YAMLSeq.YAMLSeq(ctx.schema);\n coll.flow = true;\n const atRoot = ctx.atRoot;\n if (atRoot)\n ctx.atRoot = false;\n let offset = fc.offset + fc.start.source.length;\n for (let i = 0; i < fc.items.length; ++i) {\n const collItem = fc.items[i];\n const { start, key, sep, value } = collItem;\n const props = resolveProps.resolveProps(start, {\n flow: fcName,\n indicator: 'explicit-key-ind',\n next: key ?? sep?.[0],\n offset,\n onError,\n startOnNewline: false\n });\n if (!props.found) {\n if (!props.anchor && !props.tag && !sep && !value) {\n if (i === 0 && props.comma)\n onError(props.comma, 'UNEXPECTED_TOKEN', `Unexpected , in ${fcName}`);\n else if (i < fc.items.length - 1)\n onError(props.start, 'UNEXPECTED_TOKEN', `Unexpected empty item in ${fcName}`);\n if (props.comment) {\n if (coll.comment)\n coll.comment += '\\n' + props.comment;\n else\n coll.comment = props.comment;\n }\n offset = props.end;\n continue;\n }\n if (!isMap && ctx.options.strict && utilContainsNewline.containsNewline(key))\n onError(key, // checked by containsNewline()\n 'MULTILINE_IMPLICIT_KEY', 'Implicit keys of flow sequence pairs need to be on a single line');\n }\n if (i === 0) {\n if (props.comma)\n onError(props.comma, 'UNEXPECTED_TOKEN', `Unexpected , in ${fcName}`);\n }\n else {\n if (!props.comma)\n onError(props.start, 'MISSING_CHAR', `Missing , between ${fcName} items`);\n if (props.comment) {\n let prevItemComment = '';\n loop: for (const st of start) {\n switch (st.type) {\n case 'comma':\n case 'space':\n break;\n case 'comment':\n prevItemComment = st.source.substring(1);\n break loop;\n default:\n break loop;\n }\n }\n if (prevItemComment) {\n let prev = coll.items[coll.items.length - 1];\n if (Node.isPair(prev))\n prev = prev.value ?? prev.key;\n if (prev.comment)\n prev.comment += '\\n' + prevItemComment;\n else\n prev.comment = prevItemComment;\n props.comment = props.comment.substring(prevItemComment.length + 1);\n }\n }\n }\n if (!isMap && !sep && !props.found) {\n // item is a value in a seq\n // → key & sep are empty, start does not include ? or :\n const valueNode = value\n ? composeNode(ctx, value, props, onError)\n : composeEmptyNode(ctx, props.end, sep, null, props, onError);\n coll.items.push(valueNode);\n offset = valueNode.range[2];\n if (isBlock(value))\n onError(valueNode.range, 'BLOCK_IN_FLOW', blockMsg);\n }\n else {\n // item is a key+value pair\n // key value\n const keyStart = props.end;\n const keyNode = key\n ? composeNode(ctx, key, props, onError)\n : composeEmptyNode(ctx, keyStart, start, null, props, onError);\n if (isBlock(key))\n onError(keyNode.range, 'BLOCK_IN_FLOW', blockMsg);\n // value properties\n const valueProps = resolveProps.resolveProps(sep ?? 
[], {\n flow: fcName,\n indicator: 'map-value-ind',\n next: value,\n offset: keyNode.range[2],\n onError,\n startOnNewline: false\n });\n if (valueProps.found) {\n if (!isMap && !props.found && ctx.options.strict) {\n if (sep)\n for (const st of sep) {\n if (st === valueProps.found)\n break;\n if (st.type === 'newline') {\n onError(st, 'MULTILINE_IMPLICIT_KEY', 'Implicit keys of flow sequence pairs need to be on a single line');\n break;\n }\n }\n if (props.start < valueProps.found.offset - 1024)\n onError(valueProps.found, 'KEY_OVER_1024_CHARS', 'The : indicator must be at most 1024 chars after the start of an implicit flow sequence key');\n }\n }\n else if (value) {\n if ('source' in value && value.source && value.source[0] === ':')\n onError(value, 'MISSING_CHAR', `Missing space after : in ${fcName}`);\n else\n onError(valueProps.start, 'MISSING_CHAR', `Missing , or : between ${fcName} items`);\n }\n // value value\n const valueNode = value\n ? composeNode(ctx, value, valueProps, onError)\n : valueProps.found\n ? composeEmptyNode(ctx, valueProps.end, sep, null, valueProps, onError)\n : null;\n if (valueNode) {\n if (isBlock(value))\n onError(valueNode.range, 'BLOCK_IN_FLOW', blockMsg);\n }\n else if (valueProps.comment) {\n if (keyNode.comment)\n keyNode.comment += '\\n' + valueProps.comment;\n else\n keyNode.comment = valueProps.comment;\n }\n const pair = new Pair.Pair(keyNode, valueNode);\n if (ctx.options.keepSourceTokens)\n pair.srcToken = collItem;\n if (isMap) {\n const map = coll;\n if (utilMapIncludes.mapIncludes(ctx, map.items, keyNode))\n onError(keyStart, 'DUPLICATE_KEY', 'Map keys must be unique');\n map.items.push(pair);\n }\n else {\n const map = new YAMLMap.YAMLMap(ctx.schema);\n map.flow = true;\n map.items.push(pair);\n coll.items.push(map);\n }\n offset = valueNode ? valueNode.range[2] : valueProps.end;\n }\n }\n const expectedEnd = isMap ? '}' : ']';\n const [ce, ...ee] = fc.end;\n let cePos = offset;\n if (ce && ce.source === expectedEnd)\n cePos = ce.offset + ce.source.length;\n else {\n const name = fcName[0].toUpperCase() + fcName.substring(1);\n const msg = atRoot\n ? `${name} must end with a ${expectedEnd}`\n : `${name} in block collection must be sufficiently indented and end with a ${expectedEnd}`;\n onError(offset, atRoot ? 
'MISSING_CHAR' : 'BAD_INDENT', msg);\n if (ce && ce.source.length !== 1)\n ee.unshift(ce);\n }\n if (ee.length > 0) {\n const end = resolveEnd.resolveEnd(ee, cePos, ctx.options.strict, onError);\n if (end.comment) {\n if (coll.comment)\n coll.comment += '\\n' + end.comment;\n else\n coll.comment = end.comment;\n }\n coll.range = [fc.offset, cePos, end.offset];\n }\n else {\n coll.range = [fc.offset, cePos, cePos];\n }\n return coll;\n}\n\nexports.resolveFlowCollection = resolveFlowCollection;\n","'use strict';\n\nvar Scalar = require('../nodes/Scalar.js');\nvar resolveEnd = require('./resolve-end.js');\n\nfunction resolveFlowScalar(scalar, strict, onError) {\n const { offset, type, source, end } = scalar;\n let _type;\n let value;\n const _onError = (rel, code, msg) => onError(offset + rel, code, msg);\n switch (type) {\n case 'scalar':\n _type = Scalar.Scalar.PLAIN;\n value = plainValue(source, _onError);\n break;\n case 'single-quoted-scalar':\n _type = Scalar.Scalar.QUOTE_SINGLE;\n value = singleQuotedValue(source, _onError);\n break;\n case 'double-quoted-scalar':\n _type = Scalar.Scalar.QUOTE_DOUBLE;\n value = doubleQuotedValue(source, _onError);\n break;\n /* istanbul ignore next should not happen */\n default:\n onError(scalar, 'UNEXPECTED_TOKEN', `Expected a flow scalar value, but found: ${type}`);\n return {\n value: '',\n type: null,\n comment: '',\n range: [offset, offset + source.length, offset + source.length]\n };\n }\n const valueEnd = offset + source.length;\n const re = resolveEnd.resolveEnd(end, valueEnd, strict, onError);\n return {\n value,\n type: _type,\n comment: re.comment,\n range: [offset, valueEnd, re.offset]\n };\n}\nfunction plainValue(source, onError) {\n let badChar = '';\n switch (source[0]) {\n /* istanbul ignore next should not happen */\n case '\\t':\n badChar = 'a tab character';\n break;\n case ',':\n badChar = 'flow indicator character ,';\n break;\n case '%':\n badChar = 'directive indicator character %';\n break;\n case '|':\n case '>': {\n badChar = `block scalar indicator ${source[0]}`;\n break;\n }\n case '@':\n case '`': {\n badChar = `reserved character ${source[0]}`;\n break;\n }\n }\n if (badChar)\n onError(0, 'BAD_SCALAR_START', `Plain value cannot start with ${badChar}`);\n return foldLines(source);\n}\nfunction singleQuotedValue(source, onError) {\n if (source[source.length - 1] !== \"'\" || source.length === 1)\n onError(source.length, 'MISSING_CHAR', \"Missing closing 'quote\");\n return foldLines(source.slice(1, -1)).replace(/''/g, \"'\");\n}\nfunction foldLines(source) {\n /**\n * The negative lookbehind here and in the `re` RegExp is to\n * prevent causing a polynomial search time in certain cases.\n *\n * The try-catch is for Safari, which doesn't support this yet:\n * https://caniuse.com/js-regexp-lookbehind\n */\n let first, line;\n try {\n first = new RegExp('(.*?)(? wsStart ? 
<![ \\t])[ \\t]*\\r?\\n', 'sy');\n line = new RegExp('[ \\t]*(.*?)(?:(?<![ \\t])[ \\t]*)?\\r?\\n', 'sy');\n }\n catch (_) {\n first = /(.*?)[ \\t]*\\r?\\n/sy;\n line = /[ \\t]*(.*?)[ \\t]*\\r?\\n/sy;\n }\n let match = first.exec(source);\n if (!match)\n return source;\n let res = match[1];\n let sep = ' ';\n let pos = first.lastIndex;\n line.lastIndex = pos;\n while ((match = line.exec(source))) {\n if (match[1] === '') {\n if (sep === '\\n')\n res += sep;\n else\n sep = '\\n';\n }\n else {\n res += sep + match[1];\n sep = ' ';\n }\n pos = line.lastIndex;\n }\n const last = /[ \\t]*(.*)/sy;\n last.lastIndex = pos;\n match = last.exec(source);\n return res + sep + (match?.[1] ?? '');\n}\nfunction doubleQuotedValue(source, onError) {\n let res = '';\n for (let i = 1; i < source.length - 1; ++i) {\n const ch = source[i];\n if (ch === '\\r' && source[i + 1] === '\\n')\n continue;\n if (ch === '\\n') {\n const { fold, offset } = foldNewline(source, i);\n res += fold;\n i = offset;\n }\n else if (ch === '\\\\') {\n let next = source[++i];\n const cc = escapeCodes[next];\n if (cc)\n res += cc;\n else if (next === '\\n') {\n // skip escaped newlines, but still trim the following line\n next = source[i + 1];\n while (next === ' ' || next === '\\t')\n next = source[++i + 1];\n }\n else if (next === '\\r' && source[i + 1] === '\\n') {\n // skip escaped CRLF newlines, but still trim the following line\n next = source[++i + 1];\n while (next === ' ' || next === '\\t')\n next = source[++i + 1];\n }\n else if (next === 'x' || next === 'u' || next === 'U') {\n const length = { x: 2, u: 4, U: 8 }[next];\n res += parseCharCode(source, i + 1, length, onError);\n i += length;\n }\n else {\n // If not one of the above, just add the next char to the string verbatim\n res += '\\\\' + next;\n }\n }\n else if (ch === ' ' || ch === '\\t') {\n // trim trailing whitespace\n const wsStart = i;\n let next = source[i + 1];\n while (next === ' ' || next === '\\t')\n next = source[++i + 1];\n if (next !== '\\n' && !(next === '\\r' && source[i + 2] === '\\n'))\n res += i >
source.slice(wsStart, i + 1) : ch;\n }\n else {\n res += ch;\n }\n }\n if (source[source.length - 1] !== '\"' || source.length === 1)\n onError(source.length, 'MISSING_CHAR', 'Missing closing \"quote');\n return res;\n}\n/**\n * Fold a single newline into a space, multiple newlines to N - 1 newlines.\n * Presumes `source[offset] === '\\n'`\n */\nfunction foldNewline(source, offset) {\n let fold = '';\n let ch = source[offset + 1];\n while (ch === ' ' || ch === '\\t' || ch === '\\n' || ch === '\\r') {\n if (ch === '\\r' && source[offset + 2] !== '\\n')\n break;\n if (ch === '\\n')\n fold += '\\n';\n offset += 1;\n ch = source[offset + 1];\n }\n if (!fold)\n fold = ' ';\n return { fold, offset };\n}\nconst escapeCodes = {\n '0': '\\0',\n a: '\\x07',\n b: '\\b',\n e: '\\x1b',\n f: '\\f',\n n: '\\n',\n r: '\\r',\n t: '\\t',\n v: '\\v',\n N: '\\u0085',\n _: '\\u00a0',\n L: '\\u2028',\n P: '\\u2029',\n ' ': ' ',\n '\"': '\"',\n '/': '/',\n '\\\\': '\\\\',\n '\\t': '\\t'\n};\nfunction parseCharCode(source, offset, length, onError) {\n const cc = source.substr(offset, length);\n const ok = cc.length === length && /^[0-9a-fA-F]+$/.test(cc);\n const code = ok ? parseInt(cc, 16) : NaN;\n if (isNaN(code)) {\n const raw = source.substr(offset - 2, length + 2);\n onError(offset - 2, 'BAD_DQ_ESCAPE', `Invalid escape sequence ${raw}`);\n return raw;\n }\n return String.fromCodePoint(code);\n}\n\nexports.resolveFlowScalar = resolveFlowScalar;\n","'use strict';\n\nfunction resolveProps(tokens, { flow, indicator, next, offset, onError, startOnNewline }) {\n let spaceBefore = false;\n let atNewline = startOnNewline;\n let hasSpace = startOnNewline;\n let comment = '';\n let commentSep = '';\n let hasNewline = false;\n let hasNewlineAfterProp = false;\n let reqSpace = false;\n let anchor = null;\n let tag = null;\n let comma = null;\n let found = null;\n let start = null;\n for (const token of tokens) {\n if (reqSpace) {\n if (token.type !== 'space' &&\n token.type !== 'newline' &&\n token.type !== 'comma')\n onError(token.offset, 'MISSING_CHAR', 'Tags and anchors must be separated from the next token by white space');\n reqSpace = false;\n }\n switch (token.type) {\n case 'space':\n // At the doc level, tabs at line start may be parsed\n // as leading white space rather than indentation.\n // In a flow collection, only the parser handles indent.\n if (!flow &&\n atNewline &&\n indicator !== 'doc-start' &&\n token.source[0] === '\\t')\n onError(token, 'TAB_AS_INDENT', 'Tabs are not allowed as indentation');\n hasSpace = true;\n break;\n case 'comment': {\n if (!hasSpace)\n onError(token, 'MISSING_CHAR', 'Comments must be separated from other tokens by white space characters');\n const cb = token.source.substring(1) || ' ';\n if (!comment)\n comment = cb;\n else\n comment += commentSep + cb;\n commentSep = '';\n atNewline = false;\n break;\n }\n case 'newline':\n if (atNewline) {\n if (comment)\n comment += token.source;\n else\n spaceBefore = true;\n }\n else\n commentSep += token.source;\n atNewline = true;\n hasNewline = true;\n if (anchor || tag)\n hasNewlineAfterProp = true;\n hasSpace = true;\n break;\n case 'anchor':\n if (anchor)\n onError(token, 'MULTIPLE_ANCHORS', 'A node can have at most one anchor');\n if (token.source.endsWith(':'))\n onError(token.offset + token.source.length - 1, 'BAD_ALIAS', 'Anchor ending in : is ambiguous', true);\n anchor = token;\n if (start === null)\n start = token.offset;\n atNewline = false;\n hasSpace = false;\n reqSpace = true;\n break;\n case 'tag': {\n if (tag)\n 
onError(token, 'MULTIPLE_TAGS', 'A node can have at most one tag');\n tag = token;\n if (start === null)\n start = token.offset;\n atNewline = false;\n hasSpace = false;\n reqSpace = true;\n break;\n }\n case indicator:\n // Could here handle preceding comments differently\n if (anchor || tag)\n onError(token, 'BAD_PROP_ORDER', `Anchors and tags must be after the ${token.source} indicator`);\n if (found)\n onError(token, 'UNEXPECTED_TOKEN', `Unexpected ${token.source} in ${flow ?? 'collection'}`);\n found = token;\n atNewline = false;\n hasSpace = false;\n break;\n case 'comma':\n if (flow) {\n if (comma)\n onError(token, 'UNEXPECTED_TOKEN', `Unexpected , in ${flow}`);\n comma = token;\n atNewline = false;\n hasSpace = false;\n break;\n }\n // else fallthrough\n default:\n onError(token, 'UNEXPECTED_TOKEN', `Unexpected ${token.type} token`);\n atNewline = false;\n hasSpace = false;\n }\n }\n const last = tokens[tokens.length - 1];\n const end = last ? last.offset + last.source.length : offset;\n if (reqSpace &&\n next &&\n next.type !== 'space' &&\n next.type !== 'newline' &&\n next.type !== 'comma' &&\n (next.type !== 'scalar' || next.source !== ''))\n onError(next.offset, 'MISSING_CHAR', 'Tags and anchors must be separated from the next token by white space');\n return {\n comma,\n found,\n spaceBefore,\n comment,\n hasNewline,\n hasNewlineAfterProp,\n anchor,\n tag,\n end,\n start: start ?? end\n };\n}\n\nexports.resolveProps = resolveProps;\n","'use strict';\n\nfunction containsNewline(key) {\n if (!key)\n return null;\n switch (key.type) {\n case 'alias':\n case 'scalar':\n case 'double-quoted-scalar':\n case 'single-quoted-scalar':\n if (key.source.includes('\\n'))\n return true;\n if (key.end)\n for (const st of key.end)\n if (st.type === 'newline')\n return true;\n return false;\n case 'flow-collection':\n for (const it of key.items) {\n for (const st of it.start)\n if (st.type === 'newline')\n return true;\n if (it.sep)\n for (const st of it.sep)\n if (st.type === 'newline')\n return true;\n if (containsNewline(it.key) || containsNewline(it.value))\n return true;\n }\n return false;\n default:\n return true;\n }\n}\n\nexports.containsNewline = containsNewline;\n","'use strict';\n\nfunction emptyScalarPosition(offset, before, pos) {\n if (before) {\n if (pos === null)\n pos = before.length;\n for (let i = pos - 1; i >= 0; --i) {\n let st = before[i];\n switch (st.type) {\n case 'space':\n case 'comment':\n case 'newline':\n offset -= st.source.length;\n continue;\n }\n // Technically, an empty scalar is immediately after the last non-empty\n // node, but it's more useful to place it after any whitespace.\n st = before[++i];\n while (st?.type === 'space') {\n offset += st.source.length;\n st = before[++i];\n }\n break;\n }\n }\n return offset;\n}\n\nexports.emptyScalarPosition = emptyScalarPosition;\n","'use strict';\n\nvar utilContainsNewline = require('./util-contains-newline.js');\n\nfunction flowIndentCheck(indent, fc, onError) {\n if (fc?.type === 'flow-collection') {\n const end = fc.end[0];\n if (end.indent === indent &&\n (end.source === ']' || end.source === '}') &&\n utilContainsNewline.containsNewline(fc)) {\n const msg = 'Flow end indicator should be more indented than parent';\n onError(end, 'BAD_INDENT', msg, true);\n }\n }\n}\n\nexports.flowIndentCheck = flowIndentCheck;\n","'use strict';\n\nvar Node = require('../nodes/Node.js');\n\nfunction mapIncludes(ctx, items, search) {\n const { uniqueKeys } = ctx.options;\n if (uniqueKeys === false)\n return false;\n const 
isEqual = typeof uniqueKeys === 'function'\n ? uniqueKeys\n : (a, b) => a === b ||\n (Node.isScalar(a) &&\n Node.isScalar(b) &&\n a.value === b.value &&\n !(a.value === '<<' && ctx.schema.merge));\n return items.some(pair => isEqual(pair.key, search));\n}\n\nexports.mapIncludes = mapIncludes;\n","'use strict';\n\nvar Alias = require('../nodes/Alias.js');\nvar Collection = require('../nodes/Collection.js');\nvar Node = require('../nodes/Node.js');\nvar Pair = require('../nodes/Pair.js');\nvar toJS = require('../nodes/toJS.js');\nvar Schema = require('../schema/Schema.js');\nvar stringify = require('../stringify/stringify.js');\nvar stringifyDocument = require('../stringify/stringifyDocument.js');\nvar anchors = require('./anchors.js');\nvar applyReviver = require('./applyReviver.js');\nvar createNode = require('./createNode.js');\nvar directives = require('./directives.js');\n\nclass Document {\n constructor(value, replacer, options) {\n /** A comment before this Document */\n this.commentBefore = null;\n /** A comment immediately after this Document */\n this.comment = null;\n /** Errors encountered during parsing. */\n this.errors = [];\n /** Warnings encountered during parsing. */\n this.warnings = [];\n Object.defineProperty(this, Node.NODE_TYPE, { value: Node.DOC });\n let _replacer = null;\n if (typeof replacer === 'function' || Array.isArray(replacer)) {\n _replacer = replacer;\n }\n else if (options === undefined && replacer) {\n options = replacer;\n replacer = undefined;\n }\n const opt = Object.assign({\n intAsBigInt: false,\n keepSourceTokens: false,\n logLevel: 'warn',\n prettyErrors: true,\n strict: true,\n uniqueKeys: true,\n version: '1.2'\n }, options);\n this.options = opt;\n let { version } = opt;\n if (options?._directives) {\n this.directives = options._directives.atDocument();\n if (this.directives.yaml.explicit)\n version = this.directives.yaml.version;\n }\n else\n this.directives = new directives.Directives({ version });\n this.setSchema(version, options);\n if (value === undefined)\n this.contents = null;\n else {\n this.contents = this.createNode(value, _replacer, options);\n }\n }\n /**\n * Create a deep copy of this Document and its contents.\n *\n * Custom Node values that inherit from `Object` still refer to their original instances.\n */\n clone() {\n const copy = Object.create(Document.prototype, {\n [Node.NODE_TYPE]: { value: Node.DOC }\n });\n copy.commentBefore = this.commentBefore;\n copy.comment = this.comment;\n copy.errors = this.errors.slice();\n copy.warnings = this.warnings.slice();\n copy.options = Object.assign({}, this.options);\n if (this.directives)\n copy.directives = this.directives.clone();\n copy.schema = this.schema.clone();\n copy.contents = Node.isNode(this.contents)\n ? this.contents.clone(copy.schema)\n : this.contents;\n if (this.range)\n copy.range = this.range.slice();\n return copy;\n }\n /** Adds a value to the document. */\n add(value) {\n if (assertCollection(this.contents))\n this.contents.add(value);\n }\n /** Adds a value to the document. 
*/\n addIn(path, value) {\n if (assertCollection(this.contents))\n this.contents.addIn(path, value);\n }\n /**\n * Create a new `Alias` node, ensuring that the target `node` has the required anchor.\n *\n * If `node` already has an anchor, `name` is ignored.\n * Otherwise, the `node.anchor` value will be set to `name`,\n * or if an anchor with that name is already present in the document,\n * `name` will be used as a prefix for a new unique anchor.\n * If `name` is undefined, the generated anchor will use 'a' as a prefix.\n */\n createAlias(node, name) {\n if (!node.anchor) {\n const prev = anchors.anchorNames(this);\n node.anchor =\n // eslint-disable-next-line @typescript-eslint/prefer-nullish-coalescing\n !name || prev.has(name) ? anchors.findNewAnchor(name || 'a', prev) : name;\n }\n return new Alias.Alias(node.anchor);\n }\n createNode(value, replacer, options) {\n let _replacer = undefined;\n if (typeof replacer === 'function') {\n value = replacer.call({ '': value }, '', value);\n _replacer = replacer;\n }\n else if (Array.isArray(replacer)) {\n const keyToStr = (v) => typeof v === 'number' || v instanceof String || v instanceof Number;\n const asStr = replacer.filter(keyToStr).map(String);\n if (asStr.length > 0)\n replacer = replacer.concat(asStr);\n _replacer = replacer;\n }\n else if (options === undefined && replacer) {\n options = replacer;\n replacer = undefined;\n }\n const { aliasDuplicateObjects, anchorPrefix, flow, keepUndefined, onTagObj, tag } = options ?? {};\n const { onAnchor, setAnchors, sourceObjects } = anchors.createNodeAnchors(this, \n // eslint-disable-next-line @typescript-eslint/prefer-nullish-coalescing\n anchorPrefix || 'a');\n const ctx = {\n aliasDuplicateObjects: aliasDuplicateObjects ?? true,\n keepUndefined: keepUndefined ?? false,\n onAnchor,\n onTagObj,\n replacer: _replacer,\n schema: this.schema,\n sourceObjects\n };\n const node = createNode.createNode(value, tag, ctx);\n if (flow && Node.isCollection(node))\n node.flow = true;\n setAnchors();\n return node;\n }\n /**\n * Convert a key and a value into a `Pair` using the current schema,\n * recursively wrapping all values as `Scalar` or `Collection` nodes.\n */\n createPair(key, value, options = {}) {\n const k = this.createNode(key, null, options);\n const v = this.createNode(value, null, options);\n return new Pair.Pair(k, v);\n }\n /**\n * Removes a value from the document.\n * @returns `true` if the item was found and removed.\n */\n delete(key) {\n return assertCollection(this.contents) ? this.contents.delete(key) : false;\n }\n /**\n * Removes a value from the document.\n * @returns `true` if the item was found and removed.\n */\n deleteIn(path) {\n if (Collection.isEmptyPath(path)) {\n if (this.contents == null)\n return false;\n this.contents = null;\n return true;\n }\n return assertCollection(this.contents)\n ? this.contents.deleteIn(path)\n : false;\n }\n /**\n * Returns item at `key`, or `undefined` if not found. By default unwraps\n * scalar values from their surrounding node; to disable set `keepScalar` to\n * `true` (collections are always returned intact).\n */\n get(key, keepScalar) {\n return Node.isCollection(this.contents)\n ? this.contents.get(key, keepScalar)\n : undefined;\n }\n /**\n * Returns item at `path`, or `undefined` if not found. 
By default unwraps\n * scalar values from their surrounding node; to disable set `keepScalar` to\n * `true` (collections are always returned intact).\n */\n getIn(path, keepScalar) {\n if (Collection.isEmptyPath(path))\n return !keepScalar && Node.isScalar(this.contents)\n ? this.contents.value\n : this.contents;\n return Node.isCollection(this.contents)\n ? this.contents.getIn(path, keepScalar)\n : undefined;\n }\n /**\n * Checks if the document includes a value with the key `key`.\n */\n has(key) {\n return Node.isCollection(this.contents) ? this.contents.has(key) : false;\n }\n /**\n * Checks if the document includes a value at `path`.\n */\n hasIn(path) {\n if (Collection.isEmptyPath(path))\n return this.contents !== undefined;\n return Node.isCollection(this.contents) ? this.contents.hasIn(path) : false;\n }\n /**\n * Sets a value in this document. For `!!set`, `value` needs to be a\n * boolean to add/remove the item from the set.\n */\n set(key, value) {\n if (this.contents == null) {\n this.contents = Collection.collectionFromPath(this.schema, [key], value);\n }\n else if (assertCollection(this.contents)) {\n this.contents.set(key, value);\n }\n }\n /**\n * Sets a value in this document. For `!!set`, `value` needs to be a\n * boolean to add/remove the item from the set.\n */\n setIn(path, value) {\n if (Collection.isEmptyPath(path))\n this.contents = value;\n else if (this.contents == null) {\n this.contents = Collection.collectionFromPath(this.schema, Array.from(path), value);\n }\n else if (assertCollection(this.contents)) {\n this.contents.setIn(path, value);\n }\n }\n /**\n * Change the YAML version and schema used by the document.\n * A `null` version disables support for directives, explicit tags, anchors, and aliases.\n * It also requires the `schema` option to be given as a `Schema` instance value.\n *\n * Overrides all previously set schema options.\n */\n setSchema(version, options = {}) {\n if (typeof version === 'number')\n version = String(version);\n let opt;\n switch (version) {\n case '1.1':\n if (this.directives)\n this.directives.yaml.version = '1.1';\n else\n this.directives = new directives.Directives({ version: '1.1' });\n opt = { merge: true, resolveKnownTags: false, schema: 'yaml-1.1' };\n break;\n case '1.2':\n case 'next':\n if (this.directives)\n this.directives.yaml.version = version;\n else\n this.directives = new directives.Directives({ version });\n opt = { merge: false, resolveKnownTags: true, schema: 'core' };\n break;\n case null:\n if (this.directives)\n delete this.directives;\n opt = null;\n break;\n default: {\n const sv = JSON.stringify(version);\n throw new Error(`Expected '1.1', '1.2' or null as first argument, but found: ${sv}`);\n }\n }\n // Not using `instanceof Schema` to allow for duck typing\n if (options.schema instanceof Object)\n this.schema = options.schema;\n else if (opt)\n this.schema = new Schema.Schema(Object.assign(opt, options));\n else\n throw new Error(`With a null YAML version, the { schema: Schema } option is required`);\n }\n // json & jsonArg are only used from toJSON()\n toJS({ json, jsonArg, mapAsMap, maxAliasCount, onAnchor, reviver } = {}) {\n const ctx = {\n anchors: new Map(),\n doc: this,\n keep: !json,\n mapAsMap: mapAsMap === true,\n mapKeyWarned: false,\n maxAliasCount: typeof maxAliasCount === 'number' ? maxAliasCount : 100,\n stringify: stringify.stringify\n };\n const res = toJS.toJS(this.contents, jsonArg ?? 
'', ctx);\n if (typeof onAnchor === 'function')\n for (const { count, res } of ctx.anchors.values())\n onAnchor(res, count);\n return typeof reviver === 'function'\n ? applyReviver.applyReviver(reviver, { '': res }, '', res)\n : res;\n }\n /**\n * A JSON representation of the document `contents`.\n *\n * @param jsonArg Used by `JSON.stringify` to indicate the array index or\n * property name.\n */\n toJSON(jsonArg, onAnchor) {\n return this.toJS({ json: true, jsonArg, mapAsMap: false, onAnchor });\n }\n /** A YAML representation of the document. */\n toString(options = {}) {\n if (this.errors.length > 0)\n throw new Error('Document with errors cannot be stringified');\n if ('indent' in options &&\n (!Number.isInteger(options.indent) || Number(options.indent) <= 0)) {\n const s = JSON.stringify(options.indent);\n throw new Error(`\"indent\" option must be a positive integer, not ${s}`);\n }\n return stringifyDocument.stringifyDocument(this, options);\n }\n}\nfunction assertCollection(contents) {\n if (Node.isCollection(contents))\n return true;\n throw new Error('Expected a YAML collection as document contents');\n}\n\nexports.Document = Document;\n","'use strict';\n\nvar Node = require('../nodes/Node.js');\nvar visit = require('../visit.js');\n\n/**\n * Verify that the input string is a valid anchor.\n *\n * Will throw on errors.\n */\nfunction anchorIsValid(anchor) {\n if (/[\\x00-\\x19\\s,[\\]{}]/.test(anchor)) {\n const sa = JSON.stringify(anchor);\n const msg = `Anchor must not contain whitespace or control characters: ${sa}`;\n throw new Error(msg);\n }\n return true;\n}\nfunction anchorNames(root) {\n const anchors = new Set();\n visit.visit(root, {\n Value(_key, node) {\n if (node.anchor)\n anchors.add(node.anchor);\n }\n });\n return anchors;\n}\n/** Find a new anchor name with the given `prefix` and a one-indexed suffix. */\nfunction findNewAnchor(prefix, exclude) {\n for (let i = 1; true; ++i) {\n const name = `${prefix}${i}`;\n if (!exclude.has(name))\n return name;\n }\n}\nfunction createNodeAnchors(doc, prefix) {\n const aliasObjects = [];\n const sourceObjects = new Map();\n let prevAnchors = null;\n return {\n onAnchor: (source) => {\n aliasObjects.push(source);\n if (!prevAnchors)\n prevAnchors = anchorNames(doc);\n const anchor = findNewAnchor(prefix, prevAnchors);\n prevAnchors.add(anchor);\n return anchor;\n },\n /**\n * With circular references, the source node is only resolved after all\n * of its child nodes are. 
This is why anchors are set only after all of\n * the nodes have been created.\n */\n setAnchors: () => {\n for (const source of aliasObjects) {\n const ref = sourceObjects.get(source);\n if (typeof ref === 'object' &&\n ref.anchor &&\n (Node.isScalar(ref.node) || Node.isCollection(ref.node))) {\n ref.node.anchor = ref.anchor;\n }\n else {\n const error = new Error('Failed to resolve repeated object (this should not happen)');\n error.source = source;\n throw error;\n }\n }\n },\n sourceObjects\n };\n}\n\nexports.anchorIsValid = anchorIsValid;\nexports.anchorNames = anchorNames;\nexports.createNodeAnchors = createNodeAnchors;\nexports.findNewAnchor = findNewAnchor;\n","'use strict';\n\n/**\n * Applies the JSON.parse reviver algorithm as defined in the ECMA-262 spec,\n * in section 24.5.1.1 \"Runtime Semantics: InternalizeJSONProperty\" of the\n * 2021 edition: https://tc39.es/ecma262/#sec-json.parse\n *\n * Includes extensions for handling Map and Set objects.\n */\nfunction applyReviver(reviver, obj, key, val) {\n if (val && typeof val === 'object') {\n if (Array.isArray(val)) {\n for (let i = 0, len = val.length; i < len; ++i) {\n const v0 = val[i];\n const v1 = applyReviver(reviver, val, String(i), v0);\n if (v1 === undefined)\n delete val[i];\n else if (v1 !== v0)\n val[i] = v1;\n }\n }\n else if (val instanceof Map) {\n for (const k of Array.from(val.keys())) {\n const v0 = val.get(k);\n const v1 = applyReviver(reviver, val, k, v0);\n if (v1 === undefined)\n val.delete(k);\n else if (v1 !== v0)\n val.set(k, v1);\n }\n }\n else if (val instanceof Set) {\n for (const v0 of Array.from(val)) {\n const v1 = applyReviver(reviver, val, v0, v0);\n if (v1 === undefined)\n val.delete(v0);\n else if (v1 !== v0) {\n val.delete(v0);\n val.add(v1);\n }\n }\n }\n else {\n for (const [k, v0] of Object.entries(val)) {\n const v1 = applyReviver(reviver, val, k, v0);\n if (v1 === undefined)\n delete val[k];\n else if (v1 !== v0)\n val[k] = v1;\n }\n }\n }\n return reviver.call(obj, key, val);\n}\n\nexports.applyReviver = applyReviver;\n","'use strict';\n\nvar Alias = require('../nodes/Alias.js');\nvar Node = require('../nodes/Node.js');\nvar Scalar = require('../nodes/Scalar.js');\n\nconst defaultTagPrefix = 'tag:yaml.org,2002:';\nfunction findTagObject(value, tagName, tags) {\n if (tagName) {\n const match = tags.filter(t => t.tag === tagName);\n const tagObj = match.find(t => !t.format) ?? match[0];\n if (!tagObj)\n throw new Error(`Tag ${tagName} not found`);\n return tagObj;\n }\n return tags.find(t => t.identify?.(value) && !t.format);\n}\nfunction createNode(value, tagName, ctx) {\n if (Node.isDocument(value))\n value = value.contents;\n if (Node.isNode(value))\n return value;\n if (Node.isPair(value)) {\n const map = ctx.schema[Node.MAP].createNode?.(ctx.schema, null, ctx);\n map.items.push(value);\n return map;\n }\n if (value instanceof String ||\n value instanceof Number ||\n value instanceof Boolean ||\n (typeof BigInt !== 'undefined' && value instanceof BigInt) // not supported everywhere\n ) {\n // https://tc39.es/ecma262/#sec-serializejsonproperty\n value = value.valueOf();\n }\n const { aliasDuplicateObjects, onAnchor, onTagObj, schema, sourceObjects } = ctx;\n // Detect duplicate references to the same object & use Alias nodes for all\n // after first. 
The `ref` wrapper allows for circular references to resolve.\n let ref = undefined;\n if (aliasDuplicateObjects && value && typeof value === 'object') {\n ref = sourceObjects.get(value);\n if (ref) {\n if (!ref.anchor)\n ref.anchor = onAnchor(value);\n return new Alias.Alias(ref.anchor);\n }\n else {\n ref = { anchor: null, node: null };\n sourceObjects.set(value, ref);\n }\n }\n if (tagName?.startsWith('!!'))\n tagName = defaultTagPrefix + tagName.slice(2);\n let tagObj = findTagObject(value, tagName, schema.tags);\n if (!tagObj) {\n if (value && typeof value.toJSON === 'function') {\n // eslint-disable-next-line @typescript-eslint/no-unsafe-call\n value = value.toJSON();\n }\n if (!value || typeof value !== 'object') {\n const node = new Scalar.Scalar(value);\n if (ref)\n ref.node = node;\n return node;\n }\n tagObj =\n value instanceof Map\n ? schema[Node.MAP]\n : Symbol.iterator in Object(value)\n ? schema[Node.SEQ]\n : schema[Node.MAP];\n }\n if (onTagObj) {\n onTagObj(tagObj);\n delete ctx.onTagObj;\n }\n const node = tagObj?.createNode\n ? tagObj.createNode(ctx.schema, value, ctx)\n : new Scalar.Scalar(value);\n if (tagName)\n node.tag = tagName;\n if (ref)\n ref.node = node;\n return node;\n}\n\nexports.createNode = createNode;\n","'use strict';\n\nvar Node = require('../nodes/Node.js');\nvar visit = require('../visit.js');\n\nconst escapeChars = {\n '!': '%21',\n ',': '%2C',\n '[': '%5B',\n ']': '%5D',\n '{': '%7B',\n '}': '%7D'\n};\nconst escapeTagName = (tn) => tn.replace(/[!,[\\]{}]/g, ch => escapeChars[ch]);\nclass Directives {\n constructor(yaml, tags) {\n /**\n * The directives-end/doc-start marker `---`. If `null`, a marker may still be\n * included in the document's stringified representation.\n */\n this.docStart = null;\n /** The doc-end marker `...`. 
*/\n this.docEnd = false;\n this.yaml = Object.assign({}, Directives.defaultYaml, yaml);\n this.tags = Object.assign({}, Directives.defaultTags, tags);\n }\n clone() {\n const copy = new Directives(this.yaml, this.tags);\n copy.docStart = this.docStart;\n return copy;\n }\n /**\n * During parsing, get a Directives instance for the current document and\n * update the stream state according to the current version's spec.\n */\n atDocument() {\n const res = new Directives(this.yaml, this.tags);\n switch (this.yaml.version) {\n case '1.1':\n this.atNextDocument = true;\n break;\n case '1.2':\n this.atNextDocument = false;\n this.yaml = {\n explicit: Directives.defaultYaml.explicit,\n version: '1.2'\n };\n this.tags = Object.assign({}, Directives.defaultTags);\n break;\n }\n return res;\n }\n /**\n * @param onError - May be called even if the action was successful\n * @returns `true` on success\n */\n add(line, onError) {\n if (this.atNextDocument) {\n this.yaml = { explicit: Directives.defaultYaml.explicit, version: '1.1' };\n this.tags = Object.assign({}, Directives.defaultTags);\n this.atNextDocument = false;\n }\n const parts = line.trim().split(/[ \\t]+/);\n const name = parts.shift();\n switch (name) {\n case '%TAG': {\n if (parts.length !== 2) {\n onError(0, '%TAG directive should contain exactly two parts');\n if (parts.length < 2)\n return false;\n }\n const [handle, prefix] = parts;\n this.tags[handle] = prefix;\n return true;\n }\n case '%YAML': {\n this.yaml.explicit = true;\n if (parts.length !== 1) {\n onError(0, '%YAML directive should contain exactly one part');\n return false;\n }\n const [version] = parts;\n if (version === '1.1' || version === '1.2') {\n this.yaml.version = version;\n return true;\n }\n else {\n const isValid = /^\\d+\\.\\d+$/.test(version);\n onError(6, `Unsupported YAML version ${version}`, isValid);\n return false;\n }\n }\n default:\n onError(0, `Unknown directive ${name}`, true);\n return false;\n }\n }\n /**\n * Resolves a tag, matching handles to those defined in %TAG directives.\n *\n * @returns Resolved tag, which may also be the non-specific tag `'!'` or a\n * `'!local'` tag, or `null` if unresolvable.\n */\n tagName(source, onError) {\n if (source === '!')\n return '!'; // non-specific tag\n if (source[0] !== '!') {\n onError(`Not a valid tag: ${source}`);\n return null;\n }\n if (source[1] === '<') {\n const verbatim = source.slice(2, -1);\n if (verbatim === '!' || verbatim === '!!') {\n onError(`Verbatim tags aren't resolved, so ${source} is invalid.`);\n return null;\n }\n if (source[source.length - 1] !== '>')\n onError('Verbatim tags must end with a >');\n return verbatim;\n }\n const [, handle, suffix] = source.match(/^(.*!)([^!]*)$/);\n if (!suffix)\n onError(`The ${source} tag has no suffix`);\n const prefix = this.tags[handle];\n if (prefix)\n return prefix + decodeURIComponent(suffix);\n if (handle === '!')\n return source; // local tag\n onError(`Could not resolve tag: ${source}`);\n return null;\n }\n /**\n * Given a fully resolved tag, returns its printable string form,\n * taking into account current tag prefixes and defaults.\n */\n tagString(tag) {\n for (const [handle, prefix] of Object.entries(this.tags)) {\n if (tag.startsWith(prefix))\n return handle + escapeTagName(tag.substring(prefix.length));\n }\n return tag[0] === '!' ? tag : `!<${tag}>`;\n }\n toString(doc) {\n const lines = this.yaml.explicit\n ? 
[`%YAML ${this.yaml.version || '1.2'}`]\n : [];\n const tagEntries = Object.entries(this.tags);\n let tagNames;\n if (doc && tagEntries.length > 0 && Node.isNode(doc.contents)) {\n const tags = {};\n visit.visit(doc.contents, (_key, node) => {\n if (Node.isNode(node) && node.tag)\n tags[node.tag] = true;\n });\n tagNames = Object.keys(tags);\n }\n else\n tagNames = [];\n for (const [handle, prefix] of tagEntries) {\n if (handle === '!!' && prefix === 'tag:yaml.org,2002:')\n continue;\n if (!doc || tagNames.some(tn => tn.startsWith(prefix)))\n lines.push(`%TAG ${handle} ${prefix}`);\n }\n return lines.join('\\n');\n }\n}\nDirectives.defaultYaml = { explicit: false, version: '1.2' };\nDirectives.defaultTags = { '!!': 'tag:yaml.org,2002:' };\n\nexports.Directives = Directives;\n","'use strict';\n\nclass YAMLError extends Error {\n constructor(name, pos, code, message) {\n super();\n this.name = name;\n this.code = code;\n this.message = message;\n this.pos = pos;\n }\n}\nclass YAMLParseError extends YAMLError {\n constructor(pos, code, message) {\n super('YAMLParseError', pos, code, message);\n }\n}\nclass YAMLWarning extends YAMLError {\n constructor(pos, code, message) {\n super('YAMLWarning', pos, code, message);\n }\n}\nconst prettifyError = (src, lc) => (error) => {\n if (error.pos[0] === -1)\n return;\n error.linePos = error.pos.map(pos => lc.linePos(pos));\n const { line, col } = error.linePos[0];\n error.message += ` at line ${line}, column ${col}`;\n let ci = col - 1;\n let lineStr = src\n .substring(lc.lineStarts[line - 1], lc.lineStarts[line])\n .replace(/[\\n\\r]+$/, '');\n // Trim to max 80 chars, keeping col position near the middle\n if (ci >= 60 && lineStr.length > 80) {\n const trimStart = Math.min(ci - 39, lineStr.length - 79);\n lineStr = '…' + lineStr.substring(trimStart);\n ci -= trimStart - 1;\n }\n if (lineStr.length > 80)\n lineStr = lineStr.substring(0, 79) + '…';\n // Include previous line in context if pointing at line start\n if (line > 1 && /^ *$/.test(lineStr.substring(0, ci))) {\n // Regexp won't match if start is trimmed\n let prev = src.substring(lc.lineStarts[line - 2], lc.lineStarts[line - 1]);\n if (prev.length > 80)\n prev = prev.substring(0, 79) + '…\\n';\n lineStr = prev + lineStr;\n }\n if (/[^ ]/.test(lineStr)) {\n let count = 1;\n const end = error.linePos[1];\n if (end && end.line === line && end.col > col) {\n count = Math.max(1, Math.min(end.col - col, 80 - ci));\n }\n const pointer = ' '.repeat(ci) + '^'.repeat(count);\n error.message += `:\\n\\n${lineStr}\\n${pointer}\\n`;\n }\n};\n\nexports.YAMLError = YAMLError;\nexports.YAMLParseError = YAMLParseError;\nexports.YAMLWarning = YAMLWarning;\nexports.prettifyError = prettifyError;\n","'use strict';\n\nvar composer = require('./compose/composer.js');\nvar Document = require('./doc/Document.js');\nvar Schema = require('./schema/Schema.js');\nvar errors = require('./errors.js');\nvar Alias = require('./nodes/Alias.js');\nvar Node = require('./nodes/Node.js');\nvar Pair = require('./nodes/Pair.js');\nvar Scalar = require('./nodes/Scalar.js');\nvar YAMLMap = require('./nodes/YAMLMap.js');\nvar YAMLSeq = require('./nodes/YAMLSeq.js');\nvar cst = require('./parse/cst.js');\nvar lexer = require('./parse/lexer.js');\nvar lineCounter = require('./parse/line-counter.js');\nvar parser = require('./parse/parser.js');\nvar publicApi = require('./public-api.js');\nvar visit = require('./visit.js');\n\n\n\nexports.Composer = composer.Composer;\nexports.Document = Document.Document;\nexports.Schema = 
Schema.Schema;\nexports.YAMLError = errors.YAMLError;\nexports.YAMLParseError = errors.YAMLParseError;\nexports.YAMLWarning = errors.YAMLWarning;\nexports.Alias = Alias.Alias;\nexports.isAlias = Node.isAlias;\nexports.isCollection = Node.isCollection;\nexports.isDocument = Node.isDocument;\nexports.isMap = Node.isMap;\nexports.isNode = Node.isNode;\nexports.isPair = Node.isPair;\nexports.isScalar = Node.isScalar;\nexports.isSeq = Node.isSeq;\nexports.Pair = Pair.Pair;\nexports.Scalar = Scalar.Scalar;\nexports.YAMLMap = YAMLMap.YAMLMap;\nexports.YAMLSeq = YAMLSeq.YAMLSeq;\nexports.CST = cst;\nexports.Lexer = lexer.Lexer;\nexports.LineCounter = lineCounter.LineCounter;\nexports.Parser = parser.Parser;\nexports.parse = publicApi.parse;\nexports.parseAllDocuments = publicApi.parseAllDocuments;\nexports.parseDocument = publicApi.parseDocument;\nexports.stringify = publicApi.stringify;\nexports.visit = visit.visit;\nexports.visitAsync = visit.visitAsync;\n","'use strict';\n\nfunction debug(logLevel, ...messages) {\n if (logLevel === 'debug')\n console.log(...messages);\n}\nfunction warn(logLevel, warning) {\n if (logLevel === 'debug' || logLevel === 'warn') {\n if (typeof process !== 'undefined' && process.emitWarning)\n process.emitWarning(warning);\n else\n console.warn(warning);\n }\n}\n\nexports.debug = debug;\nexports.warn = warn;\n","'use strict';\n\nvar anchors = require('../doc/anchors.js');\nvar visit = require('../visit.js');\nvar Node = require('./Node.js');\n\nclass Alias extends Node.NodeBase {\n constructor(source) {\n super(Node.ALIAS);\n this.source = source;\n Object.defineProperty(this, 'tag', {\n set() {\n throw new Error('Alias nodes cannot have tags');\n }\n });\n }\n /**\n * Resolve the value of this alias within `doc`, finding the last\n * instance of the `source` anchor before this node.\n */\n resolve(doc) {\n let found = undefined;\n visit.visit(doc, {\n Node: (_key, node) => {\n if (node === this)\n return visit.visit.BREAK;\n if (node.anchor === this.source)\n found = node;\n }\n });\n return found;\n }\n toJSON(_arg, ctx) {\n if (!ctx)\n return { source: this.source };\n const { anchors, doc, maxAliasCount } = ctx;\n const source = this.resolve(doc);\n if (!source) {\n const msg = `Unresolved alias (the anchor must be set before the alias): ${this.source}`;\n throw new ReferenceError(msg);\n }\n const data = anchors.get(source);\n /* istanbul ignore if */\n if (!data || data.res === undefined) {\n const msg = 'This should not happen: Alias anchor was not resolved?';\n throw new ReferenceError(msg);\n }\n if (maxAliasCount >= 0) {\n data.count += 1;\n if (data.aliasCount === 0)\n data.aliasCount = getAliasCount(doc, source, anchors);\n if (data.count * data.aliasCount > maxAliasCount) {\n const msg = 'Excessive alias count indicates a resource exhaustion attack';\n throw new ReferenceError(msg);\n }\n }\n return data.res;\n }\n toString(ctx, _onComment, _onChompKeep) {\n const src = `*${this.source}`;\n if (ctx) {\n anchors.anchorIsValid(this.source);\n if (ctx.options.verifyAliasOrder && !ctx.anchors.has(this.source)) {\n const msg = `Unresolved alias (the anchor must be set before the alias): ${this.source}`;\n throw new Error(msg);\n }\n if (ctx.implicitKey)\n return `${src} `;\n }\n return src;\n }\n}\nfunction getAliasCount(doc, node, anchors) {\n if (Node.isAlias(node)) {\n const source = node.resolve(doc);\n const anchor = anchors && source && anchors.get(source);\n return anchor ? 
anchor.count * anchor.aliasCount : 0;\n }\n else if (Node.isCollection(node)) {\n let count = 0;\n for (const item of node.items) {\n const c = getAliasCount(doc, item, anchors);\n if (c > count)\n count = c;\n }\n return count;\n }\n else if (Node.isPair(node)) {\n const kc = getAliasCount(doc, node.key, anchors);\n const vc = getAliasCount(doc, node.value, anchors);\n return Math.max(kc, vc);\n }\n return 1;\n}\n\nexports.Alias = Alias;\n","'use strict';\n\nvar createNode = require('../doc/createNode.js');\nvar Node = require('./Node.js');\n\nfunction collectionFromPath(schema, path, value) {\n let v = value;\n for (let i = path.length - 1; i >= 0; --i) {\n const k = path[i];\n if (typeof k === 'number' && Number.isInteger(k) && k >= 0) {\n const a = [];\n a[k] = v;\n v = a;\n }\n else {\n v = new Map([[k, v]]);\n }\n }\n return createNode.createNode(v, undefined, {\n aliasDuplicateObjects: false,\n keepUndefined: false,\n onAnchor: () => {\n throw new Error('This should not happen, please report a bug.');\n },\n schema,\n sourceObjects: new Map()\n });\n}\n// Type guard is intentionally a little wrong so as to be more useful,\n// as it does not cover untypable empty non-string iterables (e.g. []).\nconst isEmptyPath = (path) => path == null ||\n (typeof path === 'object' && !!path[Symbol.iterator]().next().done);\nclass Collection extends Node.NodeBase {\n constructor(type, schema) {\n super(type);\n Object.defineProperty(this, 'schema', {\n value: schema,\n configurable: true,\n enumerable: false,\n writable: true\n });\n }\n /**\n * Create a copy of this collection.\n *\n * @param schema - If defined, overwrites the original's schema\n */\n clone(schema) {\n const copy = Object.create(Object.getPrototypeOf(this), Object.getOwnPropertyDescriptors(this));\n if (schema)\n copy.schema = schema;\n copy.items = copy.items.map(it => Node.isNode(it) || Node.isPair(it) ? it.clone(schema) : it);\n if (this.range)\n copy.range = this.range.slice();\n return copy;\n }\n /**\n * Adds a value to the collection. For `!!map` and `!!omap` the value must\n * be a Pair instance or a `{ key, value }` object, which may not have a key\n * that already exists in the map.\n */\n addIn(path, value) {\n if (isEmptyPath(path))\n this.add(value);\n else {\n const [key, ...rest] = path;\n const node = this.get(key, true);\n if (Node.isCollection(node))\n node.addIn(rest, value);\n else if (node === undefined && this.schema)\n this.set(key, collectionFromPath(this.schema, rest, value));\n else\n throw new Error(`Expected YAML collection at ${key}. Remaining path: ${rest}`);\n }\n }\n /**\n * Removes a value from the collection.\n * @returns `true` if the item was found and removed.\n */\n deleteIn(path) {\n const [key, ...rest] = path;\n if (rest.length === 0)\n return this.delete(key);\n const node = this.get(key, true);\n if (Node.isCollection(node))\n return node.deleteIn(rest);\n else\n throw new Error(`Expected YAML collection at ${key}. Remaining path: ${rest}`);\n }\n /**\n * Returns item at `key`, or `undefined` if not found. By default unwraps\n * scalar values from their surrounding node; to disable set `keepScalar` to\n * `true` (collections are always returned intact).\n */\n getIn(path, keepScalar) {\n const [key, ...rest] = path;\n const node = this.get(key, true);\n if (rest.length === 0)\n return !keepScalar && Node.isScalar(node) ? node.value : node;\n else\n return Node.isCollection(node) ? 
node.getIn(rest, keepScalar) : undefined;\n }\n hasAllNullValues(allowScalar) {\n return this.items.every(node => {\n if (!Node.isPair(node))\n return false;\n const n = node.value;\n return (n == null ||\n (allowScalar &&\n Node.isScalar(n) &&\n n.value == null &&\n !n.commentBefore &&\n !n.comment &&\n !n.tag));\n });\n }\n /**\n * Checks if the collection includes a value with the key `key`.\n */\n hasIn(path) {\n const [key, ...rest] = path;\n if (rest.length === 0)\n return this.has(key);\n const node = this.get(key, true);\n return Node.isCollection(node) ? node.hasIn(rest) : false;\n }\n /**\n * Sets a value in this collection. For `!!set`, `value` needs to be a\n * boolean to add/remove the item from the set.\n */\n setIn(path, value) {\n const [key, ...rest] = path;\n if (rest.length === 0) {\n this.set(key, value);\n }\n else {\n const node = this.get(key, true);\n if (Node.isCollection(node))\n node.setIn(rest, value);\n else if (node === undefined && this.schema)\n this.set(key, collectionFromPath(this.schema, rest, value));\n else\n throw new Error(`Expected YAML collection at ${key}. Remaining path: ${rest}`);\n }\n }\n}\nCollection.maxFlowStringSingleLineLength = 60;\n\nexports.Collection = Collection;\nexports.collectionFromPath = collectionFromPath;\nexports.isEmptyPath = isEmptyPath;\n","'use strict';\n\nconst ALIAS = Symbol.for('yaml.alias');\nconst DOC = Symbol.for('yaml.document');\nconst MAP = Symbol.for('yaml.map');\nconst PAIR = Symbol.for('yaml.pair');\nconst SCALAR = Symbol.for('yaml.scalar');\nconst SEQ = Symbol.for('yaml.seq');\nconst NODE_TYPE = Symbol.for('yaml.node.type');\nconst isAlias = (node) => !!node && typeof node === 'object' && node[NODE_TYPE] === ALIAS;\nconst isDocument = (node) => !!node && typeof node === 'object' && node[NODE_TYPE] === DOC;\nconst isMap = (node) => !!node && typeof node === 'object' && node[NODE_TYPE] === MAP;\nconst isPair = (node) => !!node && typeof node === 'object' && node[NODE_TYPE] === PAIR;\nconst isScalar = (node) => !!node && typeof node === 'object' && node[NODE_TYPE] === SCALAR;\nconst isSeq = (node) => !!node && typeof node === 'object' && node[NODE_TYPE] === SEQ;\nfunction isCollection(node) {\n if (node && typeof node === 'object')\n switch (node[NODE_TYPE]) {\n case MAP:\n case SEQ:\n return true;\n }\n return false;\n}\nfunction isNode(node) {\n if (node && typeof node === 'object')\n switch (node[NODE_TYPE]) {\n case ALIAS:\n case MAP:\n case SCALAR:\n case SEQ:\n return true;\n }\n return false;\n}\nconst hasAnchor = (node) => (isScalar(node) || isCollection(node)) && !!node.anchor;\nclass NodeBase {\n constructor(type) {\n Object.defineProperty(this, NODE_TYPE, { value: type });\n }\n /** Create a copy of this node. 
*/\n clone() {\n const copy = Object.create(Object.getPrototypeOf(this), Object.getOwnPropertyDescriptors(this));\n if (this.range)\n copy.range = this.range.slice();\n return copy;\n }\n}\n\nexports.ALIAS = ALIAS;\nexports.DOC = DOC;\nexports.MAP = MAP;\nexports.NODE_TYPE = NODE_TYPE;\nexports.NodeBase = NodeBase;\nexports.PAIR = PAIR;\nexports.SCALAR = SCALAR;\nexports.SEQ = SEQ;\nexports.hasAnchor = hasAnchor;\nexports.isAlias = isAlias;\nexports.isCollection = isCollection;\nexports.isDocument = isDocument;\nexports.isMap = isMap;\nexports.isNode = isNode;\nexports.isPair = isPair;\nexports.isScalar = isScalar;\nexports.isSeq = isSeq;\n","'use strict';\n\nvar createNode = require('../doc/createNode.js');\nvar stringifyPair = require('../stringify/stringifyPair.js');\nvar addPairToJSMap = require('./addPairToJSMap.js');\nvar Node = require('./Node.js');\n\nfunction createPair(key, value, ctx) {\n const k = createNode.createNode(key, undefined, ctx);\n const v = createNode.createNode(value, undefined, ctx);\n return new Pair(k, v);\n}\nclass Pair {\n constructor(key, value = null) {\n Object.defineProperty(this, Node.NODE_TYPE, { value: Node.PAIR });\n this.key = key;\n this.value = value;\n }\n clone(schema) {\n let { key, value } = this;\n if (Node.isNode(key))\n key = key.clone(schema);\n if (Node.isNode(value))\n value = value.clone(schema);\n return new Pair(key, value);\n }\n toJSON(_, ctx) {\n const pair = ctx?.mapAsMap ? new Map() : {};\n return addPairToJSMap.addPairToJSMap(ctx, pair, this);\n }\n toString(ctx, onComment, onChompKeep) {\n return ctx?.doc\n ? stringifyPair.stringifyPair(this, ctx, onComment, onChompKeep)\n : JSON.stringify(this);\n }\n}\n\nexports.Pair = Pair;\nexports.createPair = createPair;\n","'use strict';\n\nvar Node = require('./Node.js');\nvar toJS = require('./toJS.js');\n\nconst isScalarValue = (value) => !value || (typeof value !== 'function' && typeof value !== 'object');\nclass Scalar extends Node.NodeBase {\n constructor(value) {\n super(Node.SCALAR);\n this.value = value;\n }\n toJSON(arg, ctx) {\n return ctx?.keep ? this.value : toJS.toJS(this.value, arg, ctx);\n }\n toString() {\n return String(this.value);\n }\n}\nScalar.BLOCK_FOLDED = 'BLOCK_FOLDED';\nScalar.BLOCK_LITERAL = 'BLOCK_LITERAL';\nScalar.PLAIN = 'PLAIN';\nScalar.QUOTE_DOUBLE = 'QUOTE_DOUBLE';\nScalar.QUOTE_SINGLE = 'QUOTE_SINGLE';\n\nexports.Scalar = Scalar;\nexports.isScalarValue = isScalarValue;\n","'use strict';\n\nvar stringifyCollection = require('../stringify/stringifyCollection.js');\nvar addPairToJSMap = require('./addPairToJSMap.js');\nvar Collection = require('./Collection.js');\nvar Node = require('./Node.js');\nvar Pair = require('./Pair.js');\nvar Scalar = require('./Scalar.js');\n\nfunction findPair(items, key) {\n const k = Node.isScalar(key) ? key.value : key;\n for (const it of items) {\n if (Node.isPair(it)) {\n if (it.key === key || it.key === k)\n return it;\n if (Node.isScalar(it.key) && it.key.value === k)\n return it;\n }\n }\n return undefined;\n}\nclass YAMLMap extends Collection.Collection {\n static get tagName() {\n return 'tag:yaml.org,2002:map';\n }\n constructor(schema) {\n super(Node.MAP, schema);\n this.items = [];\n }\n /**\n * Adds a value to the collection.\n *\n * @param overwrite - If not set `true`, using a key that is already in the\n * collection will throw. 
Otherwise, overwrites the previous value.\n */\n add(pair, overwrite) {\n let _pair;\n if (Node.isPair(pair))\n _pair = pair;\n else if (!pair || typeof pair !== 'object' || !('key' in pair)) {\n // In TypeScript, this never happens.\n _pair = new Pair.Pair(pair, pair?.value);\n }\n else\n _pair = new Pair.Pair(pair.key, pair.value);\n const prev = findPair(this.items, _pair.key);\n const sortEntries = this.schema?.sortMapEntries;\n if (prev) {\n if (!overwrite)\n throw new Error(`Key ${_pair.key} already set`);\n // For scalars, keep the old node & its comments and anchors\n if (Node.isScalar(prev.value) && Scalar.isScalarValue(_pair.value))\n prev.value.value = _pair.value;\n else\n prev.value = _pair.value;\n }\n else if (sortEntries) {\n const i = this.items.findIndex(item => sortEntries(_pair, item) < 0);\n if (i === -1)\n this.items.push(_pair);\n else\n this.items.splice(i, 0, _pair);\n }\n else {\n this.items.push(_pair);\n }\n }\n delete(key) {\n const it = findPair(this.items, key);\n if (!it)\n return false;\n const del = this.items.splice(this.items.indexOf(it), 1);\n return del.length > 0;\n }\n get(key, keepScalar) {\n const it = findPair(this.items, key);\n const node = it?.value;\n return (!keepScalar && Node.isScalar(node) ? node.value : node) ?? undefined;\n }\n has(key) {\n return !!findPair(this.items, key);\n }\n set(key, value) {\n this.add(new Pair.Pair(key, value), true);\n }\n /**\n * @param ctx - Conversion context, originally set in Document#toJS()\n * @param {Class} Type - If set, forces the returned collection type\n * @returns Instance of Type, Map, or Object\n */\n toJSON(_, ctx, Type) {\n const map = Type ? new Type() : ctx?.mapAsMap ? new Map() : {};\n if (ctx?.onCreate)\n ctx.onCreate(map);\n for (const item of this.items)\n addPairToJSMap.addPairToJSMap(ctx, map, item);\n return map;\n }\n toString(ctx, onComment, onChompKeep) {\n if (!ctx)\n return JSON.stringify(this);\n for (const item of this.items) {\n if (!Node.isPair(item))\n throw new Error(`Map items must all be pairs; found ${JSON.stringify(item)} instead`);\n }\n if (!ctx.allNullValues && this.hasAllNullValues(false))\n ctx = Object.assign({}, ctx, { allNullValues: true });\n return stringifyCollection.stringifyCollection(this, ctx, {\n blockItemPrefix: '',\n flowChars: { start: '{', end: '}' },\n itemIndent: ctx.indent || '',\n onChompKeep,\n onComment\n });\n }\n}\n\nexports.YAMLMap = YAMLMap;\nexports.findPair = findPair;\n","'use strict';\n\nvar stringifyCollection = require('../stringify/stringifyCollection.js');\nvar Collection = require('./Collection.js');\nvar Node = require('./Node.js');\nvar Scalar = require('./Scalar.js');\nvar toJS = require('./toJS.js');\n\nclass YAMLSeq extends Collection.Collection {\n static get tagName() {\n return 'tag:yaml.org,2002:seq';\n }\n constructor(schema) {\n super(Node.SEQ, schema);\n this.items = [];\n }\n add(value) {\n this.items.push(value);\n }\n /**\n * Removes a value from the collection.\n *\n * `key` must contain a representation of an integer for this to succeed.\n * It may be wrapped in a `Scalar`.\n *\n * @returns `true` if the item was found and removed.\n */\n delete(key) {\n const idx = asItemIndex(key);\n if (typeof idx !== 'number')\n return false;\n const del = this.items.splice(idx, 1);\n return del.length > 0;\n }\n get(key, keepScalar) {\n const idx = asItemIndex(key);\n if (typeof idx !== 'number')\n return undefined;\n const it = this.items[idx];\n return !keepScalar && Node.isScalar(it) ? 
it.value : it;\n }\n /**\n * Checks if the collection includes a value with the key `key`.\n *\n * `key` must contain a representation of an integer for this to succeed.\n * It may be wrapped in a `Scalar`.\n */\n has(key) {\n const idx = asItemIndex(key);\n return typeof idx === 'number' && idx < this.items.length;\n }\n /**\n * Sets a value in this collection. For `!!set`, `value` needs to be a\n * boolean to add/remove the item from the set.\n *\n * If `key` does not contain a representation of an integer, this will throw.\n * It may be wrapped in a `Scalar`.\n */\n set(key, value) {\n const idx = asItemIndex(key);\n if (typeof idx !== 'number')\n throw new Error(`Expected a valid index, not ${key}.`);\n const prev = this.items[idx];\n if (Node.isScalar(prev) && Scalar.isScalarValue(value))\n prev.value = value;\n else\n this.items[idx] = value;\n }\n toJSON(_, ctx) {\n const seq = [];\n if (ctx?.onCreate)\n ctx.onCreate(seq);\n let i = 0;\n for (const item of this.items)\n seq.push(toJS.toJS(item, String(i++), ctx));\n return seq;\n }\n toString(ctx, onComment, onChompKeep) {\n if (!ctx)\n return JSON.stringify(this);\n return stringifyCollection.stringifyCollection(this, ctx, {\n blockItemPrefix: '- ',\n flowChars: { start: '[', end: ']' },\n itemIndent: (ctx.indent || '') + ' ',\n onChompKeep,\n onComment\n });\n }\n}\nfunction asItemIndex(key) {\n let idx = Node.isScalar(key) ? key.value : key;\n if (idx && typeof idx === 'string')\n idx = Number(idx);\n return typeof idx === 'number' && Number.isInteger(idx) && idx >= 0\n ? idx\n : null;\n}\n\nexports.YAMLSeq = YAMLSeq;\n","'use strict';\n\nvar log = require('../log.js');\nvar stringify = require('../stringify/stringify.js');\nvar Node = require('./Node.js');\nvar Scalar = require('./Scalar.js');\nvar toJS = require('./toJS.js');\n\nconst MERGE_KEY = '<<';\nfunction addPairToJSMap(ctx, map, { key, value }) {\n if (ctx?.doc.schema.merge && isMergeKey(key)) {\n value = Node.isAlias(value) ? value.resolve(ctx.doc) : value;\n if (Node.isSeq(value))\n for (const it of value.items)\n mergeToJSMap(ctx, map, it);\n else if (Array.isArray(value))\n for (const it of value)\n mergeToJSMap(ctx, map, it);\n else\n mergeToJSMap(ctx, map, value);\n }\n else {\n const jsKey = toJS.toJS(key, '', ctx);\n if (map instanceof Map) {\n map.set(jsKey, toJS.toJS(value, jsKey, ctx));\n }\n else if (map instanceof Set) {\n map.add(jsKey);\n }\n else {\n const stringKey = stringifyKey(key, jsKey, ctx);\n const jsValue = toJS.toJS(value, stringKey, ctx);\n if (stringKey in map)\n Object.defineProperty(map, stringKey, {\n value: jsValue,\n writable: true,\n enumerable: true,\n configurable: true\n });\n else\n map[stringKey] = jsValue;\n }\n }\n return map;\n}\nconst isMergeKey = (key) => key === MERGE_KEY ||\n (Node.isScalar(key) &&\n key.value === MERGE_KEY &&\n (!key.type || key.type === Scalar.Scalar.PLAIN));\n// If the value associated with a merge key is a single mapping node, each of\n// its key/value pairs is inserted into the current mapping, unless the key\n// already exists in it. If the value associated with the merge key is a\n// sequence, then this sequence is expected to contain mapping nodes and each\n// of these nodes is merged in turn according to its order in the sequence.\n// Keys in mapping nodes earlier in the sequence override keys specified in\n// later mapping nodes. -- http://yaml.org/type/merge.html\nfunction mergeToJSMap(ctx, map, value) {\n const source = ctx && Node.isAlias(value) ? 
value.resolve(ctx.doc) : value;\n if (!Node.isMap(source))\n throw new Error('Merge sources must be maps or map aliases');\n const srcMap = source.toJSON(null, ctx, Map);\n for (const [key, value] of srcMap) {\n if (map instanceof Map) {\n if (!map.has(key))\n map.set(key, value);\n }\n else if (map instanceof Set) {\n map.add(key);\n }\n else if (!Object.prototype.hasOwnProperty.call(map, key)) {\n Object.defineProperty(map, key, {\n value,\n writable: true,\n enumerable: true,\n configurable: true\n });\n }\n }\n return map;\n}\nfunction stringifyKey(key, jsKey, ctx) {\n if (jsKey === null)\n return '';\n if (typeof jsKey !== 'object')\n return String(jsKey);\n if (Node.isNode(key) && ctx && ctx.doc) {\n const strCtx = stringify.createStringifyContext(ctx.doc, {});\n strCtx.anchors = new Set();\n for (const node of ctx.anchors.keys())\n strCtx.anchors.add(node.anchor);\n strCtx.inFlow = true;\n strCtx.inStringifyKey = true;\n const strKey = key.toString(strCtx);\n if (!ctx.mapKeyWarned) {\n let jsonStr = JSON.stringify(strKey);\n if (jsonStr.length > 40)\n jsonStr = jsonStr.substring(0, 36) + '...\"';\n log.warn(ctx.doc.options.logLevel, `Keys with collection values will be stringified due to JS Object restrictions: ${jsonStr}. Set mapAsMap: true to use object keys.`);\n ctx.mapKeyWarned = true;\n }\n return strKey;\n }\n return JSON.stringify(jsKey);\n}\n\nexports.addPairToJSMap = addPairToJSMap;\n","'use strict';\n\nvar Node = require('./Node.js');\n\n/**\n * Recursively convert any node or its contents to native JavaScript\n *\n * @param value - The input value\n * @param arg - If `value` defines a `toJSON()` method, use this\n * as its first argument\n * @param ctx - Conversion context, originally set in Document#toJS(). If\n * `{ keep: true }` is not set, output should be suitable for JSON\n * stringification.\n */\nfunction toJS(value, arg, ctx) {\n // eslint-disable-next-line @typescript-eslint/no-unsafe-return\n if (Array.isArray(value))\n return value.map((v, i) => toJS(v, String(i), ctx));\n if (value && typeof value.toJSON === 'function') {\n // eslint-disable-next-line @typescript-eslint/no-unsafe-call\n if (!ctx || !Node.hasAnchor(value))\n return value.toJSON(arg, ctx);\n const data = { aliasCount: 0, count: 1, res: undefined };\n ctx.anchors.set(value, data);\n ctx.onCreate = res => {\n data.res = res;\n delete ctx.onCreate;\n };\n const res = value.toJSON(arg, ctx);\n if (ctx.onCreate)\n ctx.onCreate(res);\n return res;\n }\n if (typeof value === 'bigint' && !ctx?.keep)\n return Number(value);\n return value;\n}\n\nexports.toJS = toJS;\n","'use strict';\n\nvar resolveBlockScalar = require('../compose/resolve-block-scalar.js');\nvar resolveFlowScalar = require('../compose/resolve-flow-scalar.js');\nvar errors = require('../errors.js');\nvar stringifyString = require('../stringify/stringifyString.js');\n\nfunction resolveAsScalar(token, strict = true, onError) {\n if (token) {\n const _onError = (pos, code, message) => {\n const offset = typeof pos === 'number' ? pos : Array.isArray(pos) ? 
pos[0] : pos.offset;\n if (onError)\n onError(offset, code, message);\n else\n throw new errors.YAMLParseError([offset, offset + 1], code, message);\n };\n switch (token.type) {\n case 'scalar':\n case 'single-quoted-scalar':\n case 'double-quoted-scalar':\n return resolveFlowScalar.resolveFlowScalar(token, strict, _onError);\n case 'block-scalar':\n return resolveBlockScalar.resolveBlockScalar(token, strict, _onError);\n }\n }\n return null;\n}\n/**\n * Create a new scalar token with `value`\n *\n * Values that represent an actual string but may be parsed as a different type should use a `type` other than `'PLAIN'`,\n * as this function does not support any schema operations and won't check for such conflicts.\n *\n * @param value The string representation of the value, which will have its content properly indented.\n * @param context.end Comments and whitespace after the end of the value, or after the block scalar header. If undefined, a newline will be added.\n * @param context.implicitKey Being within an implicit key may affect the resolved type of the token's value.\n * @param context.indent The indent level of the token.\n * @param context.inFlow Is this scalar within a flow collection? This may affect the resolved type of the token's value.\n * @param context.offset The offset position of the token.\n * @param context.type The preferred type of the scalar token. If undefined, the previous type of the `token` will be used, defaulting to `'PLAIN'`.\n */\nfunction createScalarToken(value, context) {\n const { implicitKey = false, indent, inFlow = false, offset = -1, type = 'PLAIN' } = context;\n const source = stringifyString.stringifyString({ type, value }, {\n implicitKey,\n indent: indent > 0 ? ' '.repeat(indent) : '',\n inFlow,\n options: { blockQuote: true, lineWidth: -1 }\n });\n const end = context.end ?? [\n { type: 'newline', offset: -1, indent, source: '\\n' }\n ];\n switch (source[0]) {\n case '|':\n case '>': {\n const he = source.indexOf('\\n');\n const head = source.substring(0, he);\n const body = source.substring(he + 1) + '\\n';\n const props = [\n { type: 'block-scalar-header', offset, indent, source: head }\n ];\n if (!addEndtoBlockProps(props, end))\n props.push({ type: 'newline', offset: -1, indent, source: '\\n' });\n return { type: 'block-scalar', offset, indent, props, source: body };\n }\n case '\"':\n return { type: 'double-quoted-scalar', offset, indent, source, end };\n case \"'\":\n return { type: 'single-quoted-scalar', offset, indent, source, end };\n default:\n return { type: 'scalar', offset, indent, source, end };\n }\n}\n/**\n * Set the value of `token` to the given string `value`, overwriting any previous contents and type that it may have.\n *\n * Best efforts are made to retain any comments previously associated with the `token`,\n * though all contents within a collection's `items` will be overwritten.\n *\n * Values that represent an actual string but may be parsed as a different type should use a `type` other than `'PLAIN'`,\n * as this function does not support any schema operations and won't check for such conflicts.\n *\n * @param token Any token. 
If it does not include an `indent` value, the value will be stringified as if it were an implicit key.\n * @param value The string representation of the value, which will have its content properly indented.\n * @param context.afterKey In most cases, values after a key should have an additional level of indentation.\n * @param context.implicitKey Being within an implicit key may affect the resolved type of the token's value.\n * @param context.inFlow Being within a flow collection may affect the resolved type of the token's value.\n * @param context.type The preferred type of the scalar token. If undefined, the previous type of the `token` will be used, defaulting to `'PLAIN'`.\n */\nfunction setScalarValue(token, value, context = {}) {\n let { afterKey = false, implicitKey = false, inFlow = false, type } = context;\n let indent = 'indent' in token ? token.indent : null;\n if (afterKey && typeof indent === 'number')\n indent += 2;\n if (!type)\n switch (token.type) {\n case 'single-quoted-scalar':\n type = 'QUOTE_SINGLE';\n break;\n case 'double-quoted-scalar':\n type = 'QUOTE_DOUBLE';\n break;\n case 'block-scalar': {\n const header = token.props[0];\n if (header.type !== 'block-scalar-header')\n throw new Error('Invalid block scalar header');\n type = header.source[0] === '>' ? 'BLOCK_FOLDED' : 'BLOCK_LITERAL';\n break;\n }\n default:\n type = 'PLAIN';\n }\n const source = stringifyString.stringifyString({ type, value }, {\n implicitKey: implicitKey || indent === null,\n indent: indent !== null && indent > 0 ? ' '.repeat(indent) : '',\n inFlow,\n options: { blockQuote: true, lineWidth: -1 }\n });\n switch (source[0]) {\n case '|':\n case '>':\n setBlockScalarValue(token, source);\n break;\n case '\"':\n setFlowScalarValue(token, source, 'double-quoted-scalar');\n break;\n case \"'\":\n setFlowScalarValue(token, source, 'single-quoted-scalar');\n break;\n default:\n setFlowScalarValue(token, source, 'scalar');\n }\n}\nfunction setBlockScalarValue(token, source) {\n const he = source.indexOf('\\n');\n const head = source.substring(0, he);\n const body = source.substring(he + 1) + '\\n';\n if (token.type === 'block-scalar') {\n const header = token.props[0];\n if (header.type !== 'block-scalar-header')\n throw new Error('Invalid block scalar header');\n header.source = head;\n token.source = body;\n }\n else {\n const { offset } = token;\n const indent = 'indent' in token ? token.indent : -1;\n const props = [\n { type: 'block-scalar-header', offset, indent, source: head }\n ];\n if (!addEndtoBlockProps(props, 'end' in token ? 
token.end : undefined))\n props.push({ type: 'newline', offset: -1, indent, source: '\\n' });\n for (const key of Object.keys(token))\n if (key !== 'type' && key !== 'offset')\n delete token[key];\n Object.assign(token, { type: 'block-scalar', indent, props, source: body });\n }\n}\n/** @returns `true` if last token is a newline */\nfunction addEndtoBlockProps(props, end) {\n if (end)\n for (const st of end)\n switch (st.type) {\n case 'space':\n case 'comment':\n props.push(st);\n break;\n case 'newline':\n props.push(st);\n return true;\n }\n return false;\n}\nfunction setFlowScalarValue(token, source, type) {\n switch (token.type) {\n case 'scalar':\n case 'double-quoted-scalar':\n case 'single-quoted-scalar':\n token.type = type;\n token.source = source;\n break;\n case 'block-scalar': {\n const end = token.props.slice(1);\n let oa = source.length;\n if (token.props[0].type === 'block-scalar-header')\n oa -= token.props[0].source.length;\n for (const tok of end)\n tok.offset += oa;\n delete token.props;\n Object.assign(token, { type, source, end });\n break;\n }\n case 'block-map':\n case 'block-seq': {\n const offset = token.offset + source.length;\n const nl = { type: 'newline', offset, indent: token.indent, source: '\\n' };\n delete token.items;\n Object.assign(token, { type, source, end: [nl] });\n break;\n }\n default: {\n const indent = 'indent' in token ? token.indent : -1;\n const end = 'end' in token && Array.isArray(token.end)\n ? token.end.filter(st => st.type === 'space' ||\n st.type === 'comment' ||\n st.type === 'newline')\n : [];\n for (const key of Object.keys(token))\n if (key !== 'type' && key !== 'offset')\n delete token[key];\n Object.assign(token, { type, indent, source, end });\n }\n }\n}\n\nexports.createScalarToken = createScalarToken;\nexports.resolveAsScalar = resolveAsScalar;\nexports.setScalarValue = setScalarValue;\n","'use strict';\n\n/**\n * Stringify a CST document, token, or collection item\n *\n * Fair warning: This applies no validation whatsoever, and\n * simply concatenates the sources in their logical order.\n */\nconst stringify = (cst) => 'type' in cst ? 
stringifyToken(cst) : stringifyItem(cst);\nfunction stringifyToken(token) {\n switch (token.type) {\n case 'block-scalar': {\n let res = '';\n for (const tok of token.props)\n res += stringifyToken(tok);\n return res + token.source;\n }\n case 'block-map':\n case 'block-seq': {\n let res = '';\n for (const item of token.items)\n res += stringifyItem(item);\n return res;\n }\n case 'flow-collection': {\n let res = token.start.source;\n for (const item of token.items)\n res += stringifyItem(item);\n for (const st of token.end)\n res += st.source;\n return res;\n }\n case 'document': {\n let res = stringifyItem(token);\n if (token.end)\n for (const st of token.end)\n res += st.source;\n return res;\n }\n default: {\n let res = token.source;\n if ('end' in token && token.end)\n for (const st of token.end)\n res += st.source;\n return res;\n }\n }\n}\nfunction stringifyItem({ start, key, sep, value }) {\n let res = '';\n for (const st of start)\n res += st.source;\n if (key)\n res += stringifyToken(key);\n if (sep)\n for (const st of sep)\n res += st.source;\n if (value)\n res += stringifyToken(value);\n return res;\n}\n\nexports.stringify = stringify;\n","'use strict';\n\nconst BREAK = Symbol('break visit');\nconst SKIP = Symbol('skip children');\nconst REMOVE = Symbol('remove item');\n/**\n * Apply a visitor to a CST document or item.\n *\n * Walks through the tree (depth-first) starting from the root, calling a\n * `visitor` function with two arguments when entering each item:\n * - `item`: The current item, which included the following members:\n * - `start: SourceToken[]` – Source tokens before the key or value,\n * possibly including its anchor or tag.\n * - `key?: Token | null` – Set for pair values. May then be `null`, if\n * the key before the `:` separator is empty.\n * - `sep?: SourceToken[]` – Source tokens between the key and the value,\n * which should include the `:` map value indicator if `value` is set.\n * - `value?: Token` – The value of a sequence item, or of a map pair.\n * - `path`: The steps from the root to the current node, as an array of\n * `['key' | 'value', number]` tuples.\n *\n * The return value of the visitor may be used to control the traversal:\n * - `undefined` (default): Do nothing and continue\n * - `visit.SKIP`: Do not visit the children of this token, continue with\n * next sibling\n * - `visit.BREAK`: Terminate traversal completely\n * - `visit.REMOVE`: Remove the current item, then continue with the next one\n * - `number`: Set the index of the next step. This is useful especially if\n * the index of the current token has changed.\n * - `function`: Define the next visitor for this item. 
After the original\n * visitor is called on item entry, next visitors are called after handling\n * a non-empty `key` and when exiting the item.\n */\nfunction visit(cst, visitor) {\n if ('type' in cst && cst.type === 'document')\n cst = { start: cst.start, value: cst.value };\n _visit(Object.freeze([]), cst, visitor);\n}\n// Without the `as symbol` casts, TS declares these in the `visit`\n// namespace using `var`, but then complains about that because\n// `unique symbol` must be `const`.\n/** Terminate visit traversal completely */\nvisit.BREAK = BREAK;\n/** Do not visit the children of the current item */\nvisit.SKIP = SKIP;\n/** Remove the current item */\nvisit.REMOVE = REMOVE;\n/** Find the item at `path` from `cst` as the root */\nvisit.itemAtPath = (cst, path) => {\n let item = cst;\n for (const [field, index] of path) {\n const tok = item?.[field];\n if (tok && 'items' in tok) {\n item = tok.items[index];\n }\n else\n return undefined;\n }\n return item;\n};\n/**\n * Get the immediate parent collection of the item at `path` from `cst` as the root.\n *\n * Throws an error if the collection is not found, which should never happen if the item itself exists.\n */\nvisit.parentCollection = (cst, path) => {\n const parent = visit.itemAtPath(cst, path.slice(0, -1));\n const field = path[path.length - 1][0];\n const coll = parent?.[field];\n if (coll && 'items' in coll)\n return coll;\n throw new Error('Parent collection not found');\n};\nfunction _visit(path, item, visitor) {\n let ctrl = visitor(item, path);\n if (typeof ctrl === 'symbol')\n return ctrl;\n for (const field of ['key', 'value']) {\n const token = item[field];\n if (token && 'items' in token) {\n for (let i = 0; i < token.items.length; ++i) {\n const ci = _visit(Object.freeze(path.concat([[field, i]])), token.items[i], visitor);\n if (typeof ci === 'number')\n i = ci - 1;\n else if (ci === BREAK)\n return BREAK;\n else if (ci === REMOVE) {\n token.items.splice(i, 1);\n i -= 1;\n }\n }\n if (typeof ctrl === 'function' && field === 'key')\n ctrl = ctrl(item, path);\n }\n }\n return typeof ctrl === 'function' ? ctrl(item, path) : ctrl;\n}\n\nexports.visit = visit;\n","'use strict';\n\nvar cstScalar = require('./cst-scalar.js');\nvar cstStringify = require('./cst-stringify.js');\nvar cstVisit = require('./cst-visit.js');\n\n/** The byte order mark */\nconst BOM = '\\u{FEFF}';\n/** Start of doc-mode */\nconst DOCUMENT = '\\x02'; // C0: Start of Text\n/** Unexpected end of flow-mode */\nconst FLOW_END = '\\x18'; // C0: Cancel\n/** Next token is a scalar value */\nconst SCALAR = '\\x1f'; // C0: Unit Separator\n/** @returns `true` if `token` is a flow or block collection */\nconst isCollection = (token) => !!token && 'items' in token;\n/** @returns `true` if `token` is a flow or block scalar; not an alias */\nconst isScalar = (token) => !!token &&\n (token.type === 'scalar' ||\n token.type === 'single-quoted-scalar' ||\n token.type === 'double-quoted-scalar' ||\n token.type === 'block-scalar');\n/* istanbul ignore next */\n/** Get a printable representation of a lexer token */\nfunction prettyToken(token) {\n switch (token) {\n case BOM:\n return '';\n case DOCUMENT:\n return '';\n case FLOW_END:\n return '';\n case SCALAR:\n return '';\n default:\n return JSON.stringify(token);\n }\n}\n/** Identify the type of a lexer token. May return `null` for unknown tokens. 
*/\nfunction tokenType(source) {\n switch (source) {\n case BOM:\n return 'byte-order-mark';\n case DOCUMENT:\n return 'doc-mode';\n case FLOW_END:\n return 'flow-error-end';\n case SCALAR:\n return 'scalar';\n case '---':\n return 'doc-start';\n case '...':\n return 'doc-end';\n case '':\n case '\\n':\n case '\\r\\n':\n return 'newline';\n case '-':\n return 'seq-item-ind';\n case '?':\n return 'explicit-key-ind';\n case ':':\n return 'map-value-ind';\n case '{':\n return 'flow-map-start';\n case '}':\n return 'flow-map-end';\n case '[':\n return 'flow-seq-start';\n case ']':\n return 'flow-seq-end';\n case ',':\n return 'comma';\n }\n switch (source[0]) {\n case ' ':\n case '\\t':\n return 'space';\n case '#':\n return 'comment';\n case '%':\n return 'directive-line';\n case '*':\n return 'alias';\n case '&':\n return 'anchor';\n case '!':\n return 'tag';\n case \"'\":\n return 'single-quoted-scalar';\n case '\"':\n return 'double-quoted-scalar';\n case '|':\n case '>':\n return 'block-scalar-header';\n }\n return null;\n}\n\nexports.createScalarToken = cstScalar.createScalarToken;\nexports.resolveAsScalar = cstScalar.resolveAsScalar;\nexports.setScalarValue = cstScalar.setScalarValue;\nexports.stringify = cstStringify.stringify;\nexports.visit = cstVisit.visit;\nexports.BOM = BOM;\nexports.DOCUMENT = DOCUMENT;\nexports.FLOW_END = FLOW_END;\nexports.SCALAR = SCALAR;\nexports.isCollection = isCollection;\nexports.isScalar = isScalar;\nexports.prettyToken = prettyToken;\nexports.tokenType = tokenType;\n","'use strict';\n\nvar cst = require('./cst.js');\n\n/*\nSTART -> stream\n\nstream\n directive -> line-end -> stream\n indent + line-end -> stream\n [else] -> line-start\n\nline-end\n comment -> line-end\n newline -> .\n input-end -> END\n\nline-start\n doc-start -> doc\n doc-end -> stream\n [else] -> indent -> block-start\n\nblock-start\n seq-item-start -> block-start\n explicit-key-start -> block-start\n map-value-start -> block-start\n [else] -> doc\n\ndoc\n line-end -> line-start\n spaces -> doc\n anchor -> doc\n tag -> doc\n flow-start -> flow -> doc\n flow-end -> error -> doc\n seq-item-start -> error -> doc\n explicit-key-start -> error -> doc\n map-value-start -> doc\n alias -> doc\n quote-start -> quoted-scalar -> doc\n block-scalar-header -> line-end -> block-scalar(min) -> line-start\n [else] -> plain-scalar(false, min) -> doc\n\nflow\n line-end -> flow\n spaces -> flow\n anchor -> flow\n tag -> flow\n flow-start -> flow -> flow\n flow-end -> .\n seq-item-start -> error -> flow\n explicit-key-start -> flow\n map-value-start -> flow\n alias -> flow\n quote-start -> quoted-scalar -> flow\n comma -> flow\n [else] -> plain-scalar(true, 0) -> flow\n\nquoted-scalar\n quote-end -> .\n [else] -> quoted-scalar\n\nblock-scalar(min)\n newline + peek(indent < min) -> .\n [else] -> block-scalar(min)\n\nplain-scalar(is-flow, min)\n scalar-end(is-flow) -> .\n peek(newline + (indent < min)) -> .\n [else] -> plain-scalar(min)\n*/\nfunction isEmpty(ch) {\n switch (ch) {\n case undefined:\n case ' ':\n case '\\n':\n case '\\r':\n case '\\t':\n return true;\n default:\n return false;\n }\n}\nconst hexDigits = '0123456789ABCDEFabcdef'.split('');\nconst tagChars = \"0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz-#;/?:@&=+$_.!~*'()\".split('');\nconst invalidFlowScalarChars = ',[]{}'.split('');\nconst invalidAnchorChars = ' ,[]{}\\n\\r\\t'.split('');\nconst isNotAnchorChar = (ch) => !ch || invalidAnchorChars.includes(ch);\n/**\n * Splits an input string into lexical tokens, i.e. 
smaller strings that are\n * easily identifiable by `tokens.tokenType()`.\n *\n * Lexing starts always in a \"stream\" context. Incomplete input may be buffered\n * until a complete token can be emitted.\n *\n * In addition to slices of the original input, the following control characters\n * may also be emitted:\n *\n * - `\\x02` (Start of Text): A document starts with the next token\n * - `\\x18` (Cancel): Unexpected end of flow-mode (indicates an error)\n * - `\\x1f` (Unit Separator): Next token is a scalar value\n * - `\\u{FEFF}` (Byte order mark): Emitted separately outside documents\n */\nclass Lexer {\n constructor() {\n /**\n * Flag indicating whether the end of the current buffer marks the end of\n * all input\n */\n this.atEnd = false;\n /**\n * Explicit indent set in block scalar header, as an offset from the current\n * minimum indent, so e.g. set to 1 from a header `|2+`. Set to -1 if not\n * explicitly set.\n */\n this.blockScalarIndent = -1;\n /**\n * Block scalars that include a + (keep) chomping indicator in their header\n * include trailing empty lines, which are otherwise excluded from the\n * scalar's contents.\n */\n this.blockScalarKeep = false;\n /** Current input */\n this.buffer = '';\n /**\n * Flag noting whether the map value indicator : can immediately follow this\n * node within a flow context.\n */\n this.flowKey = false;\n /** Count of surrounding flow collection levels. */\n this.flowLevel = 0;\n /**\n * Minimum level of indentation required for next lines to be parsed as a\n * part of the current scalar value.\n */\n this.indentNext = 0;\n /** Indentation level of the current line. */\n this.indentValue = 0;\n /** Position of the next \\n character. */\n this.lineEndPos = null;\n /** Stores the state of the lexer if reaching the end of incpomplete input */\n this.next = null;\n /** A pointer to `buffer`; the current position of the lexer. */\n this.pos = 0;\n }\n /**\n * Generate YAML tokens from the `source` string. If `incomplete`,\n * a part of the last line may be left as a buffer for the next call.\n *\n * @returns A generator of lexical tokens\n */\n *lex(source, incomplete = false) {\n if (source) {\n this.buffer = this.buffer ? this.buffer + source : source;\n this.lineEndPos = null;\n }\n this.atEnd = !incomplete;\n let next = this.next ?? 'stream';\n while (next && (incomplete || this.hasChars(1)))\n next = yield* this.parseNext(next);\n }\n atLineEnd() {\n let i = this.pos;\n let ch = this.buffer[i];\n while (ch === ' ' || ch === '\\t')\n ch = this.buffer[++i];\n if (!ch || ch === '#' || ch === '\\n')\n return true;\n if (ch === '\\r')\n return this.buffer[i + 1] === '\\n';\n return false;\n }\n charAt(n) {\n return this.buffer[this.pos + n];\n }\n continueScalar(offset) {\n let ch = this.buffer[offset];\n if (this.indentNext > 0) {\n let indent = 0;\n while (ch === ' ')\n ch = this.buffer[++indent + offset];\n if (ch === '\\r') {\n const next = this.buffer[indent + offset + 1];\n if (next === '\\n' || (!next && !this.atEnd))\n return offset + indent + 1;\n }\n return ch === '\\n' || indent >= this.indentNext || (!ch && !this.atEnd)\n ? 
offset + indent\n : -1;\n }\n if (ch === '-' || ch === '.') {\n const dt = this.buffer.substr(offset, 3);\n if ((dt === '---' || dt === '...') && isEmpty(this.buffer[offset + 3]))\n return -1;\n }\n return offset;\n }\n getLine() {\n let end = this.lineEndPos;\n if (typeof end !== 'number' || (end !== -1 && end < this.pos)) {\n end = this.buffer.indexOf('\\n', this.pos);\n this.lineEndPos = end;\n }\n if (end === -1)\n return this.atEnd ? this.buffer.substring(this.pos) : null;\n if (this.buffer[end - 1] === '\\r')\n end -= 1;\n return this.buffer.substring(this.pos, end);\n }\n hasChars(n) {\n return this.pos + n <= this.buffer.length;\n }\n setNext(state) {\n this.buffer = this.buffer.substring(this.pos);\n this.pos = 0;\n this.lineEndPos = null;\n this.next = state;\n return null;\n }\n peek(n) {\n return this.buffer.substr(this.pos, n);\n }\n *parseNext(next) {\n switch (next) {\n case 'stream':\n return yield* this.parseStream();\n case 'line-start':\n return yield* this.parseLineStart();\n case 'block-start':\n return yield* this.parseBlockStart();\n case 'doc':\n return yield* this.parseDocument();\n case 'flow':\n return yield* this.parseFlowCollection();\n case 'quoted-scalar':\n return yield* this.parseQuotedScalar();\n case 'block-scalar':\n return yield* this.parseBlockScalar();\n case 'plain-scalar':\n return yield* this.parsePlainScalar();\n }\n }\n *parseStream() {\n let line = this.getLine();\n if (line === null)\n return this.setNext('stream');\n if (line[0] === cst.BOM) {\n yield* this.pushCount(1);\n line = line.substring(1);\n }\n if (line[0] === '%') {\n let dirEnd = line.length;\n const cs = line.indexOf('#');\n if (cs !== -1) {\n const ch = line[cs - 1];\n if (ch === ' ' || ch === '\\t')\n dirEnd = cs - 1;\n }\n while (true) {\n const ch = line[dirEnd - 1];\n if (ch === ' ' || ch === '\\t')\n dirEnd -= 1;\n else\n break;\n }\n const n = (yield* this.pushCount(dirEnd)) + (yield* this.pushSpaces(true));\n yield* this.pushCount(line.length - n); // possible comment\n this.pushNewline();\n return 'stream';\n }\n if (this.atLineEnd()) {\n const sp = yield* this.pushSpaces(true);\n yield* this.pushCount(line.length - sp);\n yield* this.pushNewline();\n return 'stream';\n }\n yield cst.DOCUMENT;\n return yield* this.parseLineStart();\n }\n *parseLineStart() {\n const ch = this.charAt(0);\n if (!ch && !this.atEnd)\n return this.setNext('line-start');\n if (ch === '-' || ch === '.') {\n if (!this.atEnd && !this.hasChars(4))\n return this.setNext('line-start');\n const s = this.peek(3);\n if (s === '---' && isEmpty(this.charAt(3))) {\n yield* this.pushCount(3);\n this.indentValue = 0;\n this.indentNext = 0;\n return 'doc';\n }\n else if (s === '...' && isEmpty(this.charAt(3))) {\n yield* this.pushCount(3);\n return 'stream';\n }\n }\n this.indentValue = yield* this.pushSpaces(false);\n if (this.indentNext > this.indentValue && !isEmpty(this.charAt(1)))\n this.indentNext = this.indentValue;\n return yield* this.parseBlockStart();\n }\n *parseBlockStart() {\n const [ch0, ch1] = this.peek(2);\n if (!ch1 && !this.atEnd)\n return this.setNext('block-start');\n if ((ch0 === '-' || ch0 === '?' 
|| ch0 === ':') && isEmpty(ch1)) {\n const n = (yield* this.pushCount(1)) + (yield* this.pushSpaces(true));\n this.indentNext = this.indentValue + 1;\n this.indentValue += n;\n return yield* this.parseBlockStart();\n }\n return 'doc';\n }\n *parseDocument() {\n yield* this.pushSpaces(true);\n const line = this.getLine();\n if (line === null)\n return this.setNext('doc');\n let n = yield* this.pushIndicators();\n switch (line[n]) {\n case '#':\n yield* this.pushCount(line.length - n);\n // fallthrough\n case undefined:\n yield* this.pushNewline();\n return yield* this.parseLineStart();\n case '{':\n case '[':\n yield* this.pushCount(1);\n this.flowKey = false;\n this.flowLevel = 1;\n return 'flow';\n case '}':\n case ']':\n // this is an error\n yield* this.pushCount(1);\n return 'doc';\n case '*':\n yield* this.pushUntil(isNotAnchorChar);\n return 'doc';\n case '\"':\n case \"'\":\n return yield* this.parseQuotedScalar();\n case '|':\n case '>':\n n += yield* this.parseBlockScalarHeader();\n n += yield* this.pushSpaces(true);\n yield* this.pushCount(line.length - n);\n yield* this.pushNewline();\n return yield* this.parseBlockScalar();\n default:\n return yield* this.parsePlainScalar();\n }\n }\n *parseFlowCollection() {\n let nl, sp;\n let indent = -1;\n do {\n nl = yield* this.pushNewline();\n if (nl > 0) {\n sp = yield* this.pushSpaces(false);\n this.indentValue = indent = sp;\n }\n else {\n sp = 0;\n }\n sp += yield* this.pushSpaces(true);\n } while (nl + sp > 0);\n const line = this.getLine();\n if (line === null)\n return this.setNext('flow');\n if ((indent !== -1 && indent < this.indentNext && line[0] !== '#') ||\n (indent === 0 &&\n (line.startsWith('---') || line.startsWith('...')) &&\n isEmpty(line[3]))) {\n // Allowing for the terminal ] or } at the same (rather than greater)\n // indent level as the initial [ or { is technically invalid, but\n // failing here would be surprising to users.\n const atFlowEndMarker = indent === this.indentNext - 1 &&\n this.flowLevel === 1 &&\n (line[0] === ']' || line[0] === '}');\n if (!atFlowEndMarker) {\n // this is an error\n this.flowLevel = 0;\n yield cst.FLOW_END;\n return yield* this.parseLineStart();\n }\n }\n let n = 0;\n while (line[n] === ',') {\n n += yield* this.pushCount(1);\n n += yield* this.pushSpaces(true);\n this.flowKey = false;\n }\n n += yield* this.pushIndicators();\n switch (line[n]) {\n case undefined:\n return 'flow';\n case '#':\n yield* this.pushCount(line.length - n);\n return 'flow';\n case '{':\n case '[':\n yield* this.pushCount(1);\n this.flowKey = false;\n this.flowLevel += 1;\n return 'flow';\n case '}':\n case ']':\n yield* this.pushCount(1);\n this.flowKey = true;\n this.flowLevel -= 1;\n return this.flowLevel ? 
'flow' : 'doc';\n case '*':\n yield* this.pushUntil(isNotAnchorChar);\n return 'flow';\n case '\"':\n case \"'\":\n this.flowKey = true;\n return yield* this.parseQuotedScalar();\n case ':': {\n const next = this.charAt(1);\n if (this.flowKey || isEmpty(next) || next === ',') {\n this.flowKey = false;\n yield* this.pushCount(1);\n yield* this.pushSpaces(true);\n return 'flow';\n }\n }\n // fallthrough\n default:\n this.flowKey = false;\n return yield* this.parsePlainScalar();\n }\n }\n *parseQuotedScalar() {\n const quote = this.charAt(0);\n let end = this.buffer.indexOf(quote, this.pos + 1);\n if (quote === \"'\") {\n while (end !== -1 && this.buffer[end + 1] === \"'\")\n end = this.buffer.indexOf(\"'\", end + 2);\n }\n else {\n // double-quote\n while (end !== -1) {\n let n = 0;\n while (this.buffer[end - 1 - n] === '\\\\')\n n += 1;\n if (n % 2 === 0)\n break;\n end = this.buffer.indexOf('\"', end + 1);\n }\n }\n // Only looking for newlines within the quotes\n const qb = this.buffer.substring(0, end);\n let nl = qb.indexOf('\\n', this.pos);\n if (nl !== -1) {\n while (nl !== -1) {\n const cs = this.continueScalar(nl + 1);\n if (cs === -1)\n break;\n nl = qb.indexOf('\\n', cs);\n }\n if (nl !== -1) {\n // this is an error caused by an unexpected unindent\n end = nl - (qb[nl - 1] === '\\r' ? 2 : 1);\n }\n }\n if (end === -1) {\n if (!this.atEnd)\n return this.setNext('quoted-scalar');\n end = this.buffer.length;\n }\n yield* this.pushToIndex(end + 1, false);\n return this.flowLevel ? 'flow' : 'doc';\n }\n *parseBlockScalarHeader() {\n this.blockScalarIndent = -1;\n this.blockScalarKeep = false;\n let i = this.pos;\n while (true) {\n const ch = this.buffer[++i];\n if (ch === '+')\n this.blockScalarKeep = true;\n else if (ch > '0' && ch <= '9')\n this.blockScalarIndent = Number(ch) - 1;\n else if (ch !== '-')\n break;\n }\n return yield* this.pushUntil(ch => isEmpty(ch) || ch === '#');\n }\n *parseBlockScalar() {\n let nl = this.pos - 1; // may be -1 if this.pos === 0\n let indent = 0;\n let ch;\n loop: for (let i = this.pos; (ch = this.buffer[i]); ++i) {\n switch (ch) {\n case ' ':\n indent += 1;\n break;\n case '\\n':\n nl = i;\n indent = 0;\n break;\n case '\\r': {\n const next = this.buffer[i + 1];\n if (!next && !this.atEnd)\n return this.setNext('block-scalar');\n if (next === '\\n')\n break;\n } // fallthrough\n default:\n break loop;\n }\n }\n if (!ch && !this.atEnd)\n return this.setNext('block-scalar');\n if (indent >= this.indentNext) {\n if (this.blockScalarIndent === -1)\n this.indentNext = indent;\n else\n this.indentNext += this.blockScalarIndent;\n do {\n const cs = this.continueScalar(nl + 1);\n if (cs === -1)\n break;\n nl = this.buffer.indexOf('\\n', cs);\n } while (nl !== -1);\n if (nl === -1) {\n if (!this.atEnd)\n return this.setNext('block-scalar');\n nl = this.buffer.length;\n }\n }\n if (!this.blockScalarKeep) {\n do {\n let i = nl - 1;\n let ch = this.buffer[i];\n if (ch === '\\r')\n ch = this.buffer[--i];\n const lastChar = i; // Drop the line if last char not more indented\n while (ch === ' ' || ch === '\\t')\n ch = this.buffer[--i];\n if (ch === '\\n' && i >= this.pos && i + 1 + indent > lastChar)\n nl = i;\n else\n break;\n } while (true);\n }\n yield cst.SCALAR;\n yield* this.pushToIndex(nl + 1, true);\n return yield* this.parseLineStart();\n }\n *parsePlainScalar() {\n const inFlow = this.flowLevel > 0;\n let end = this.pos - 1;\n let i = this.pos - 1;\n let ch;\n while ((ch = this.buffer[++i])) {\n if (ch === ':') {\n const next = this.buffer[i + 1];\n if 
(isEmpty(next) || (inFlow && next === ','))\n break;\n end = i;\n }\n else if (isEmpty(ch)) {\n let next = this.buffer[i + 1];\n if (ch === '\\r') {\n if (next === '\\n') {\n i += 1;\n ch = '\\n';\n next = this.buffer[i + 1];\n }\n else\n end = i;\n }\n if (next === '#' || (inFlow && invalidFlowScalarChars.includes(next)))\n break;\n if (ch === '\\n') {\n const cs = this.continueScalar(i + 1);\n if (cs === -1)\n break;\n i = Math.max(i, cs - 2); // to advance, but still account for ' #'\n }\n }\n else {\n if (inFlow && invalidFlowScalarChars.includes(ch))\n break;\n end = i;\n }\n }\n if (!ch && !this.atEnd)\n return this.setNext('plain-scalar');\n yield cst.SCALAR;\n yield* this.pushToIndex(end + 1, true);\n return inFlow ? 'flow' : 'doc';\n }\n *pushCount(n) {\n if (n > 0) {\n yield this.buffer.substr(this.pos, n);\n this.pos += n;\n return n;\n }\n return 0;\n }\n *pushToIndex(i, allowEmpty) {\n const s = this.buffer.slice(this.pos, i);\n if (s) {\n yield s;\n this.pos += s.length;\n return s.length;\n }\n else if (allowEmpty)\n yield '';\n return 0;\n }\n *pushIndicators() {\n switch (this.charAt(0)) {\n case '!':\n return ((yield* this.pushTag()) +\n (yield* this.pushSpaces(true)) +\n (yield* this.pushIndicators()));\n case '&':\n return ((yield* this.pushUntil(isNotAnchorChar)) +\n (yield* this.pushSpaces(true)) +\n (yield* this.pushIndicators()));\n case '-': // this is an error\n case '?': // this is an error outside flow collections\n case ':': {\n const inFlow = this.flowLevel > 0;\n const ch1 = this.charAt(1);\n if (isEmpty(ch1) || (inFlow && invalidFlowScalarChars.includes(ch1))) {\n if (!inFlow)\n this.indentNext = this.indentValue + 1;\n else if (this.flowKey)\n this.flowKey = false;\n return ((yield* this.pushCount(1)) +\n (yield* this.pushSpaces(true)) +\n (yield* this.pushIndicators()));\n }\n }\n }\n return 0;\n }\n *pushTag() {\n if (this.charAt(1) === '<') {\n let i = this.pos + 2;\n let ch = this.buffer[i];\n while (!isEmpty(ch) && ch !== '>')\n ch = this.buffer[++i];\n return yield* this.pushToIndex(ch === '>' ? i + 1 : i, false);\n }\n else {\n let i = this.pos + 1;\n let ch = this.buffer[i];\n while (ch) {\n if (tagChars.includes(ch))\n ch = this.buffer[++i];\n else if (ch === '%' &&\n hexDigits.includes(this.buffer[i + 1]) &&\n hexDigits.includes(this.buffer[i + 2])) {\n ch = this.buffer[(i += 3)];\n }\n else\n break;\n }\n return yield* this.pushToIndex(i, false);\n }\n }\n *pushNewline() {\n const ch = this.buffer[this.pos];\n if (ch === '\\n')\n return yield* this.pushCount(1);\n else if (ch === '\\r' && this.charAt(1) === '\\n')\n return yield* this.pushCount(2);\n else\n return 0;\n }\n *pushSpaces(allowTabs) {\n let i = this.pos - 1;\n let ch;\n do {\n ch = this.buffer[++i];\n } while (ch === ' ' || (allowTabs && ch === '\\t'));\n const n = i - this.pos;\n if (n > 0) {\n yield this.buffer.substr(this.pos, n);\n this.pos = i;\n }\n return n;\n }\n *pushUntil(test) {\n let i = this.pos;\n let ch = this.buffer[i];\n while (!test(ch))\n ch = this.buffer[++i];\n return yield* this.pushToIndex(i, false);\n }\n}\n\nexports.Lexer = Lexer;\n","'use strict';\n\n/**\n * Tracks newlines during parsing in order to provide an efficient API for\n * determining the one-indexed `{ line, col }` position for any offset\n * within the input.\n */\nclass LineCounter {\n constructor() {\n this.lineStarts = [];\n /**\n * Should be called in ascending order. 
Otherwise, call\n * `lineCounter.lineStarts.sort()` before calling `linePos()`.\n */\n this.addNewLine = (offset) => this.lineStarts.push(offset);\n /**\n * Performs a binary search and returns the 1-indexed { line, col }\n * position of `offset`. If `line === 0`, `addNewLine` has never been\n * called or `offset` is before the first known newline.\n */\n this.linePos = (offset) => {\n let low = 0;\n let high = this.lineStarts.length;\n while (low < high) {\n const mid = (low + high) >> 1; // Math.floor((low + high) / 2)\n if (this.lineStarts[mid] < offset)\n low = mid + 1;\n else\n high = mid;\n }\n if (this.lineStarts[low] === offset)\n return { line: low + 1, col: 1 };\n if (low === 0)\n return { line: 0, col: offset };\n const start = this.lineStarts[low - 1];\n return { line: low, col: offset - start + 1 };\n };\n }\n}\n\nexports.LineCounter = LineCounter;\n","'use strict';\n\nvar cst = require('./cst.js');\nvar lexer = require('./lexer.js');\n\nfunction includesToken(list, type) {\n for (let i = 0; i < list.length; ++i)\n if (list[i].type === type)\n return true;\n return false;\n}\nfunction findNonEmptyIndex(list) {\n for (let i = 0; i < list.length; ++i) {\n switch (list[i].type) {\n case 'space':\n case 'comment':\n case 'newline':\n break;\n default:\n return i;\n }\n }\n return -1;\n}\nfunction isFlowToken(token) {\n switch (token?.type) {\n case 'alias':\n case 'scalar':\n case 'single-quoted-scalar':\n case 'double-quoted-scalar':\n case 'flow-collection':\n return true;\n default:\n return false;\n }\n}\nfunction getPrevProps(parent) {\n switch (parent.type) {\n case 'document':\n return parent.start;\n case 'block-map': {\n const it = parent.items[parent.items.length - 1];\n return it.sep ?? it.start;\n }\n case 'block-seq':\n return parent.items[parent.items.length - 1].start;\n /* istanbul ignore next should not happen */\n default:\n return [];\n }\n}\n/** Note: May modify input array */\nfunction getFirstKeyStartProps(prev) {\n if (prev.length === 0)\n return [];\n let i = prev.length;\n loop: while (--i >= 0) {\n switch (prev[i].type) {\n case 'doc-start':\n case 'explicit-key-ind':\n case 'map-value-ind':\n case 'seq-item-ind':\n case 'newline':\n break loop;\n }\n }\n while (prev[++i]?.type === 'space') {\n /* loop */\n }\n return prev.splice(i, prev.length);\n}\nfunction fixFlowSeqItems(fc) {\n if (fc.start.type === 'flow-seq-start') {\n for (const it of fc.items) {\n if (it.sep &&\n !it.value &&\n !includesToken(it.start, 'explicit-key-ind') &&\n !includesToken(it.sep, 'map-value-ind')) {\n if (it.key)\n it.value = it.key;\n delete it.key;\n if (isFlowToken(it.value)) {\n if (it.value.end)\n Array.prototype.push.apply(it.value.end, it.sep);\n else\n it.value.end = it.sep;\n }\n else\n Array.prototype.push.apply(it.start, it.sep);\n delete it.sep;\n }\n }\n }\n}\n/**\n * A YAML concrete syntax tree (CST) parser\n *\n * ```ts\n * const src: string = ...\n * for (const token of new Parser().parse(src)) {\n * // token: Token\n * }\n * ```\n *\n * To use the parser with a user-provided lexer:\n *\n * ```ts\n * function* parse(source: string, lexer: Lexer) {\n * const parser = new Parser()\n * for (const lexeme of lexer.lex(source))\n * yield* parser.next(lexeme)\n * yield* parser.end()\n * }\n *\n * const src: string = ...\n * const lexer = new Lexer()\n * for (const token of parse(src, lexer)) {\n * // token: Token\n * }\n * ```\n */\nclass Parser {\n /**\n * @param onNewLine - If defined, called separately with the start position of\n * each new line (in `parse()`, 
including the start of input).\n */\n constructor(onNewLine) {\n /** If true, space and sequence indicators count as indentation */\n this.atNewLine = true;\n /** If true, next token is a scalar value */\n this.atScalar = false;\n /** Current indentation level */\n this.indent = 0;\n /** Current offset since the start of parsing */\n this.offset = 0;\n /** On the same line with a block map key */\n this.onKeyLine = false;\n /** Top indicates the node that's currently being built */\n this.stack = [];\n /** The source of the current token, set in parse() */\n this.source = '';\n /** The type of the current token, set in parse() */\n this.type = '';\n // Must be defined after `next()`\n this.lexer = new lexer.Lexer();\n this.onNewLine = onNewLine;\n }\n /**\n * Parse `source` as a YAML stream.\n * If `incomplete`, a part of the last line may be left as a buffer for the next call.\n *\n * Errors are not thrown, but yielded as `{ type: 'error', message }` tokens.\n *\n * @returns A generator of tokens representing each directive, document, and other structure.\n */\n *parse(source, incomplete = false) {\n if (this.onNewLine && this.offset === 0)\n this.onNewLine(0);\n for (const lexeme of this.lexer.lex(source, incomplete))\n yield* this.next(lexeme);\n if (!incomplete)\n yield* this.end();\n }\n /**\n * Advance the parser by the `source` of one lexical token.\n */\n *next(source) {\n this.source = source;\n if (process.env.LOG_TOKENS)\n console.log('|', cst.prettyToken(source));\n if (this.atScalar) {\n this.atScalar = false;\n yield* this.step();\n this.offset += source.length;\n return;\n }\n const type = cst.tokenType(source);\n if (!type) {\n const message = `Not a YAML token: ${source}`;\n yield* this.pop({ type: 'error', offset: this.offset, message, source });\n this.offset += source.length;\n }\n else if (type === 'scalar') {\n this.atNewLine = false;\n this.atScalar = true;\n this.type = 'scalar';\n }\n else {\n this.type = type;\n yield* this.step();\n switch (type) {\n case 'newline':\n this.atNewLine = true;\n this.indent = 0;\n if (this.onNewLine)\n this.onNewLine(this.offset + source.length);\n break;\n case 'space':\n if (this.atNewLine && source[0] === ' ')\n this.indent += source.length;\n break;\n case 'explicit-key-ind':\n case 'map-value-ind':\n case 'seq-item-ind':\n if (this.atNewLine)\n this.indent += source.length;\n break;\n case 'doc-mode':\n case 'flow-error-end':\n return;\n default:\n this.atNewLine = false;\n }\n this.offset += source.length;\n }\n }\n /** Call at end of input to push out any remaining constructions */\n *end() {\n while (this.stack.length > 0)\n yield* this.pop();\n }\n get sourceToken() {\n const st = {\n type: this.type,\n offset: this.offset,\n indent: this.indent,\n source: this.source\n };\n return st;\n }\n *step() {\n const top = this.peek(1);\n if (this.type === 'doc-end' && (!top || top.type !== 'doc-end')) {\n while (this.stack.length > 0)\n yield* this.pop();\n this.stack.push({\n type: 'doc-end',\n offset: this.offset,\n source: this.source\n });\n return;\n }\n if (!top)\n return yield* this.stream();\n switch (top.type) {\n case 'document':\n return yield* this.document(top);\n case 'alias':\n case 'scalar':\n case 'single-quoted-scalar':\n case 'double-quoted-scalar':\n return yield* this.scalar(top);\n case 'block-scalar':\n return yield* this.blockScalar(top);\n case 'block-map':\n return yield* this.blockMap(top);\n case 'block-seq':\n return yield* this.blockSequence(top);\n case 'flow-collection':\n return yield* 
this.flowCollection(top);\n case 'doc-end':\n return yield* this.documentEnd(top);\n }\n /* istanbul ignore next should not happen */\n yield* this.pop();\n }\n peek(n) {\n return this.stack[this.stack.length - n];\n }\n *pop(error) {\n const token = error ?? this.stack.pop();\n /* istanbul ignore if should not happen */\n if (!token) {\n const message = 'Tried to pop an empty stack';\n yield { type: 'error', offset: this.offset, source: '', message };\n }\n else if (this.stack.length === 0) {\n yield token;\n }\n else {\n const top = this.peek(1);\n if (token.type === 'block-scalar') {\n // Block scalars use their parent rather than header indent\n token.indent = 'indent' in top ? top.indent : 0;\n }\n else if (token.type === 'flow-collection' && top.type === 'document') {\n // Ignore all indent for top-level flow collections\n token.indent = 0;\n }\n if (token.type === 'flow-collection')\n fixFlowSeqItems(token);\n switch (top.type) {\n case 'document':\n top.value = token;\n break;\n case 'block-scalar':\n top.props.push(token); // error\n break;\n case 'block-map': {\n const it = top.items[top.items.length - 1];\n if (it.value) {\n top.items.push({ start: [], key: token, sep: [] });\n this.onKeyLine = true;\n return;\n }\n else if (it.sep) {\n it.value = token;\n }\n else {\n Object.assign(it, { key: token, sep: [] });\n this.onKeyLine = !includesToken(it.start, 'explicit-key-ind');\n return;\n }\n break;\n }\n case 'block-seq': {\n const it = top.items[top.items.length - 1];\n if (it.value)\n top.items.push({ start: [], value: token });\n else\n it.value = token;\n break;\n }\n case 'flow-collection': {\n const it = top.items[top.items.length - 1];\n if (!it || it.value)\n top.items.push({ start: [], key: token, sep: [] });\n else if (it.sep)\n it.value = token;\n else\n Object.assign(it, { key: token, sep: [] });\n return;\n }\n /* istanbul ignore next should not happen */\n default:\n yield* this.pop();\n yield* this.pop(token);\n }\n if ((top.type === 'document' ||\n top.type === 'block-map' ||\n top.type === 'block-seq') &&\n (token.type === 'block-map' || token.type === 'block-seq')) {\n const last = token.items[token.items.length - 1];\n if (last &&\n !last.sep &&\n !last.value &&\n last.start.length > 0 &&\n findNonEmptyIndex(last.start) === -1 &&\n (token.indent === 0 ||\n last.start.every(st => st.type !== 'comment' || st.indent < token.indent))) {\n if (top.type === 'document')\n top.end = last.start;\n else\n top.items.push({ start: last.start });\n token.items.splice(-1, 1);\n }\n }\n }\n }\n *stream() {\n switch (this.type) {\n case 'directive-line':\n yield { type: 'directive', offset: this.offset, source: this.source };\n return;\n case 'byte-order-mark':\n case 'space':\n case 'comment':\n case 'newline':\n yield this.sourceToken;\n return;\n case 'doc-mode':\n case 'doc-start': {\n const doc = {\n type: 'document',\n offset: this.offset,\n start: []\n };\n if (this.type === 'doc-start')\n doc.start.push(this.sourceToken);\n this.stack.push(doc);\n return;\n }\n }\n yield {\n type: 'error',\n offset: this.offset,\n message: `Unexpected ${this.type} token in YAML stream`,\n source: this.source\n };\n }\n *document(doc) {\n if (doc.value)\n return yield* this.lineEnd(doc);\n switch (this.type) {\n case 'doc-start': {\n if (findNonEmptyIndex(doc.start) !== -1) {\n yield* this.pop();\n yield* this.step();\n }\n else\n doc.start.push(this.sourceToken);\n return;\n }\n case 'anchor':\n case 'tag':\n case 'space':\n case 'comment':\n case 'newline':\n 
doc.start.push(this.sourceToken);\n return;\n }\n const bv = this.startBlockValue(doc);\n if (bv)\n this.stack.push(bv);\n else {\n yield {\n type: 'error',\n offset: this.offset,\n message: `Unexpected ${this.type} token in YAML document`,\n source: this.source\n };\n }\n }\n *scalar(scalar) {\n if (this.type === 'map-value-ind') {\n const prev = getPrevProps(this.peek(2));\n const start = getFirstKeyStartProps(prev);\n let sep;\n if (scalar.end) {\n sep = scalar.end;\n sep.push(this.sourceToken);\n delete scalar.end;\n }\n else\n sep = [this.sourceToken];\n const map = {\n type: 'block-map',\n offset: scalar.offset,\n indent: scalar.indent,\n items: [{ start, key: scalar, sep }]\n };\n this.onKeyLine = true;\n this.stack[this.stack.length - 1] = map;\n }\n else\n yield* this.lineEnd(scalar);\n }\n *blockScalar(scalar) {\n switch (this.type) {\n case 'space':\n case 'comment':\n case 'newline':\n scalar.props.push(this.sourceToken);\n return;\n case 'scalar':\n scalar.source = this.source;\n // block-scalar source includes trailing newline\n this.atNewLine = true;\n this.indent = 0;\n if (this.onNewLine) {\n let nl = this.source.indexOf('\\n') + 1;\n while (nl !== 0) {\n this.onNewLine(this.offset + nl);\n nl = this.source.indexOf('\\n', nl) + 1;\n }\n }\n yield* this.pop();\n break;\n /* istanbul ignore next should not happen */\n default:\n yield* this.pop();\n yield* this.step();\n }\n }\n *blockMap(map) {\n const it = map.items[map.items.length - 1];\n // it.sep is true-ish if pair already has key or : separator\n switch (this.type) {\n case 'newline':\n this.onKeyLine = false;\n if (it.value) {\n const end = 'end' in it.value ? it.value.end : undefined;\n const last = Array.isArray(end) ? end[end.length - 1] : undefined;\n if (last?.type === 'comment')\n end?.push(this.sourceToken);\n else\n map.items.push({ start: [this.sourceToken] });\n }\n else if (it.sep) {\n it.sep.push(this.sourceToken);\n }\n else {\n it.start.push(this.sourceToken);\n }\n return;\n case 'space':\n case 'comment':\n if (it.value) {\n map.items.push({ start: [this.sourceToken] });\n }\n else if (it.sep) {\n it.sep.push(this.sourceToken);\n }\n else {\n if (this.atIndentedComment(it.start, map.indent)) {\n const prev = map.items[map.items.length - 2];\n const end = prev?.value?.end;\n if (Array.isArray(end)) {\n Array.prototype.push.apply(end, it.start);\n end.push(this.sourceToken);\n map.items.pop();\n return;\n }\n }\n it.start.push(this.sourceToken);\n }\n return;\n }\n if (this.indent >= map.indent) {\n const atNextItem = !this.onKeyLine && this.indent === map.indent && it.sep;\n // For empty nodes, assign newline-separated not indented empty tokens to following node\n let start = [];\n if (atNextItem && it.sep && !it.value) {\n const nl = [];\n for (let i = 0; i < it.sep.length; ++i) {\n const st = it.sep[i];\n switch (st.type) {\n case 'newline':\n nl.push(i);\n break;\n case 'space':\n break;\n case 'comment':\n if (st.indent > map.indent)\n nl.length = 0;\n break;\n default:\n nl.length = 0;\n }\n }\n if (nl.length >= 2)\n start = it.sep.splice(nl[1]);\n }\n switch (this.type) {\n case 'anchor':\n case 'tag':\n if (atNextItem || it.value) {\n start.push(this.sourceToken);\n map.items.push({ start });\n this.onKeyLine = true;\n }\n else if (it.sep) {\n it.sep.push(this.sourceToken);\n }\n else {\n it.start.push(this.sourceToken);\n }\n return;\n case 'explicit-key-ind':\n if (!it.sep && !includesToken(it.start, 'explicit-key-ind')) {\n it.start.push(this.sourceToken);\n }\n else if (atNextItem || 
it.value) {\n start.push(this.sourceToken);\n map.items.push({ start });\n }\n else {\n this.stack.push({\n type: 'block-map',\n offset: this.offset,\n indent: this.indent,\n items: [{ start: [this.sourceToken] }]\n });\n }\n this.onKeyLine = true;\n return;\n case 'map-value-ind':\n if (includesToken(it.start, 'explicit-key-ind')) {\n if (!it.sep) {\n if (includesToken(it.start, 'newline')) {\n Object.assign(it, { key: null, sep: [this.sourceToken] });\n }\n else {\n const start = getFirstKeyStartProps(it.start);\n this.stack.push({\n type: 'block-map',\n offset: this.offset,\n indent: this.indent,\n items: [{ start, key: null, sep: [this.sourceToken] }]\n });\n }\n }\n else if (it.value) {\n map.items.push({ start: [], key: null, sep: [this.sourceToken] });\n }\n else if (includesToken(it.sep, 'map-value-ind')) {\n this.stack.push({\n type: 'block-map',\n offset: this.offset,\n indent: this.indent,\n items: [{ start, key: null, sep: [this.sourceToken] }]\n });\n }\n else if (isFlowToken(it.key) &&\n !includesToken(it.sep, 'newline')) {\n const start = getFirstKeyStartProps(it.start);\n const key = it.key;\n const sep = it.sep;\n sep.push(this.sourceToken);\n // @ts-expect-error type guard is wrong here\n delete it.key, delete it.sep;\n this.stack.push({\n type: 'block-map',\n offset: this.offset,\n indent: this.indent,\n items: [{ start, key, sep }]\n });\n }\n else if (start.length > 0) {\n // Not actually at next item\n it.sep = it.sep.concat(start, this.sourceToken);\n }\n else {\n it.sep.push(this.sourceToken);\n }\n }\n else {\n if (!it.sep) {\n Object.assign(it, { key: null, sep: [this.sourceToken] });\n }\n else if (it.value || atNextItem) {\n map.items.push({ start, key: null, sep: [this.sourceToken] });\n }\n else if (includesToken(it.sep, 'map-value-ind')) {\n this.stack.push({\n type: 'block-map',\n offset: this.offset,\n indent: this.indent,\n items: [{ start: [], key: null, sep: [this.sourceToken] }]\n });\n }\n else {\n it.sep.push(this.sourceToken);\n }\n }\n this.onKeyLine = true;\n return;\n case 'alias':\n case 'scalar':\n case 'single-quoted-scalar':\n case 'double-quoted-scalar': {\n const fs = this.flowScalar(this.type);\n if (atNextItem || it.value) {\n map.items.push({ start, key: fs, sep: [] });\n this.onKeyLine = true;\n }\n else if (it.sep) {\n this.stack.push(fs);\n }\n else {\n Object.assign(it, { key: fs, sep: [] });\n this.onKeyLine = true;\n }\n return;\n }\n default: {\n const bv = this.startBlockValue(map);\n if (bv) {\n if (atNextItem &&\n bv.type !== 'block-seq' &&\n includesToken(it.start, 'explicit-key-ind')) {\n map.items.push({ start });\n }\n this.stack.push(bv);\n return;\n }\n }\n }\n }\n yield* this.pop();\n yield* this.step();\n }\n *blockSequence(seq) {\n const it = seq.items[seq.items.length - 1];\n switch (this.type) {\n case 'newline':\n if (it.value) {\n const end = 'end' in it.value ? it.value.end : undefined;\n const last = Array.isArray(end) ? 
end[end.length - 1] : undefined;\n if (last?.type === 'comment')\n end?.push(this.sourceToken);\n else\n seq.items.push({ start: [this.sourceToken] });\n }\n else\n it.start.push(this.sourceToken);\n return;\n case 'space':\n case 'comment':\n if (it.value)\n seq.items.push({ start: [this.sourceToken] });\n else {\n if (this.atIndentedComment(it.start, seq.indent)) {\n const prev = seq.items[seq.items.length - 2];\n const end = prev?.value?.end;\n if (Array.isArray(end)) {\n Array.prototype.push.apply(end, it.start);\n end.push(this.sourceToken);\n seq.items.pop();\n return;\n }\n }\n it.start.push(this.sourceToken);\n }\n return;\n case 'anchor':\n case 'tag':\n if (it.value || this.indent <= seq.indent)\n break;\n it.start.push(this.sourceToken);\n return;\n case 'seq-item-ind':\n if (this.indent !== seq.indent)\n break;\n if (it.value || includesToken(it.start, 'seq-item-ind'))\n seq.items.push({ start: [this.sourceToken] });\n else\n it.start.push(this.sourceToken);\n return;\n }\n if (this.indent > seq.indent) {\n const bv = this.startBlockValue(seq);\n if (bv) {\n this.stack.push(bv);\n return;\n }\n }\n yield* this.pop();\n yield* this.step();\n }\n *flowCollection(fc) {\n const it = fc.items[fc.items.length - 1];\n if (this.type === 'flow-error-end') {\n let top;\n do {\n yield* this.pop();\n top = this.peek(1);\n } while (top && top.type === 'flow-collection');\n }\n else if (fc.end.length === 0) {\n switch (this.type) {\n case 'comma':\n case 'explicit-key-ind':\n if (!it || it.sep)\n fc.items.push({ start: [this.sourceToken] });\n else\n it.start.push(this.sourceToken);\n return;\n case 'map-value-ind':\n if (!it || it.value)\n fc.items.push({ start: [], key: null, sep: [this.sourceToken] });\n else if (it.sep)\n it.sep.push(this.sourceToken);\n else\n Object.assign(it, { key: null, sep: [this.sourceToken] });\n return;\n case 'space':\n case 'comment':\n case 'newline':\n case 'anchor':\n case 'tag':\n if (!it || it.value)\n fc.items.push({ start: [this.sourceToken] });\n else if (it.sep)\n it.sep.push(this.sourceToken);\n else\n it.start.push(this.sourceToken);\n return;\n case 'alias':\n case 'scalar':\n case 'single-quoted-scalar':\n case 'double-quoted-scalar': {\n const fs = this.flowScalar(this.type);\n if (!it || it.value)\n fc.items.push({ start: [], key: fs, sep: [] });\n else if (it.sep)\n this.stack.push(fs);\n else\n Object.assign(it, { key: fs, sep: [] });\n return;\n }\n case 'flow-map-end':\n case 'flow-seq-end':\n fc.end.push(this.sourceToken);\n return;\n }\n const bv = this.startBlockValue(fc);\n /* istanbul ignore else should not happen */\n if (bv)\n this.stack.push(bv);\n else {\n yield* this.pop();\n yield* this.step();\n }\n }\n else {\n const parent = this.peek(2);\n if (parent.type === 'block-map' &&\n ((this.type === 'map-value-ind' && parent.indent === fc.indent) ||\n (this.type === 'newline' &&\n !parent.items[parent.items.length - 1].sep))) {\n yield* this.pop();\n yield* this.step();\n }\n else if (this.type === 'map-value-ind' &&\n parent.type !== 'flow-collection') {\n const prev = getPrevProps(parent);\n const start = getFirstKeyStartProps(prev);\n fixFlowSeqItems(fc);\n const sep = fc.end.splice(1, fc.end.length);\n sep.push(this.sourceToken);\n const map = {\n type: 'block-map',\n offset: fc.offset,\n indent: fc.indent,\n items: [{ start, key: fc, sep }]\n };\n this.onKeyLine = true;\n this.stack[this.stack.length - 1] = map;\n }\n else {\n yield* this.lineEnd(fc);\n }\n }\n }\n flowScalar(type) {\n if (this.onNewLine) {\n let nl = 
this.source.indexOf('\\n') + 1;\n while (nl !== 0) {\n this.onNewLine(this.offset + nl);\n nl = this.source.indexOf('\\n', nl) + 1;\n }\n }\n return {\n type,\n offset: this.offset,\n indent: this.indent,\n source: this.source\n };\n }\n startBlockValue(parent) {\n switch (this.type) {\n case 'alias':\n case 'scalar':\n case 'single-quoted-scalar':\n case 'double-quoted-scalar':\n return this.flowScalar(this.type);\n case 'block-scalar-header':\n return {\n type: 'block-scalar',\n offset: this.offset,\n indent: this.indent,\n props: [this.sourceToken],\n source: ''\n };\n case 'flow-map-start':\n case 'flow-seq-start':\n return {\n type: 'flow-collection',\n offset: this.offset,\n indent: this.indent,\n start: this.sourceToken,\n items: [],\n end: []\n };\n case 'seq-item-ind':\n return {\n type: 'block-seq',\n offset: this.offset,\n indent: this.indent,\n items: [{ start: [this.sourceToken] }]\n };\n case 'explicit-key-ind': {\n this.onKeyLine = true;\n const prev = getPrevProps(parent);\n const start = getFirstKeyStartProps(prev);\n start.push(this.sourceToken);\n return {\n type: 'block-map',\n offset: this.offset,\n indent: this.indent,\n items: [{ start }]\n };\n }\n case 'map-value-ind': {\n this.onKeyLine = true;\n const prev = getPrevProps(parent);\n const start = getFirstKeyStartProps(prev);\n return {\n type: 'block-map',\n offset: this.offset,\n indent: this.indent,\n items: [{ start, key: null, sep: [this.sourceToken] }]\n };\n }\n }\n return null;\n }\n atIndentedComment(start, indent) {\n if (this.type !== 'comment')\n return false;\n if (this.indent <= indent)\n return false;\n return start.every(st => st.type === 'newline' || st.type === 'space');\n }\n *documentEnd(docEnd) {\n if (this.type !== 'doc-mode') {\n if (docEnd.end)\n docEnd.end.push(this.sourceToken);\n else\n docEnd.end = [this.sourceToken];\n if (this.type === 'newline')\n yield* this.pop();\n }\n }\n *lineEnd(token) {\n switch (this.type) {\n case 'comma':\n case 'doc-start':\n case 'doc-end':\n case 'flow-seq-end':\n case 'flow-map-end':\n case 'map-value-ind':\n yield* this.pop();\n yield* this.step();\n break;\n case 'newline':\n this.onKeyLine = false;\n // fallthrough\n case 'space':\n case 'comment':\n default:\n // all other values are errors\n if (token.end)\n token.end.push(this.sourceToken);\n else\n token.end = [this.sourceToken];\n if (this.type === 'newline')\n yield* this.pop();\n }\n }\n}\n\nexports.Parser = Parser;\n","'use strict';\n\nvar composer = require('./compose/composer.js');\nvar Document = require('./doc/Document.js');\nvar errors = require('./errors.js');\nvar log = require('./log.js');\nvar lineCounter = require('./parse/line-counter.js');\nvar parser = require('./parse/parser.js');\n\nfunction parseOptions(options) {\n const prettyErrors = options.prettyErrors !== false;\n const lineCounter$1 = options.lineCounter || (prettyErrors && new lineCounter.LineCounter()) || null;\n return { lineCounter: lineCounter$1, prettyErrors };\n}\n/**\n * Parse the input as a stream of YAML documents.\n *\n * Documents should be separated from each other by `...` or `---` marker lines.\n *\n * @returns If an empty `docs` array is returned, it will be of type\n * EmptyStream and contain additional stream information. 
In\n * TypeScript, you should use `'empty' in docs` as a type guard for it.\n */\nfunction parseAllDocuments(source, options = {}) {\n const { lineCounter, prettyErrors } = parseOptions(options);\n const parser$1 = new parser.Parser(lineCounter?.addNewLine);\n const composer$1 = new composer.Composer(options);\n const docs = Array.from(composer$1.compose(parser$1.parse(source)));\n if (prettyErrors && lineCounter)\n for (const doc of docs) {\n doc.errors.forEach(errors.prettifyError(source, lineCounter));\n doc.warnings.forEach(errors.prettifyError(source, lineCounter));\n }\n if (docs.length > 0)\n return docs;\n return Object.assign([], { empty: true }, composer$1.streamInfo());\n}\n/** Parse an input string into a single YAML.Document */\nfunction parseDocument(source, options = {}) {\n const { lineCounter, prettyErrors } = parseOptions(options);\n const parser$1 = new parser.Parser(lineCounter?.addNewLine);\n const composer$1 = new composer.Composer(options);\n // `doc` is always set by compose.end(true) at the very latest\n let doc = null;\n for (const _doc of composer$1.compose(parser$1.parse(source), true, source.length)) {\n if (!doc)\n doc = _doc;\n else if (doc.options.logLevel !== 'silent') {\n doc.errors.push(new errors.YAMLParseError(_doc.range.slice(0, 2), 'MULTIPLE_DOCS', 'Source contains multiple documents; please use YAML.parseAllDocuments()'));\n break;\n }\n }\n if (prettyErrors && lineCounter) {\n doc.errors.forEach(errors.prettifyError(source, lineCounter));\n doc.warnings.forEach(errors.prettifyError(source, lineCounter));\n }\n return doc;\n}\nfunction parse(src, reviver, options) {\n let _reviver = undefined;\n if (typeof reviver === 'function') {\n _reviver = reviver;\n }\n else if (options === undefined && reviver && typeof reviver === 'object') {\n options = reviver;\n }\n const doc = parseDocument(src, options);\n if (!doc)\n return null;\n doc.warnings.forEach(warning => log.warn(doc.options.logLevel, warning));\n if (doc.errors.length > 0) {\n if (doc.options.logLevel !== 'silent')\n throw doc.errors[0];\n else\n doc.errors = [];\n }\n return doc.toJS(Object.assign({ reviver: _reviver }, options));\n}\nfunction stringify(value, replacer, options) {\n let _replacer = null;\n if (typeof replacer === 'function' || Array.isArray(replacer)) {\n _replacer = replacer;\n }\n else if (options === undefined && replacer) {\n options = replacer;\n }\n if (typeof options === 'string')\n options = options.length;\n if (typeof options === 'number') {\n const indent = Math.round(options);\n options = indent < 1 ? undefined : indent > 8 ? { indent: 8 } : { indent };\n }\n if (value === undefined) {\n const { keepUndefined } = options ?? replacer ?? {};\n if (!keepUndefined)\n return undefined;\n }\n return new Document.Document(value, _replacer, options).toString(options);\n}\n\nexports.parse = parse;\nexports.parseAllDocuments = parseAllDocuments;\nexports.parseDocument = parseDocument;\nexports.stringify = stringify;\n","'use strict';\n\nvar Node = require('../nodes/Node.js');\nvar map = require('./common/map.js');\nvar seq = require('./common/seq.js');\nvar string = require('./common/string.js');\nvar tags = require('./tags.js');\n\nconst sortMapEntriesByKey = (a, b) => a.key < b.key ? -1 : a.key > b.key ? 1 : 0;\nclass Schema {\n constructor({ compat, customTags, merge, resolveKnownTags, schema, sortMapEntries, toStringDefaults }) {\n this.compat = Array.isArray(compat)\n ? tags.getTags(compat, 'compat')\n : compat\n ? 
tags.getTags(null, compat)\n : null;\n this.merge = !!merge;\n this.name = (typeof schema === 'string' && schema) || 'core';\n this.knownTags = resolveKnownTags ? tags.coreKnownTags : {};\n this.tags = tags.getTags(customTags, this.name);\n this.toStringOptions = toStringDefaults ?? null;\n Object.defineProperty(this, Node.MAP, { value: map.map });\n Object.defineProperty(this, Node.SCALAR, { value: string.string });\n Object.defineProperty(this, Node.SEQ, { value: seq.seq });\n // Used by createMap()\n this.sortMapEntries =\n typeof sortMapEntries === 'function'\n ? sortMapEntries\n : sortMapEntries === true\n ? sortMapEntriesByKey\n : null;\n }\n clone() {\n const copy = Object.create(Schema.prototype, Object.getOwnPropertyDescriptors(this));\n copy.tags = this.tags.slice();\n return copy;\n }\n}\n\nexports.Schema = Schema;\n","'use strict';\n\nvar Node = require('../../nodes/Node.js');\nvar Pair = require('../../nodes/Pair.js');\nvar YAMLMap = require('../../nodes/YAMLMap.js');\n\nfunction createMap(schema, obj, ctx) {\n const { keepUndefined, replacer } = ctx;\n const map = new YAMLMap.YAMLMap(schema);\n const add = (key, value) => {\n if (typeof replacer === 'function')\n value = replacer.call(obj, key, value);\n else if (Array.isArray(replacer) && !replacer.includes(key))\n return;\n if (value !== undefined || keepUndefined)\n map.items.push(Pair.createPair(key, value, ctx));\n };\n if (obj instanceof Map) {\n for (const [key, value] of obj)\n add(key, value);\n }\n else if (obj && typeof obj === 'object') {\n for (const key of Object.keys(obj))\n add(key, obj[key]);\n }\n if (typeof schema.sortMapEntries === 'function') {\n map.items.sort(schema.sortMapEntries);\n }\n return map;\n}\nconst map = {\n collection: 'map',\n createNode: createMap,\n default: true,\n nodeClass: YAMLMap.YAMLMap,\n tag: 'tag:yaml.org,2002:map',\n resolve(map, onError) {\n if (!Node.isMap(map))\n onError('Expected a mapping for this tag');\n return map;\n }\n};\n\nexports.map = map;\n","'use strict';\n\nvar Scalar = require('../../nodes/Scalar.js');\n\nconst nullTag = {\n identify: value => value == null,\n createNode: () => new Scalar.Scalar(null),\n default: true,\n tag: 'tag:yaml.org,2002:null',\n test: /^(?:~|[Nn]ull|NULL)?$/,\n resolve: () => new Scalar.Scalar(null),\n stringify: ({ source }, ctx) => typeof source === 'string' && nullTag.test.test(source)\n ? source\n : ctx.options.nullStr\n};\n\nexports.nullTag = nullTag;\n","'use strict';\n\nvar createNode = require('../../doc/createNode.js');\nvar Node = require('../../nodes/Node.js');\nvar YAMLSeq = require('../../nodes/YAMLSeq.js');\n\nfunction createSeq(schema, obj, ctx) {\n const { replacer } = ctx;\n const seq = new YAMLSeq.YAMLSeq(schema);\n if (obj && Symbol.iterator in Object(obj)) {\n let i = 0;\n for (let it of obj) {\n if (typeof replacer === 'function') {\n const key = obj instanceof Set ? 
it : String(i++);\n it = replacer.call(obj, key, it);\n }\n seq.items.push(createNode.createNode(it, undefined, ctx));\n }\n }\n return seq;\n}\nconst seq = {\n collection: 'seq',\n createNode: createSeq,\n default: true,\n nodeClass: YAMLSeq.YAMLSeq,\n tag: 'tag:yaml.org,2002:seq',\n resolve(seq, onError) {\n if (!Node.isSeq(seq))\n onError('Expected a sequence for this tag');\n return seq;\n }\n};\n\nexports.seq = seq;\n","'use strict';\n\nvar stringifyString = require('../../stringify/stringifyString.js');\n\nconst string = {\n identify: value => typeof value === 'string',\n default: true,\n tag: 'tag:yaml.org,2002:str',\n resolve: str => str,\n stringify(item, ctx, onComment, onChompKeep) {\n ctx = Object.assign({ actualString: true }, ctx);\n return stringifyString.stringifyString(item, ctx, onComment, onChompKeep);\n }\n};\n\nexports.string = string;\n","'use strict';\n\nvar Scalar = require('../../nodes/Scalar.js');\n\nconst boolTag = {\n identify: value => typeof value === 'boolean',\n default: true,\n tag: 'tag:yaml.org,2002:bool',\n test: /^(?:[Tt]rue|TRUE|[Ff]alse|FALSE)$/,\n resolve: str => new Scalar.Scalar(str[0] === 't' || str[0] === 'T'),\n stringify({ source, value }, ctx) {\n if (source && boolTag.test.test(source)) {\n const sv = source[0] === 't' || source[0] === 'T';\n if (value === sv)\n return source;\n }\n return value ? ctx.options.trueStr : ctx.options.falseStr;\n }\n};\n\nexports.boolTag = boolTag;\n","'use strict';\n\nvar Scalar = require('../../nodes/Scalar.js');\nvar stringifyNumber = require('../../stringify/stringifyNumber.js');\n\nconst floatNaN = {\n identify: value => typeof value === 'number',\n default: true,\n tag: 'tag:yaml.org,2002:float',\n test: /^(?:[-+]?\\.(?:inf|Inf|INF|nan|NaN|NAN))$/,\n resolve: str => str.slice(-3).toLowerCase() === 'nan'\n ? NaN\n : str[0] === '-'\n ? Number.NEGATIVE_INFINITY\n : Number.POSITIVE_INFINITY,\n stringify: stringifyNumber.stringifyNumber\n};\nconst floatExp = {\n identify: value => typeof value === 'number',\n default: true,\n tag: 'tag:yaml.org,2002:float',\n format: 'EXP',\n test: /^[-+]?(?:\\.[0-9]+|[0-9]+(?:\\.[0-9]*)?)[eE][-+]?[0-9]+$/,\n resolve: str => parseFloat(str),\n stringify(node) {\n const num = Number(node.value);\n return isFinite(num) ? num.toExponential() : stringifyNumber.stringifyNumber(node);\n }\n};\nconst float = {\n identify: value => typeof value === 'number',\n default: true,\n tag: 'tag:yaml.org,2002:float',\n test: /^[-+]?(?:\\.[0-9]+|[0-9]+\\.[0-9]*)$/,\n resolve(str) {\n const node = new Scalar.Scalar(parseFloat(str));\n const dot = str.indexOf('.');\n if (dot !== -1 && str[str.length - 1] === '0')\n node.minFractionDigits = str.length - dot - 1;\n return node;\n },\n stringify: stringifyNumber.stringifyNumber\n};\n\nexports.float = float;\nexports.floatExp = floatExp;\nexports.floatNaN = floatNaN;\n","'use strict';\n\nvar stringifyNumber = require('../../stringify/stringifyNumber.js');\n\nconst intIdentify = (value) => typeof value === 'bigint' || Number.isInteger(value);\nconst intResolve = (str, offset, radix, { intAsBigInt }) => (intAsBigInt ? 
BigInt(str) : parseInt(str.substring(offset), radix));\nfunction intStringify(node, radix, prefix) {\n const { value } = node;\n if (intIdentify(value) && value >= 0)\n return prefix + value.toString(radix);\n return stringifyNumber.stringifyNumber(node);\n}\nconst intOct = {\n identify: value => intIdentify(value) && value >= 0,\n default: true,\n tag: 'tag:yaml.org,2002:int',\n format: 'OCT',\n test: /^0o[0-7]+$/,\n resolve: (str, _onError, opt) => intResolve(str, 2, 8, opt),\n stringify: node => intStringify(node, 8, '0o')\n};\nconst int = {\n identify: intIdentify,\n default: true,\n tag: 'tag:yaml.org,2002:int',\n test: /^[-+]?[0-9]+$/,\n resolve: (str, _onError, opt) => intResolve(str, 0, 10, opt),\n stringify: stringifyNumber.stringifyNumber\n};\nconst intHex = {\n identify: value => intIdentify(value) && value >= 0,\n default: true,\n tag: 'tag:yaml.org,2002:int',\n format: 'HEX',\n test: /^0x[0-9a-fA-F]+$/,\n resolve: (str, _onError, opt) => intResolve(str, 2, 16, opt),\n stringify: node => intStringify(node, 16, '0x')\n};\n\nexports.int = int;\nexports.intHex = intHex;\nexports.intOct = intOct;\n","'use strict';\n\nvar map = require('../common/map.js');\nvar _null = require('../common/null.js');\nvar seq = require('../common/seq.js');\nvar string = require('../common/string.js');\nvar bool = require('./bool.js');\nvar float = require('./float.js');\nvar int = require('./int.js');\n\nconst schema = [\n map.map,\n seq.seq,\n string.string,\n _null.nullTag,\n bool.boolTag,\n int.intOct,\n int.int,\n int.intHex,\n float.floatNaN,\n float.floatExp,\n float.float\n];\n\nexports.schema = schema;\n","'use strict';\n\nvar Scalar = require('../../nodes/Scalar.js');\nvar map = require('../common/map.js');\nvar seq = require('../common/seq.js');\n\nfunction intIdentify(value) {\n return typeof value === 'bigint' || Number.isInteger(value);\n}\nconst stringifyJSON = ({ value }) => JSON.stringify(value);\nconst jsonScalars = [\n {\n identify: value => typeof value === 'string',\n default: true,\n tag: 'tag:yaml.org,2002:str',\n resolve: str => str,\n stringify: stringifyJSON\n },\n {\n identify: value => value == null,\n createNode: () => new Scalar.Scalar(null),\n default: true,\n tag: 'tag:yaml.org,2002:null',\n test: /^null$/,\n resolve: () => null,\n stringify: stringifyJSON\n },\n {\n identify: value => typeof value === 'boolean',\n default: true,\n tag: 'tag:yaml.org,2002:bool',\n test: /^true|false$/,\n resolve: str => str === 'true',\n stringify: stringifyJSON\n },\n {\n identify: intIdentify,\n default: true,\n tag: 'tag:yaml.org,2002:int',\n test: /^-?(?:0|[1-9][0-9]*)$/,\n resolve: (str, _onError, { intAsBigInt }) => intAsBigInt ? BigInt(str) : parseInt(str, 10),\n stringify: ({ value }) => intIdentify(value) ? 
value.toString() : JSON.stringify(value)\n },\n {\n identify: value => typeof value === 'number',\n default: true,\n tag: 'tag:yaml.org,2002:float',\n test: /^-?(?:0|[1-9][0-9]*)(?:\\.[0-9]*)?(?:[eE][-+]?[0-9]+)?$/,\n resolve: str => parseFloat(str),\n stringify: stringifyJSON\n }\n];\nconst jsonError = {\n default: true,\n tag: '',\n test: /^/,\n resolve(str, onError) {\n onError(`Unresolved plain scalar ${JSON.stringify(str)}`);\n return str;\n }\n};\nconst schema = [map.map, seq.seq].concat(jsonScalars, jsonError);\n\nexports.schema = schema;\n","'use strict';\n\nvar map = require('./common/map.js');\nvar _null = require('./common/null.js');\nvar seq = require('./common/seq.js');\nvar string = require('./common/string.js');\nvar bool = require('./core/bool.js');\nvar float = require('./core/float.js');\nvar int = require('./core/int.js');\nvar schema = require('./core/schema.js');\nvar schema$1 = require('./json/schema.js');\nvar binary = require('./yaml-1.1/binary.js');\nvar omap = require('./yaml-1.1/omap.js');\nvar pairs = require('./yaml-1.1/pairs.js');\nvar schema$2 = require('./yaml-1.1/schema.js');\nvar set = require('./yaml-1.1/set.js');\nvar timestamp = require('./yaml-1.1/timestamp.js');\n\nconst schemas = new Map([\n ['core', schema.schema],\n ['failsafe', [map.map, seq.seq, string.string]],\n ['json', schema$1.schema],\n ['yaml11', schema$2.schema],\n ['yaml-1.1', schema$2.schema]\n]);\nconst tagsByName = {\n binary: binary.binary,\n bool: bool.boolTag,\n float: float.float,\n floatExp: float.floatExp,\n floatNaN: float.floatNaN,\n floatTime: timestamp.floatTime,\n int: int.int,\n intHex: int.intHex,\n intOct: int.intOct,\n intTime: timestamp.intTime,\n map: map.map,\n null: _null.nullTag,\n omap: omap.omap,\n pairs: pairs.pairs,\n seq: seq.seq,\n set: set.set,\n timestamp: timestamp.timestamp\n};\nconst coreKnownTags = {\n 'tag:yaml.org,2002:binary': binary.binary,\n 'tag:yaml.org,2002:omap': omap.omap,\n 'tag:yaml.org,2002:pairs': pairs.pairs,\n 'tag:yaml.org,2002:set': set.set,\n 'tag:yaml.org,2002:timestamp': timestamp.timestamp\n};\nfunction getTags(customTags, schemaName) {\n let tags = schemas.get(schemaName);\n if (!tags) {\n if (Array.isArray(customTags))\n tags = [];\n else {\n const keys = Array.from(schemas.keys())\n .filter(key => key !== 'yaml11')\n .map(key => JSON.stringify(key))\n .join(', ');\n throw new Error(`Unknown schema \"${schemaName}\"; use one of ${keys} or define customTags array`);\n }\n }\n if (Array.isArray(customTags)) {\n for (const tag of customTags)\n tags = tags.concat(tag);\n }\n else if (typeof customTags === 'function') {\n tags = customTags(tags.slice());\n }\n return tags.map(tag => {\n if (typeof tag !== 'string')\n return tag;\n const tagObj = tagsByName[tag];\n if (tagObj)\n return tagObj;\n const keys = Object.keys(tagsByName)\n .map(key => JSON.stringify(key))\n .join(', ');\n throw new Error(`Unknown custom tag \"${tag}\"; use one of ${keys}`);\n });\n}\n\nexports.coreKnownTags = coreKnownTags;\nexports.getTags = getTags;\n","'use strict';\n\nvar Scalar = require('../../nodes/Scalar.js');\nvar stringifyString = require('../../stringify/stringifyString.js');\n\nconst binary = {\n identify: value => value instanceof Uint8Array,\n default: false,\n tag: 'tag:yaml.org,2002:binary',\n /**\n * Returns a Buffer in node and an Uint8Array in browsers\n *\n * To use the resulting buffer as an image, you'll want to do something like:\n *\n * const blob = new Blob([buffer], { type: 'image/jpeg' })\n * document.querySelector('#photo').src = 
URL.createObjectURL(blob)\n */\n resolve(src, onError) {\n if (typeof Buffer === 'function') {\n return Buffer.from(src, 'base64');\n }\n else if (typeof atob === 'function') {\n // On IE 11, atob() can't handle newlines\n const str = atob(src.replace(/[\\n\\r]/g, ''));\n const buffer = new Uint8Array(str.length);\n for (let i = 0; i < str.length; ++i)\n buffer[i] = str.charCodeAt(i);\n return buffer;\n }\n else {\n onError('This environment does not support reading binary tags; either Buffer or atob is required');\n return src;\n }\n },\n stringify({ comment, type, value }, ctx, onComment, onChompKeep) {\n const buf = value; // checked earlier by binary.identify()\n let str;\n if (typeof Buffer === 'function') {\n str =\n buf instanceof Buffer\n ? buf.toString('base64')\n : Buffer.from(buf.buffer).toString('base64');\n }\n else if (typeof btoa === 'function') {\n let s = '';\n for (let i = 0; i < buf.length; ++i)\n s += String.fromCharCode(buf[i]);\n str = btoa(s);\n }\n else {\n throw new Error('This environment does not support writing binary tags; either Buffer or btoa is required');\n }\n if (!type)\n type = Scalar.Scalar.BLOCK_LITERAL;\n if (type !== Scalar.Scalar.QUOTE_DOUBLE) {\n const lineWidth = Math.max(ctx.options.lineWidth - ctx.indent.length, ctx.options.minContentWidth);\n const n = Math.ceil(str.length / lineWidth);\n const lines = new Array(n);\n for (let i = 0, o = 0; i < n; ++i, o += lineWidth) {\n lines[i] = str.substr(o, lineWidth);\n }\n str = lines.join(type === Scalar.Scalar.BLOCK_LITERAL ? '\\n' : ' ');\n }\n return stringifyString.stringifyString({ comment, type, value: str }, ctx, onComment, onChompKeep);\n }\n};\n\nexports.binary = binary;\n","'use strict';\n\nvar Scalar = require('../../nodes/Scalar.js');\n\nfunction boolStringify({ value, source }, ctx) {\n const boolObj = value ? trueTag : falseTag;\n if (source && boolObj.test.test(source))\n return source;\n return value ? ctx.options.trueStr : ctx.options.falseStr;\n}\nconst trueTag = {\n identify: value => value === true,\n default: true,\n tag: 'tag:yaml.org,2002:bool',\n test: /^(?:Y|y|[Yy]es|YES|[Tt]rue|TRUE|[Oo]n|ON)$/,\n resolve: () => new Scalar.Scalar(true),\n stringify: boolStringify\n};\nconst falseTag = {\n identify: value => value === false,\n default: true,\n tag: 'tag:yaml.org,2002:bool',\n test: /^(?:N|n|[Nn]o|NO|[Ff]alse|FALSE|[Oo]ff|OFF)$/i,\n resolve: () => new Scalar.Scalar(false),\n stringify: boolStringify\n};\n\nexports.falseTag = falseTag;\nexports.trueTag = trueTag;\n","'use strict';\n\nvar Scalar = require('../../nodes/Scalar.js');\nvar stringifyNumber = require('../../stringify/stringifyNumber.js');\n\nconst floatNaN = {\n identify: value => typeof value === 'number',\n default: true,\n tag: 'tag:yaml.org,2002:float',\n test: /^[-+]?\\.(?:inf|Inf|INF|nan|NaN|NAN)$/,\n resolve: (str) => str.slice(-3).toLowerCase() === 'nan'\n ? NaN\n : str[0] === '-'\n ? Number.NEGATIVE_INFINITY\n : Number.POSITIVE_INFINITY,\n stringify: stringifyNumber.stringifyNumber\n};\nconst floatExp = {\n identify: value => typeof value === 'number',\n default: true,\n tag: 'tag:yaml.org,2002:float',\n format: 'EXP',\n test: /^[-+]?(?:[0-9][0-9_]*)?(?:\\.[0-9_]*)?[eE][-+]?[0-9]+$/,\n resolve: (str) => parseFloat(str.replace(/_/g, '')),\n stringify(node) {\n const num = Number(node.value);\n return isFinite(num) ? 
num.toExponential() : stringifyNumber.stringifyNumber(node);\n }\n};\nconst float = {\n identify: value => typeof value === 'number',\n default: true,\n tag: 'tag:yaml.org,2002:float',\n test: /^[-+]?(?:[0-9][0-9_]*)?\\.[0-9_]*$/,\n resolve(str) {\n const node = new Scalar.Scalar(parseFloat(str.replace(/_/g, '')));\n const dot = str.indexOf('.');\n if (dot !== -1) {\n const f = str.substring(dot + 1).replace(/_/g, '');\n if (f[f.length - 1] === '0')\n node.minFractionDigits = f.length;\n }\n return node;\n },\n stringify: stringifyNumber.stringifyNumber\n};\n\nexports.float = float;\nexports.floatExp = floatExp;\nexports.floatNaN = floatNaN;\n","'use strict';\n\nvar stringifyNumber = require('../../stringify/stringifyNumber.js');\n\nconst intIdentify = (value) => typeof value === 'bigint' || Number.isInteger(value);\nfunction intResolve(str, offset, radix, { intAsBigInt }) {\n const sign = str[0];\n if (sign === '-' || sign === '+')\n offset += 1;\n str = str.substring(offset).replace(/_/g, '');\n if (intAsBigInt) {\n switch (radix) {\n case 2:\n str = `0b${str}`;\n break;\n case 8:\n str = `0o${str}`;\n break;\n case 16:\n str = `0x${str}`;\n break;\n }\n const n = BigInt(str);\n return sign === '-' ? BigInt(-1) * n : n;\n }\n const n = parseInt(str, radix);\n return sign === '-' ? -1 * n : n;\n}\nfunction intStringify(node, radix, prefix) {\n const { value } = node;\n if (intIdentify(value)) {\n const str = value.toString(radix);\n return value < 0 ? '-' + prefix + str.substr(1) : prefix + str;\n }\n return stringifyNumber.stringifyNumber(node);\n}\nconst intBin = {\n identify: intIdentify,\n default: true,\n tag: 'tag:yaml.org,2002:int',\n format: 'BIN',\n test: /^[-+]?0b[0-1_]+$/,\n resolve: (str, _onError, opt) => intResolve(str, 2, 2, opt),\n stringify: node => intStringify(node, 2, '0b')\n};\nconst intOct = {\n identify: intIdentify,\n default: true,\n tag: 'tag:yaml.org,2002:int',\n format: 'OCT',\n test: /^[-+]?0[0-7_]+$/,\n resolve: (str, _onError, opt) => intResolve(str, 1, 8, opt),\n stringify: node => intStringify(node, 8, '0')\n};\nconst int = {\n identify: intIdentify,\n default: true,\n tag: 'tag:yaml.org,2002:int',\n test: /^[-+]?[0-9][0-9_]*$/,\n resolve: (str, _onError, opt) => intResolve(str, 0, 10, opt),\n stringify: stringifyNumber.stringifyNumber\n};\nconst intHex = {\n identify: intIdentify,\n default: true,\n tag: 'tag:yaml.org,2002:int',\n format: 'HEX',\n test: /^[-+]?0x[0-9a-fA-F_]+$/,\n resolve: (str, _onError, opt) => intResolve(str, 2, 16, opt),\n stringify: node => intStringify(node, 16, '0x')\n};\n\nexports.int = int;\nexports.intBin = intBin;\nexports.intHex = intHex;\nexports.intOct = intOct;\n","'use strict';\n\nvar YAMLSeq = require('../../nodes/YAMLSeq.js');\nvar toJS = require('../../nodes/toJS.js');\nvar Node = require('../../nodes/Node.js');\nvar YAMLMap = require('../../nodes/YAMLMap.js');\nvar pairs = require('./pairs.js');\n\nclass YAMLOMap extends YAMLSeq.YAMLSeq {\n constructor() {\n super();\n this.add = YAMLMap.YAMLMap.prototype.add.bind(this);\n this.delete = YAMLMap.YAMLMap.prototype.delete.bind(this);\n this.get = YAMLMap.YAMLMap.prototype.get.bind(this);\n this.has = YAMLMap.YAMLMap.prototype.has.bind(this);\n this.set = YAMLMap.YAMLMap.prototype.set.bind(this);\n this.tag = YAMLOMap.tag;\n }\n /**\n * If `ctx` is given, the return type is actually `Map`,\n * but TypeScript won't allow widening the signature of a child method.\n */\n toJSON(_, ctx) {\n if (!ctx)\n return super.toJSON(_);\n const map = new Map();\n if (ctx?.onCreate)\n 
ctx.onCreate(map);\n for (const pair of this.items) {\n let key, value;\n if (Node.isPair(pair)) {\n key = toJS.toJS(pair.key, '', ctx);\n value = toJS.toJS(pair.value, key, ctx);\n }\n else {\n key = toJS.toJS(pair, '', ctx);\n }\n if (map.has(key))\n throw new Error('Ordered maps must not include duplicate keys');\n map.set(key, value);\n }\n return map;\n }\n}\nYAMLOMap.tag = 'tag:yaml.org,2002:omap';\nconst omap = {\n collection: 'seq',\n identify: value => value instanceof Map,\n nodeClass: YAMLOMap,\n default: false,\n tag: 'tag:yaml.org,2002:omap',\n resolve(seq, onError) {\n const pairs$1 = pairs.resolvePairs(seq, onError);\n const seenKeys = [];\n for (const { key } of pairs$1.items) {\n if (Node.isScalar(key)) {\n if (seenKeys.includes(key.value)) {\n onError(`Ordered maps must not include duplicate keys: ${key.value}`);\n }\n else {\n seenKeys.push(key.value);\n }\n }\n }\n return Object.assign(new YAMLOMap(), pairs$1);\n },\n createNode(schema, iterable, ctx) {\n const pairs$1 = pairs.createPairs(schema, iterable, ctx);\n const omap = new YAMLOMap();\n omap.items = pairs$1.items;\n return omap;\n }\n};\n\nexports.YAMLOMap = YAMLOMap;\nexports.omap = omap;\n","'use strict';\n\nvar Node = require('../../nodes/Node.js');\nvar Pair = require('../../nodes/Pair.js');\nvar Scalar = require('../../nodes/Scalar.js');\nvar YAMLSeq = require('../../nodes/YAMLSeq.js');\n\nfunction resolvePairs(seq, onError) {\n if (Node.isSeq(seq)) {\n for (let i = 0; i < seq.items.length; ++i) {\n let item = seq.items[i];\n if (Node.isPair(item))\n continue;\n else if (Node.isMap(item)) {\n if (item.items.length > 1)\n onError('Each pair must have its own sequence indicator');\n const pair = item.items[0] || new Pair.Pair(new Scalar.Scalar(null));\n if (item.commentBefore)\n pair.key.commentBefore = pair.key.commentBefore\n ? `${item.commentBefore}\\n${pair.key.commentBefore}`\n : item.commentBefore;\n if (item.comment) {\n const cn = pair.value ?? pair.key;\n cn.comment = cn.comment\n ? `${item.comment}\\n${cn.comment}`\n : item.comment;\n }\n item = pair;\n }\n seq.items[i] = Node.isPair(item) ? 
item : new Pair.Pair(item);\n }\n }\n else\n onError('Expected a sequence for this tag');\n return seq;\n}\nfunction createPairs(schema, iterable, ctx) {\n const { replacer } = ctx;\n const pairs = new YAMLSeq.YAMLSeq(schema);\n pairs.tag = 'tag:yaml.org,2002:pairs';\n let i = 0;\n if (iterable && Symbol.iterator in Object(iterable))\n for (let it of iterable) {\n if (typeof replacer === 'function')\n it = replacer.call(iterable, String(i++), it);\n let key, value;\n if (Array.isArray(it)) {\n if (it.length === 2) {\n key = it[0];\n value = it[1];\n }\n else\n throw new TypeError(`Expected [key, value] tuple: ${it}`);\n }\n else if (it && it instanceof Object) {\n const keys = Object.keys(it);\n if (keys.length === 1) {\n key = keys[0];\n value = it[key];\n }\n else\n throw new TypeError(`Expected { key: value } tuple: ${it}`);\n }\n else {\n key = it;\n }\n pairs.items.push(Pair.createPair(key, value, ctx));\n }\n return pairs;\n}\nconst pairs = {\n collection: 'seq',\n default: false,\n tag: 'tag:yaml.org,2002:pairs',\n resolve: resolvePairs,\n createNode: createPairs\n};\n\nexports.createPairs = createPairs;\nexports.pairs = pairs;\nexports.resolvePairs = resolvePairs;\n","'use strict';\n\nvar map = require('../common/map.js');\nvar _null = require('../common/null.js');\nvar seq = require('../common/seq.js');\nvar string = require('../common/string.js');\nvar binary = require('./binary.js');\nvar bool = require('./bool.js');\nvar float = require('./float.js');\nvar int = require('./int.js');\nvar omap = require('./omap.js');\nvar pairs = require('./pairs.js');\nvar set = require('./set.js');\nvar timestamp = require('./timestamp.js');\n\nconst schema = [\n map.map,\n seq.seq,\n string.string,\n _null.nullTag,\n bool.trueTag,\n bool.falseTag,\n int.intBin,\n int.intOct,\n int.int,\n int.intHex,\n float.floatNaN,\n float.floatExp,\n float.float,\n binary.binary,\n omap.omap,\n pairs.pairs,\n set.set,\n timestamp.intTime,\n timestamp.floatTime,\n timestamp.timestamp\n];\n\nexports.schema = schema;\n","'use strict';\n\nvar Node = require('../../nodes/Node.js');\nvar Pair = require('../../nodes/Pair.js');\nvar YAMLMap = require('../../nodes/YAMLMap.js');\n\nclass YAMLSet extends YAMLMap.YAMLMap {\n constructor(schema) {\n super(schema);\n this.tag = YAMLSet.tag;\n }\n add(key) {\n let pair;\n if (Node.isPair(key))\n pair = key;\n else if (key &&\n typeof key === 'object' &&\n 'key' in key &&\n 'value' in key &&\n key.value === null)\n pair = new Pair.Pair(key.key, null);\n else\n pair = new Pair.Pair(key, null);\n const prev = YAMLMap.findPair(this.items, pair.key);\n if (!prev)\n this.items.push(pair);\n }\n /**\n * If `keepPair` is `true`, returns the Pair matching `key`.\n * Otherwise, returns the value of that Pair's key.\n */\n get(key, keepPair) {\n const pair = YAMLMap.findPair(this.items, key);\n return !keepPair && Node.isPair(pair)\n ? Node.isScalar(pair.key)\n ? 
pair.key.value\n : pair.key\n : pair;\n }\n set(key, value) {\n if (typeof value !== 'boolean')\n throw new Error(`Expected boolean value for set(key, value) in a YAML set, not ${typeof value}`);\n const prev = YAMLMap.findPair(this.items, key);\n if (prev && !value) {\n this.items.splice(this.items.indexOf(prev), 1);\n }\n else if (!prev && value) {\n this.items.push(new Pair.Pair(key));\n }\n }\n toJSON(_, ctx) {\n return super.toJSON(_, ctx, Set);\n }\n toString(ctx, onComment, onChompKeep) {\n if (!ctx)\n return JSON.stringify(this);\n if (this.hasAllNullValues(true))\n return super.toString(Object.assign({}, ctx, { allNullValues: true }), onComment, onChompKeep);\n else\n throw new Error('Set items must all have null values');\n }\n}\nYAMLSet.tag = 'tag:yaml.org,2002:set';\nconst set = {\n collection: 'map',\n identify: value => value instanceof Set,\n nodeClass: YAMLSet,\n default: false,\n tag: 'tag:yaml.org,2002:set',\n resolve(map, onError) {\n if (Node.isMap(map)) {\n if (map.hasAllNullValues(true))\n return Object.assign(new YAMLSet(), map);\n else\n onError('Set items must all have null values');\n }\n else\n onError('Expected a mapping for this tag');\n return map;\n },\n createNode(schema, iterable, ctx) {\n const { replacer } = ctx;\n const set = new YAMLSet(schema);\n if (iterable && Symbol.iterator in Object(iterable))\n for (let value of iterable) {\n if (typeof replacer === 'function')\n value = replacer.call(iterable, value, value);\n set.items.push(Pair.createPair(value, null, ctx));\n }\n return set;\n }\n};\n\nexports.YAMLSet = YAMLSet;\nexports.set = set;\n","'use strict';\n\nvar stringifyNumber = require('../../stringify/stringifyNumber.js');\n\n/** Internal types handle bigint as number, because TS can't figure it out. */\nfunction parseSexagesimal(str, asBigInt) {\n const sign = str[0];\n const parts = sign === '-' || sign === '+' ? str.substring(1) : str;\n const num = (n) => asBigInt ? BigInt(n) : Number(n);\n const res = parts\n .replace(/_/g, '')\n .split(':')\n .reduce((res, p) => res * num(60) + num(p), num(0));\n return (sign === '-' ? num(-1) * res : res);\n}\n/**\n * hhhh:mm:ss.sss\n *\n * Internal types handle bigint as number, because TS can't figure it out.\n */\nfunction stringifySexagesimal(node) {\n let { value } = node;\n let num = (n) => n;\n if (typeof value === 'bigint')\n num = n => BigInt(n);\n else if (isNaN(value) || !isFinite(value))\n return stringifyNumber.stringifyNumber(node);\n let sign = '';\n if (value < 0) {\n sign = '-';\n value *= num(-1);\n }\n const _60 = num(60);\n const parts = [value % _60]; // seconds, including ms\n if (value < 60) {\n parts.unshift(0); // at least one : is required\n }\n else {\n value = (value - parts[0]) / _60;\n parts.unshift(value % _60); // minutes\n if (value >= 60) {\n value = (value - parts[0]) / _60;\n parts.unshift(value); // hours\n }\n }\n return (sign +\n parts\n .map(n => (n < 10 ? 
'0' + String(n) : String(n)))\n .join(':')\n .replace(/000000\\d*$/, '') // % 60 may introduce error\n );\n}\nconst intTime = {\n identify: value => typeof value === 'bigint' || Number.isInteger(value),\n default: true,\n tag: 'tag:yaml.org,2002:int',\n format: 'TIME',\n test: /^[-+]?[0-9][0-9_]*(?::[0-5]?[0-9])+$/,\n resolve: (str, _onError, { intAsBigInt }) => parseSexagesimal(str, intAsBigInt),\n stringify: stringifySexagesimal\n};\nconst floatTime = {\n identify: value => typeof value === 'number',\n default: true,\n tag: 'tag:yaml.org,2002:float',\n format: 'TIME',\n test: /^[-+]?[0-9][0-9_]*(?::[0-5]?[0-9])+\\.[0-9_]*$/,\n resolve: str => parseSexagesimal(str, false),\n stringify: stringifySexagesimal\n};\nconst timestamp = {\n identify: value => value instanceof Date,\n default: true,\n tag: 'tag:yaml.org,2002:timestamp',\n // If the time zone is omitted, the timestamp is assumed to be specified in UTC. The time part\n // may be omitted altogether, resulting in a date format. In such a case, the time part is\n // assumed to be 00:00:00Z (start of day, UTC).\n test: RegExp('^([0-9]{4})-([0-9]{1,2})-([0-9]{1,2})' + // YYYY-Mm-Dd\n '(?:' + // time is optional\n '(?:t|T|[ \\\\t]+)' + // t | T | whitespace\n '([0-9]{1,2}):([0-9]{1,2}):([0-9]{1,2}(\\\\.[0-9]+)?)' + // Hh:Mm:Ss(.ss)?\n '(?:[ \\\\t]*(Z|[-+][012]?[0-9](?::[0-9]{2})?))?' + // Z | +5 | -03:30\n ')?$'),\n resolve(str) {\n const match = str.match(timestamp.test);\n if (!match)\n throw new Error('!!timestamp expects a date, starting with yyyy-mm-dd');\n const [, year, month, day, hour, minute, second] = match.map(Number);\n const millisec = match[7] ? Number((match[7] + '00').substr(1, 3)) : 0;\n let date = Date.UTC(year, month - 1, day, hour || 0, minute || 0, second || 0, millisec);\n const tz = match[8];\n if (tz && tz !== 'Z') {\n let d = parseSexagesimal(tz, false);\n if (Math.abs(d) < 30)\n d *= 60;\n date -= 60000 * d;\n }\n return new Date(date);\n },\n stringify: ({ value }) => value.toISOString().replace(/((T00:00)?:00)?\\.000Z$/, '')\n};\n\nexports.floatTime = floatTime;\nexports.intTime = intTime;\nexports.timestamp = timestamp;\n","'use strict';\n\nconst FOLD_FLOW = 'flow';\nconst FOLD_BLOCK = 'block';\nconst FOLD_QUOTED = 'quoted';\n/**\n * Tries to keep input at up to `lineWidth` characters, splitting only on spaces\n * not followed by newlines or spaces unless `mode` is `'quoted'`. 
Lines are\n * terminated with `\\n` and started with `indent`.\n */\nfunction foldFlowLines(text, indent, mode = 'flow', { indentAtStart, lineWidth = 80, minContentWidth = 20, onFold, onOverflow } = {}) {\n if (!lineWidth || lineWidth < 0)\n return text;\n const endStep = Math.max(1 + minContentWidth, 1 + lineWidth - indent.length);\n if (text.length <= endStep)\n return text;\n const folds = [];\n const escapedFolds = {};\n let end = lineWidth - indent.length;\n if (typeof indentAtStart === 'number') {\n if (indentAtStart > lineWidth - Math.max(2, minContentWidth))\n folds.push(0);\n else\n end = lineWidth - indentAtStart;\n }\n let split = undefined;\n let prev = undefined;\n let overflow = false;\n let i = -1;\n let escStart = -1;\n let escEnd = -1;\n if (mode === FOLD_BLOCK) {\n i = consumeMoreIndentedLines(text, i);\n if (i !== -1)\n end = i + endStep;\n }\n for (let ch; (ch = text[(i += 1)]);) {\n if (mode === FOLD_QUOTED && ch === '\\\\') {\n escStart = i;\n switch (text[i + 1]) {\n case 'x':\n i += 3;\n break;\n case 'u':\n i += 5;\n break;\n case 'U':\n i += 9;\n break;\n default:\n i += 1;\n }\n escEnd = i;\n }\n if (ch === '\\n') {\n if (mode === FOLD_BLOCK)\n i = consumeMoreIndentedLines(text, i);\n end = i + endStep;\n split = undefined;\n }\n else {\n if (ch === ' ' &&\n prev &&\n prev !== ' ' &&\n prev !== '\\n' &&\n prev !== '\\t') {\n // space surrounded by non-space can be replaced with newline + indent\n const next = text[i + 1];\n if (next && next !== ' ' && next !== '\\n' && next !== '\\t')\n split = i;\n }\n if (i >= end) {\n if (split) {\n folds.push(split);\n end = split + endStep;\n split = undefined;\n }\n else if (mode === FOLD_QUOTED) {\n // white-space collected at end may stretch past lineWidth\n while (prev === ' ' || prev === '\\t') {\n prev = ch;\n ch = text[(i += 1)];\n overflow = true;\n }\n // Account for newline escape, but don't break preceding escape\n const j = i > escEnd + 1 ? 
i - 2 : escStart - 1;\n // Bail out if lineWidth & minContentWidth are shorter than an escape string\n if (escapedFolds[j])\n return text;\n folds.push(j);\n escapedFolds[j] = true;\n end = j + endStep;\n split = undefined;\n }\n else {\n overflow = true;\n }\n }\n }\n prev = ch;\n }\n if (overflow && onOverflow)\n onOverflow();\n if (folds.length === 0)\n return text;\n if (onFold)\n onFold();\n let res = text.slice(0, folds[0]);\n for (let i = 0; i < folds.length; ++i) {\n const fold = folds[i];\n const end = folds[i + 1] || text.length;\n if (fold === 0)\n res = `\\n${indent}${text.slice(0, end)}`;\n else {\n if (mode === FOLD_QUOTED && escapedFolds[fold])\n res += `${text[fold]}\\\\`;\n res += `\\n${indent}${text.slice(fold + 1, end)}`;\n }\n }\n return res;\n}\n/**\n * Presumes `i + 1` is at the start of a line\n * @returns index of last newline in more-indented block\n */\nfunction consumeMoreIndentedLines(text, i) {\n let ch = text[i + 1];\n while (ch === ' ' || ch === '\\t') {\n do {\n ch = text[(i += 1)];\n } while (ch && ch !== '\\n');\n ch = text[i + 1];\n }\n return i;\n}\n\nexports.FOLD_BLOCK = FOLD_BLOCK;\nexports.FOLD_FLOW = FOLD_FLOW;\nexports.FOLD_QUOTED = FOLD_QUOTED;\nexports.foldFlowLines = foldFlowLines;\n","'use strict';\n\nvar anchors = require('../doc/anchors.js');\nvar Node = require('../nodes/Node.js');\nvar stringifyComment = require('./stringifyComment.js');\nvar stringifyString = require('./stringifyString.js');\n\nfunction createStringifyContext(doc, options) {\n const opt = Object.assign({\n blockQuote: true,\n commentString: stringifyComment.stringifyComment,\n defaultKeyType: null,\n defaultStringType: 'PLAIN',\n directives: null,\n doubleQuotedAsJSON: false,\n doubleQuotedMinMultiLineLength: 40,\n falseStr: 'false',\n flowCollectionPadding: true,\n indentSeq: true,\n lineWidth: 80,\n minContentWidth: 20,\n nullStr: 'null',\n simpleKeys: false,\n singleQuote: null,\n trueStr: 'true',\n verifyAliasOrder: true\n }, doc.schema.toStringOptions, options);\n let inFlow;\n switch (opt.collectionStyle) {\n case 'block':\n inFlow = false;\n break;\n case 'flow':\n inFlow = true;\n break;\n default:\n inFlow = null;\n }\n return {\n anchors: new Set(),\n doc,\n flowCollectionPadding: opt.flowCollectionPadding ? ' ' : '',\n indent: '',\n indentStep: typeof opt.indent === 'number' ? ' '.repeat(opt.indent) : ' ',\n inFlow,\n options: opt\n };\n}\nfunction getTagObject(tags, item) {\n if (item.tag) {\n const match = tags.filter(t => t.tag === item.tag);\n if (match.length > 0)\n return match.find(t => t.format === item.format) ?? match[0];\n }\n let tagObj = undefined;\n let obj;\n if (Node.isScalar(item)) {\n obj = item.value;\n const match = tags.filter(t => t.identify?.(obj));\n tagObj =\n match.find(t => t.format === item.format) ?? match.find(t => !t.format);\n }\n else {\n obj = item;\n tagObj = tags.find(t => t.nodeClass && obj instanceof t.nodeClass);\n }\n if (!tagObj) {\n const name = obj?.constructor?.name ?? typeof obj;\n throw new Error(`Tag not resolved for ${name} value`);\n }\n return tagObj;\n}\n// needs to be called before value stringifier to allow for circular anchor refs\nfunction stringifyProps(node, tagObj, { anchors: anchors$1, doc }) {\n if (!doc.directives)\n return '';\n const props = [];\n const anchor = (Node.isScalar(node) || Node.isCollection(node)) && node.anchor;\n if (anchor && anchors.anchorIsValid(anchor)) {\n anchors$1.add(anchor);\n props.push(`&${anchor}`);\n }\n const tag = node.tag ? node.tag : tagObj.default ? 
null : tagObj.tag;\n if (tag)\n props.push(doc.directives.tagString(tag));\n return props.join(' ');\n}\nfunction stringify(item, ctx, onComment, onChompKeep) {\n if (Node.isPair(item))\n return item.toString(ctx, onComment, onChompKeep);\n if (Node.isAlias(item)) {\n if (ctx.doc.directives)\n return item.toString(ctx);\n if (ctx.resolvedAliases?.has(item)) {\n throw new TypeError(`Cannot stringify circular structure without alias nodes`);\n }\n else {\n if (ctx.resolvedAliases)\n ctx.resolvedAliases.add(item);\n else\n ctx.resolvedAliases = new Set([item]);\n item = item.resolve(ctx.doc);\n }\n }\n let tagObj = undefined;\n const node = Node.isNode(item)\n ? item\n : ctx.doc.createNode(item, { onTagObj: o => (tagObj = o) });\n if (!tagObj)\n tagObj = getTagObject(ctx.doc.schema.tags, node);\n const props = stringifyProps(node, tagObj, ctx);\n if (props.length > 0)\n ctx.indentAtStart = (ctx.indentAtStart ?? 0) + props.length + 1;\n const str = typeof tagObj.stringify === 'function'\n ? tagObj.stringify(node, ctx, onComment, onChompKeep)\n : Node.isScalar(node)\n ? stringifyString.stringifyString(node, ctx, onComment, onChompKeep)\n : node.toString(ctx, onComment, onChompKeep);\n if (!props)\n return str;\n return Node.isScalar(node) || str[0] === '{' || str[0] === '['\n ? `${props} ${str}`\n : `${props}\\n${ctx.indent}${str}`;\n}\n\nexports.createStringifyContext = createStringifyContext;\nexports.stringify = stringify;\n","'use strict';\n\nvar Collection = require('../nodes/Collection.js');\nvar Node = require('../nodes/Node.js');\nvar stringify = require('./stringify.js');\nvar stringifyComment = require('./stringifyComment.js');\n\nfunction stringifyCollection(collection, ctx, options) {\n const flow = ctx.inFlow ?? collection.flow;\n const stringify = flow ? stringifyFlowCollection : stringifyBlockCollection;\n return stringify(collection, ctx, options);\n}\nfunction stringifyBlockCollection({ comment, items }, ctx, { blockItemPrefix, flowChars, itemIndent, onChompKeep, onComment }) {\n const { indent, options: { commentString } } = ctx;\n const itemCtx = Object.assign({}, ctx, { indent: itemIndent, type: null });\n let chompKeep = false; // flag for the preceding node's status\n const lines = [];\n for (let i = 0; i < items.length; ++i) {\n const item = items[i];\n let comment = null;\n if (Node.isNode(item)) {\n if (!chompKeep && item.spaceBefore)\n lines.push('');\n addCommentBefore(ctx, lines, item.commentBefore, chompKeep);\n if (item.comment)\n comment = item.comment;\n }\n else if (Node.isPair(item)) {\n const ik = Node.isNode(item.key) ? item.key : null;\n if (ik) {\n if (!chompKeep && ik.spaceBefore)\n lines.push('');\n addCommentBefore(ctx, lines, ik.commentBefore, chompKeep);\n }\n }\n chompKeep = false;\n let str = stringify.stringify(item, itemCtx, () => (comment = null), () => (chompKeep = true));\n if (comment)\n str += stringifyComment.lineComment(str, itemIndent, commentString(comment));\n if (chompKeep && comment)\n chompKeep = false;\n lines.push(blockItemPrefix + str);\n }\n let str;\n if (lines.length === 0) {\n str = flowChars.start + flowChars.end;\n }\n else {\n str = lines[0];\n for (let i = 1; i < lines.length; ++i) {\n const line = lines[i];\n str += line ? 
`\\n${indent}${line}` : '\\n';\n }\n }\n if (comment) {\n str += '\\n' + stringifyComment.indentComment(commentString(comment), indent);\n if (onComment)\n onComment();\n }\n else if (chompKeep && onChompKeep)\n onChompKeep();\n return str;\n}\nfunction stringifyFlowCollection({ comment, items }, ctx, { flowChars, itemIndent, onComment }) {\n const { indent, indentStep, flowCollectionPadding: fcPadding, options: { commentString } } = ctx;\n itemIndent += indentStep;\n const itemCtx = Object.assign({}, ctx, {\n indent: itemIndent,\n inFlow: true,\n type: null\n });\n let reqNewline = false;\n let linesAtValue = 0;\n const lines = [];\n for (let i = 0; i < items.length; ++i) {\n const item = items[i];\n let comment = null;\n if (Node.isNode(item)) {\n if (item.spaceBefore)\n lines.push('');\n addCommentBefore(ctx, lines, item.commentBefore, false);\n if (item.comment)\n comment = item.comment;\n }\n else if (Node.isPair(item)) {\n const ik = Node.isNode(item.key) ? item.key : null;\n if (ik) {\n if (ik.spaceBefore)\n lines.push('');\n addCommentBefore(ctx, lines, ik.commentBefore, false);\n if (ik.comment)\n reqNewline = true;\n }\n const iv = Node.isNode(item.value) ? item.value : null;\n if (iv) {\n if (iv.comment)\n comment = iv.comment;\n if (iv.commentBefore)\n reqNewline = true;\n }\n else if (item.value == null && ik && ik.comment) {\n comment = ik.comment;\n }\n }\n if (comment)\n reqNewline = true;\n let str = stringify.stringify(item, itemCtx, () => (comment = null));\n if (i < items.length - 1)\n str += ',';\n if (comment)\n str += stringifyComment.lineComment(str, itemIndent, commentString(comment));\n if (!reqNewline && (lines.length > linesAtValue || str.includes('\\n')))\n reqNewline = true;\n lines.push(str);\n linesAtValue = lines.length;\n }\n let str;\n const { start, end } = flowChars;\n if (lines.length === 0) {\n str = start + end;\n }\n else {\n if (!reqNewline) {\n const len = lines.reduce((sum, line) => sum + line.length + 2, 2);\n reqNewline = len > Collection.Collection.maxFlowStringSingleLineLength;\n }\n if (reqNewline) {\n str = start;\n for (const line of lines)\n str += line ? `\\n${indentStep}${indent}${line}` : '\\n';\n str += `\\n${indent}${end}`;\n }\n else {\n str = `${start}${fcPadding}${lines.join(' ')}${fcPadding}${end}`;\n }\n }\n if (comment) {\n str += stringifyComment.lineComment(str, indent, commentString(comment));\n if (onComment)\n onComment();\n }\n return str;\n}\nfunction addCommentBefore({ indent, options: { commentString } }, lines, comment, chompKeep) {\n if (comment && chompKeep)\n comment = comment.replace(/^\\n+/, '');\n if (comment) {\n const ic = stringifyComment.indentComment(commentString(comment), indent);\n lines.push(ic.trimStart()); // Avoid double indent on first line\n }\n}\n\nexports.stringifyCollection = stringifyCollection;\n","'use strict';\n\n/**\n * Stringifies a comment.\n *\n * Empty comment lines are left empty,\n * lines consisting of a single space are replaced by `#`,\n * and all other lines are prefixed with a `#`.\n */\nconst stringifyComment = (str) => str.replace(/^(?!$)(?: $)?/gm, '#');\nfunction indentComment(comment, indent) {\n if (/^\\n+$/.test(comment))\n return comment.substring(1);\n return indent ? comment.replace(/^(?! *$)/gm, indent) : comment;\n}\nconst lineComment = (str, indent, comment) => str.endsWith('\\n')\n ? indentComment(comment, indent)\n : comment.includes('\\n')\n ? '\\n' + indentComment(comment, indent)\n : (str.endsWith(' ') ? 
'' : ' ') + comment;\n\nexports.indentComment = indentComment;\nexports.lineComment = lineComment;\nexports.stringifyComment = stringifyComment;\n","'use strict';\n\nvar Node = require('../nodes/Node.js');\nvar stringify = require('./stringify.js');\nvar stringifyComment = require('./stringifyComment.js');\n\nfunction stringifyDocument(doc, options) {\n const lines = [];\n let hasDirectives = options.directives === true;\n if (options.directives !== false && doc.directives) {\n const dir = doc.directives.toString(doc);\n if (dir) {\n lines.push(dir);\n hasDirectives = true;\n }\n else if (doc.directives.docStart)\n hasDirectives = true;\n }\n if (hasDirectives)\n lines.push('---');\n const ctx = stringify.createStringifyContext(doc, options);\n const { commentString } = ctx.options;\n if (doc.commentBefore) {\n if (lines.length !== 1)\n lines.unshift('');\n const cs = commentString(doc.commentBefore);\n lines.unshift(stringifyComment.indentComment(cs, ''));\n }\n let chompKeep = false;\n let contentComment = null;\n if (doc.contents) {\n if (Node.isNode(doc.contents)) {\n if (doc.contents.spaceBefore && hasDirectives)\n lines.push('');\n if (doc.contents.commentBefore) {\n const cs = commentString(doc.contents.commentBefore);\n lines.push(stringifyComment.indentComment(cs, ''));\n }\n // top-level block scalars need to be indented if followed by a comment\n ctx.forceBlockIndent = !!doc.comment;\n contentComment = doc.contents.comment;\n }\n const onChompKeep = contentComment ? undefined : () => (chompKeep = true);\n let body = stringify.stringify(doc.contents, ctx, () => (contentComment = null), onChompKeep);\n if (contentComment)\n body += stringifyComment.lineComment(body, '', commentString(contentComment));\n if ((body[0] === '|' || body[0] === '>') &&\n lines[lines.length - 1] === '---') {\n // Top-level block scalars with a preceding doc marker ought to use the\n // same line for their header.\n lines[lines.length - 1] = `--- ${body}`;\n }\n else\n lines.push(body);\n }\n else {\n lines.push(stringify.stringify(doc.contents, ctx));\n }\n if (doc.directives?.docEnd) {\n if (doc.comment) {\n const cs = commentString(doc.comment);\n if (cs.includes('\\n')) {\n lines.push('...');\n lines.push(stringifyComment.indentComment(cs, ''));\n }\n else {\n lines.push(`... ${cs}`);\n }\n }\n else {\n lines.push('...');\n }\n }\n else {\n let dc = doc.comment;\n if (dc && chompKeep)\n dc = dc.replace(/^\\n+/, '');\n if (dc) {\n if ((!chompKeep || contentComment) && lines[lines.length - 1] !== '')\n lines.push('');\n lines.push(stringifyComment.indentComment(commentString(dc), ''));\n }\n }\n return lines.join('\\n') + '\\n';\n}\n\nexports.stringifyDocument = stringifyDocument;\n","'use strict';\n\nfunction stringifyNumber({ format, minFractionDigits, tag, value }) {\n if (typeof value === 'bigint')\n return String(value);\n const num = typeof value === 'number' ? value : Number(value);\n if (!isFinite(num))\n return isNaN(num) ? '.nan' : num < 0 ? 
'-.inf' : '.inf';\n let n = JSON.stringify(value);\n if (!format &&\n minFractionDigits &&\n (!tag || tag === 'tag:yaml.org,2002:float') &&\n /^\\d/.test(n)) {\n let i = n.indexOf('.');\n if (i < 0) {\n i = n.length;\n n += '.';\n }\n let d = minFractionDigits - (n.length - i - 1);\n while (d-- > 0)\n n += '0';\n }\n return n;\n}\n\nexports.stringifyNumber = stringifyNumber;\n","'use strict';\n\nvar Node = require('../nodes/Node.js');\nvar Scalar = require('../nodes/Scalar.js');\nvar stringify = require('./stringify.js');\nvar stringifyComment = require('./stringifyComment.js');\n\nfunction stringifyPair({ key, value }, ctx, onComment, onChompKeep) {\n const { allNullValues, doc, indent, indentStep, options: { commentString, indentSeq, simpleKeys } } = ctx;\n let keyComment = (Node.isNode(key) && key.comment) || null;\n if (simpleKeys) {\n if (keyComment) {\n throw new Error('With simple keys, key nodes cannot have comments');\n }\n if (Node.isCollection(key)) {\n const msg = 'With simple keys, collection cannot be used as a key value';\n throw new Error(msg);\n }\n }\n let explicitKey = !simpleKeys &&\n (!key ||\n (keyComment && value == null && !ctx.inFlow) ||\n Node.isCollection(key) ||\n (Node.isScalar(key)\n ? key.type === Scalar.Scalar.BLOCK_FOLDED || key.type === Scalar.Scalar.BLOCK_LITERAL\n : typeof key === 'object'));\n ctx = Object.assign({}, ctx, {\n allNullValues: false,\n implicitKey: !explicitKey && (simpleKeys || !allNullValues),\n indent: indent + indentStep\n });\n let keyCommentDone = false;\n let chompKeep = false;\n let str = stringify.stringify(key, ctx, () => (keyCommentDone = true), () => (chompKeep = true));\n if (!explicitKey && !ctx.inFlow && str.length > 1024) {\n if (simpleKeys)\n throw new Error('With simple keys, single line scalar must not span more than 1024 characters');\n explicitKey = true;\n }\n if (ctx.inFlow) {\n if (allNullValues || value == null) {\n if (keyCommentDone && onComment)\n onComment();\n return str === '' ? '?' : explicitKey ? `? ${str}` : str;\n }\n }\n else if ((allNullValues && !simpleKeys) || (value == null && explicitKey)) {\n str = `? ${str}`;\n if (keyComment && !keyCommentDone) {\n str += stringifyComment.lineComment(str, ctx.indent, commentString(keyComment));\n }\n else if (chompKeep && onChompKeep)\n onChompKeep();\n return str;\n }\n if (keyCommentDone)\n keyComment = null;\n if (explicitKey) {\n if (keyComment)\n str += stringifyComment.lineComment(str, ctx.indent, commentString(keyComment));\n str = `? 
${str}\\n${indent}:`;\n }\n else {\n str = `${str}:`;\n if (keyComment)\n str += stringifyComment.lineComment(str, ctx.indent, commentString(keyComment));\n }\n let vsb, vcb, valueComment;\n if (Node.isNode(value)) {\n vsb = !!value.spaceBefore;\n vcb = value.commentBefore;\n valueComment = value.comment;\n }\n else {\n vsb = false;\n vcb = null;\n valueComment = null;\n if (value && typeof value === 'object')\n value = doc.createNode(value);\n }\n ctx.implicitKey = false;\n if (!explicitKey && !keyComment && Node.isScalar(value))\n ctx.indentAtStart = str.length + 1;\n chompKeep = false;\n if (!indentSeq &&\n indentStep.length >= 2 &&\n !ctx.inFlow &&\n !explicitKey &&\n Node.isSeq(value) &&\n !value.flow &&\n !value.tag &&\n !value.anchor) {\n // If indentSeq === false, consider '- ' as part of indentation where possible\n ctx.indent = ctx.indent.substring(2);\n }\n let valueCommentDone = false;\n const valueStr = stringify.stringify(value, ctx, () => (valueCommentDone = true), () => (chompKeep = true));\n let ws = ' ';\n if (keyComment || vsb || vcb) {\n ws = vsb ? '\\n' : '';\n if (vcb) {\n const cs = commentString(vcb);\n ws += `\\n${stringifyComment.indentComment(cs, ctx.indent)}`;\n }\n if (valueStr === '' && !ctx.inFlow) {\n if (ws === '\\n')\n ws = '\\n\\n';\n }\n else {\n ws += `\\n${ctx.indent}`;\n }\n }\n else if (!explicitKey && Node.isCollection(value)) {\n const vs0 = valueStr[0];\n const nl0 = valueStr.indexOf('\\n');\n const hasNewline = nl0 !== -1;\n const flow = ctx.inFlow ?? value.flow ?? value.items.length === 0;\n if (hasNewline || !flow) {\n let hasPropsLine = false;\n if (hasNewline && (vs0 === '&' || vs0 === '!')) {\n let sp0 = valueStr.indexOf(' ');\n if (vs0 === '&' &&\n sp0 !== -1 &&\n sp0 < nl0 &&\n valueStr[sp0 + 1] === '!') {\n sp0 = valueStr.indexOf(' ', sp0 + 1);\n }\n if (sp0 === -1 || nl0 < sp0)\n hasPropsLine = true;\n }\n if (!hasPropsLine)\n ws = `\\n${ctx.indent}`;\n }\n }\n else if (valueStr === '' || valueStr[0] === '\\n') {\n ws = '';\n }\n str += ws + valueStr;\n if (ctx.inFlow) {\n if (valueCommentDone && onComment)\n onComment();\n }\n else if (valueComment && !valueCommentDone) {\n str += stringifyComment.lineComment(str, ctx.indent, commentString(valueComment));\n }\n else if (chompKeep && onChompKeep) {\n onChompKeep();\n }\n return str;\n}\n\nexports.stringifyPair = stringifyPair;\n","'use strict';\n\nvar Scalar = require('../nodes/Scalar.js');\nvar foldFlowLines = require('./foldFlowLines.js');\n\nconst getFoldOptions = (ctx, isBlock) => ({\n indentAtStart: isBlock ? 
ctx.indent.length : ctx.indentAtStart,\n lineWidth: ctx.options.lineWidth,\n minContentWidth: ctx.options.minContentWidth\n});\n// Also checks for lines starting with %, as parsing the output as YAML 1.1 will\n// presume that's starting a new document.\nconst containsDocumentMarker = (str) => /^(%|---|\\.\\.\\.)/m.test(str);\nfunction lineLengthOverLimit(str, lineWidth, indentLength) {\n if (!lineWidth || lineWidth < 0)\n return false;\n const limit = lineWidth - indentLength;\n const strLen = str.length;\n if (strLen <= limit)\n return false;\n for (let i = 0, start = 0; i < strLen; ++i) {\n if (str[i] === '\\n') {\n if (i - start > limit)\n return true;\n start = i + 1;\n if (strLen - start <= limit)\n return false;\n }\n }\n return true;\n}\nfunction doubleQuotedString(value, ctx) {\n const json = JSON.stringify(value);\n if (ctx.options.doubleQuotedAsJSON)\n return json;\n const { implicitKey } = ctx;\n const minMultiLineLength = ctx.options.doubleQuotedMinMultiLineLength;\n const indent = ctx.indent || (containsDocumentMarker(value) ? ' ' : '');\n let str = '';\n let start = 0;\n for (let i = 0, ch = json[i]; ch; ch = json[++i]) {\n if (ch === ' ' && json[i + 1] === '\\\\' && json[i + 2] === 'n') {\n // space before newline needs to be escaped to not be folded\n str += json.slice(start, i) + '\\\\ ';\n i += 1;\n start = i;\n ch = '\\\\';\n }\n if (ch === '\\\\')\n switch (json[i + 1]) {\n case 'u':\n {\n str += json.slice(start, i);\n const code = json.substr(i + 2, 4);\n switch (code) {\n case '0000':\n str += '\\\\0';\n break;\n case '0007':\n str += '\\\\a';\n break;\n case '000b':\n str += '\\\\v';\n break;\n case '001b':\n str += '\\\\e';\n break;\n case '0085':\n str += '\\\\N';\n break;\n case '00a0':\n str += '\\\\_';\n break;\n case '2028':\n str += '\\\\L';\n break;\n case '2029':\n str += '\\\\P';\n break;\n default:\n if (code.substr(0, 2) === '00')\n str += '\\\\x' + code.substr(2);\n else\n str += json.substr(i, 6);\n }\n i += 5;\n start = i + 1;\n }\n break;\n case 'n':\n if (implicitKey ||\n json[i + 2] === '\"' ||\n json.length < minMultiLineLength) {\n i += 1;\n }\n else {\n // folding will eat first newline\n str += json.slice(start, i) + '\\n\\n';\n while (json[i + 2] === '\\\\' &&\n json[i + 3] === 'n' &&\n json[i + 4] !== '\"') {\n str += '\\n';\n i += 2;\n }\n str += indent;\n // space after newline needs to be escaped to not be folded\n if (json[i + 2] === ' ')\n str += '\\\\';\n i += 1;\n start = i + 1;\n }\n break;\n default:\n i += 1;\n }\n }\n str = start ? str + json.slice(start) : json;\n return implicitKey\n ? str\n : foldFlowLines.foldFlowLines(str, indent, foldFlowLines.FOLD_QUOTED, getFoldOptions(ctx, false));\n}\nfunction singleQuotedString(value, ctx) {\n if (ctx.options.singleQuote === false ||\n (ctx.implicitKey && value.includes('\\n')) ||\n /[ \\t]\\n|\\n[ \\t]/.test(value) // single quoted string can't have leading or trailing whitespace around newline\n )\n return doubleQuotedString(value, ctx);\n const indent = ctx.indent || (containsDocumentMarker(value) ? ' ' : '');\n const res = \"'\" + value.replace(/'/g, \"''\").replace(/\\n+/g, `$&\\n${indent}`) + \"'\";\n return ctx.implicitKey\n ? 
res\n : foldFlowLines.foldFlowLines(res, indent, foldFlowLines.FOLD_FLOW, getFoldOptions(ctx, false));\n}\nfunction quotedString(value, ctx) {\n const { singleQuote } = ctx.options;\n let qs;\n if (singleQuote === false)\n qs = doubleQuotedString;\n else {\n const hasDouble = value.includes('\"');\n const hasSingle = value.includes(\"'\");\n if (hasDouble && !hasSingle)\n qs = singleQuotedString;\n else if (hasSingle && !hasDouble)\n qs = doubleQuotedString;\n else\n qs = singleQuote ? singleQuotedString : doubleQuotedString;\n }\n return qs(value, ctx);\n}\nfunction blockString({ comment, type, value }, ctx, onComment, onChompKeep) {\n const { blockQuote, commentString, lineWidth } = ctx.options;\n // 1. Block can't end in whitespace unless the last line is non-empty.\n // 2. Strings consisting of only whitespace are best rendered explicitly.\n if (!blockQuote || /\\n[\\t ]+$/.test(value) || /^\\s*$/.test(value)) {\n return quotedString(value, ctx);\n }\n const indent = ctx.indent ||\n (ctx.forceBlockIndent || containsDocumentMarker(value) ? ' ' : '');\n const literal = blockQuote === 'literal'\n ? true\n : blockQuote === 'folded' || type === Scalar.Scalar.BLOCK_FOLDED\n ? false\n : type === Scalar.Scalar.BLOCK_LITERAL\n ? true\n : !lineLengthOverLimit(value, lineWidth, indent.length);\n if (!value)\n return literal ? '|\\n' : '>\\n';\n // determine chomping from whitespace at value end\n let chomp;\n let endStart;\n for (endStart = value.length; endStart > 0; --endStart) {\n const ch = value[endStart - 1];\n if (ch !== '\\n' && ch !== '\\t' && ch !== ' ')\n break;\n }\n let end = value.substring(endStart);\n const endNlPos = end.indexOf('\\n');\n if (endNlPos === -1) {\n chomp = '-'; // strip\n }\n else if (value === end || endNlPos !== end.length - 1) {\n chomp = '+'; // keep\n if (onChompKeep)\n onChompKeep();\n }\n else {\n chomp = ''; // clip\n }\n if (end) {\n value = value.slice(0, -end.length);\n if (end[end.length - 1] === '\\n')\n end = end.slice(0, -1);\n end = end.replace(/\\n+(?!\\n|$)/g, `$&${indent}`);\n }\n // determine indent indicator from whitespace at value start\n let startWithSpace = false;\n let startEnd;\n let startNlPos = -1;\n for (startEnd = 0; startEnd < value.length; ++startEnd) {\n const ch = value[startEnd];\n if (ch === ' ')\n startWithSpace = true;\n else if (ch === '\\n')\n startNlPos = startEnd;\n else\n break;\n }\n let start = value.substring(0, startNlPos < startEnd ? startNlPos + 1 : startEnd);\n if (start) {\n value = value.substring(start.length);\n start = start.replace(/\\n+/g, `$&${indent}`);\n }\n const indentSize = indent ? '2' : '1'; // root is at -1\n let header = (literal ? '|' : '>') + (startWithSpace ? indentSize : '') + chomp;\n if (comment) {\n header += ' ' + commentString(comment.replace(/ ?[\\r\\n]+/g, ' '));\n if (onComment)\n onComment();\n }\n if (literal) {\n value = value.replace(/\\n+/g, `$&${indent}`);\n return `${header}\\n${indent}${start}${value}${end}`;\n }\n value = value\n .replace(/\\n+/g, '\\n$&')\n .replace(/(?:^|\\n)([\\t ].*)(?:([\\n\\t ]*)\\n(?![\\n\\t ]))?/g, '$1$2') // more-indented lines aren't folded\n // ^ more-ind. 
^ empty ^ capture next empty lines only at end of indent\n .replace(/\\n+/g, `$&${indent}`);\n const body = foldFlowLines.foldFlowLines(`${start}${value}${end}`, indent, foldFlowLines.FOLD_BLOCK, getFoldOptions(ctx, true));\n return `${header}\\n${indent}${body}`;\n}\nfunction plainString(item, ctx, onComment, onChompKeep) {\n const { type, value } = item;\n const { actualString, implicitKey, indent, indentStep, inFlow } = ctx;\n if ((implicitKey && /[\\n[\\]{},]/.test(value)) ||\n (inFlow && /[[\\]{},]/.test(value))) {\n return quotedString(value, ctx);\n }\n if (!value ||\n /^[\\n\\t ,[\\]{}#&*!|>'\"%@`]|^[?-]$|^[?-][ \\t]|[\\n:][ \\t]|[ \\t]\\n|[\\n\\t ]#|[\\n\\t :]$/.test(value)) {\n // not allowed:\n // - empty string, '-' or '?'\n // - start with an indicator character (except [?:-]) or /[?-] /\n // - '\\n ', ': ' or ' \\n' anywhere\n // - '#' not preceded by a non-space char\n // - end with ' ' or ':'\n return implicitKey || inFlow || !value.includes('\\n')\n ? quotedString(value, ctx)\n : blockString(item, ctx, onComment, onChompKeep);\n }\n if (!implicitKey &&\n !inFlow &&\n type !== Scalar.Scalar.PLAIN &&\n value.includes('\\n')) {\n // Where allowed & type not set explicitly, prefer block style for multiline strings\n return blockString(item, ctx, onComment, onChompKeep);\n }\n if (containsDocumentMarker(value)) {\n if (indent === '') {\n ctx.forceBlockIndent = true;\n return blockString(item, ctx, onComment, onChompKeep);\n }\n else if (implicitKey && indent === indentStep) {\n return quotedString(value, ctx);\n }\n }\n const str = value.replace(/\\n+/g, `$&\\n${indent}`);\n // Verify that output will be parsed as a string, as e.g. plain numbers and\n // booleans get parsed with those types in v1.2 (e.g. '42', 'true' & '0.9e-3'),\n // and others in v1.1.\n if (actualString) {\n const test = (tag) => tag.default && tag.tag !== 'tag:yaml.org,2002:str' && tag.test?.test(str);\n const { compat, tags } = ctx.doc.schema;\n if (tags.some(test) || compat?.some(test))\n return quotedString(value, ctx);\n }\n return implicitKey\n ? str\n : foldFlowLines.foldFlowLines(str, indent, foldFlowLines.FOLD_FLOW, getFoldOptions(ctx, false));\n}\nfunction stringifyString(item, ctx, onComment, onChompKeep) {\n const { implicitKey, inFlow } = ctx;\n const ss = typeof item.value === 'string'\n ? item\n : Object.assign({}, item, { value: String(item.value) });\n let { type } = item;\n if (type !== Scalar.Scalar.QUOTE_DOUBLE) {\n // force double quotes on control characters & unpaired surrogates\n if (/[\\x00-\\x08\\x0b-\\x1f\\x7f-\\x9f\\u{D800}-\\u{DFFF}]/u.test(ss.value))\n type = Scalar.Scalar.QUOTE_DOUBLE;\n }\n const _stringify = (_type) => {\n switch (_type) {\n case Scalar.Scalar.BLOCK_FOLDED:\n case Scalar.Scalar.BLOCK_LITERAL:\n return implicitKey || inFlow\n ? 
quotedString(ss.value, ctx) // blocks are not valid inside flow containers\n : blockString(ss, ctx, onComment, onChompKeep);\n case Scalar.Scalar.QUOTE_DOUBLE:\n return doubleQuotedString(ss.value, ctx);\n case Scalar.Scalar.QUOTE_SINGLE:\n return singleQuotedString(ss.value, ctx);\n case Scalar.Scalar.PLAIN:\n return plainString(ss, ctx, onComment, onChompKeep);\n default:\n return null;\n }\n };\n let res = _stringify(type);\n if (res === null) {\n const { defaultKeyType, defaultStringType } = ctx.options;\n const t = (implicitKey && defaultKeyType) || defaultStringType;\n res = _stringify(t);\n if (res === null)\n throw new Error(`Unsupported default string type ${t}`);\n }\n return res;\n}\n\nexports.stringifyString = stringifyString;\n","'use strict';\n\nvar Node = require('./nodes/Node.js');\n\nconst BREAK = Symbol('break visit');\nconst SKIP = Symbol('skip children');\nconst REMOVE = Symbol('remove node');\n/**\n * Apply a visitor to an AST node or document.\n *\n * Walks through the tree (depth-first) starting from `node`, calling a\n * `visitor` function with three arguments:\n * - `key`: For sequence values and map `Pair`, the node's index in the\n * collection. Within a `Pair`, `'key'` or `'value'`, correspondingly.\n * `null` for the root node.\n * - `node`: The current node.\n * - `path`: The ancestry of the current node.\n *\n * The return value of the visitor may be used to control the traversal:\n * - `undefined` (default): Do nothing and continue\n * - `visit.SKIP`: Do not visit the children of this node, continue with next\n * sibling\n * - `visit.BREAK`: Terminate traversal completely\n * - `visit.REMOVE`: Remove the current node, then continue with the next one\n * - `Node`: Replace the current node, then continue by visiting it\n * - `number`: While iterating the items of a sequence or map, set the index\n * of the next step. This is useful especially if the index of the current\n * node has changed.\n *\n * If `visitor` is a single function, it will be called with all values\n * encountered in the tree, including e.g. `null` values. Alternatively,\n * separate visitor functions may be defined for each `Map`, `Pair`, `Seq`,\n * `Alias` and `Scalar` node. To define the same visitor function for more than\n * one node type, use the `Collection` (map and seq), `Value` (map, seq & scalar)\n * and `Node` (alias, map, seq & scalar) targets. 
Of all these, only the most\n * specific defined one will be used for each node.\n */\nfunction visit(node, visitor) {\n const visitor_ = initVisitor(visitor);\n if (Node.isDocument(node)) {\n const cd = visit_(null, node.contents, visitor_, Object.freeze([node]));\n if (cd === REMOVE)\n node.contents = null;\n }\n else\n visit_(null, node, visitor_, Object.freeze([]));\n}\n// Without the `as symbol` casts, TS declares these in the `visit`\n// namespace using `var`, but then complains about that because\n// `unique symbol` must be `const`.\n/** Terminate visit traversal completely */\nvisit.BREAK = BREAK;\n/** Do not visit the children of the current node */\nvisit.SKIP = SKIP;\n/** Remove the current node */\nvisit.REMOVE = REMOVE;\nfunction visit_(key, node, visitor, path) {\n const ctrl = callVisitor(key, node, visitor, path);\n if (Node.isNode(ctrl) || Node.isPair(ctrl)) {\n replaceNode(key, path, ctrl);\n return visit_(key, ctrl, visitor, path);\n }\n if (typeof ctrl !== 'symbol') {\n if (Node.isCollection(node)) {\n path = Object.freeze(path.concat(node));\n for (let i = 0; i < node.items.length; ++i) {\n const ci = visit_(i, node.items[i], visitor, path);\n if (typeof ci === 'number')\n i = ci - 1;\n else if (ci === BREAK)\n return BREAK;\n else if (ci === REMOVE) {\n node.items.splice(i, 1);\n i -= 1;\n }\n }\n }\n else if (Node.isPair(node)) {\n path = Object.freeze(path.concat(node));\n const ck = visit_('key', node.key, visitor, path);\n if (ck === BREAK)\n return BREAK;\n else if (ck === REMOVE)\n node.key = null;\n const cv = visit_('value', node.value, visitor, path);\n if (cv === BREAK)\n return BREAK;\n else if (cv === REMOVE)\n node.value = null;\n }\n }\n return ctrl;\n}\n/**\n * Apply an async visitor to an AST node or document.\n *\n * Walks through the tree (depth-first) starting from `node`, calling a\n * `visitor` function with three arguments:\n * - `key`: For sequence values and map `Pair`, the node's index in the\n * collection. Within a `Pair`, `'key'` or `'value'`, correspondingly.\n * `null` for the root node.\n * - `node`: The current node.\n * - `path`: The ancestry of the current node.\n *\n * The return value of the visitor may be used to control the traversal:\n * - `Promise`: Must resolve to one of the following values\n * - `undefined` (default): Do nothing and continue\n * - `visit.SKIP`: Do not visit the children of this node, continue with next\n * sibling\n * - `visit.BREAK`: Terminate traversal completely\n * - `visit.REMOVE`: Remove the current node, then continue with the next one\n * - `Node`: Replace the current node, then continue by visiting it\n * - `number`: While iterating the items of a sequence or map, set the index\n * of the next step. This is useful especially if the index of the current\n * node has changed.\n *\n * If `visitor` is a single function, it will be called with all values\n * encountered in the tree, including e.g. `null` values. Alternatively,\n * separate visitor functions may be defined for each `Map`, `Pair`, `Seq`,\n * `Alias` and `Scalar` node. To define the same visitor function for more than\n * one node type, use the `Collection` (map and seq), `Value` (map, seq & scalar)\n * and `Node` (alias, map, seq & scalar) targets. 
Of all these, only the most\n * specific defined one will be used for each node.\n */\nasync function visitAsync(node, visitor) {\n const visitor_ = initVisitor(visitor);\n if (Node.isDocument(node)) {\n const cd = await visitAsync_(null, node.contents, visitor_, Object.freeze([node]));\n if (cd === REMOVE)\n node.contents = null;\n }\n else\n await visitAsync_(null, node, visitor_, Object.freeze([]));\n}\n// Without the `as symbol` casts, TS declares these in the `visit`\n// namespace using `var`, but then complains about that because\n// `unique symbol` must be `const`.\n/** Terminate visit traversal completely */\nvisitAsync.BREAK = BREAK;\n/** Do not visit the children of the current node */\nvisitAsync.SKIP = SKIP;\n/** Remove the current node */\nvisitAsync.REMOVE = REMOVE;\nasync function visitAsync_(key, node, visitor, path) {\n const ctrl = await callVisitor(key, node, visitor, path);\n if (Node.isNode(ctrl) || Node.isPair(ctrl)) {\n replaceNode(key, path, ctrl);\n return visitAsync_(key, ctrl, visitor, path);\n }\n if (typeof ctrl !== 'symbol') {\n if (Node.isCollection(node)) {\n path = Object.freeze(path.concat(node));\n for (let i = 0; i < node.items.length; ++i) {\n const ci = await visitAsync_(i, node.items[i], visitor, path);\n if (typeof ci === 'number')\n i = ci - 1;\n else if (ci === BREAK)\n return BREAK;\n else if (ci === REMOVE) {\n node.items.splice(i, 1);\n i -= 1;\n }\n }\n }\n else if (Node.isPair(node)) {\n path = Object.freeze(path.concat(node));\n const ck = await visitAsync_('key', node.key, visitor, path);\n if (ck === BREAK)\n return BREAK;\n else if (ck === REMOVE)\n node.key = null;\n const cv = await visitAsync_('value', node.value, visitor, path);\n if (cv === BREAK)\n return BREAK;\n else if (cv === REMOVE)\n node.value = null;\n }\n }\n return ctrl;\n}\nfunction initVisitor(visitor) {\n if (typeof visitor === 'object' &&\n (visitor.Collection || visitor.Node || visitor.Value)) {\n return Object.assign({\n Alias: visitor.Node,\n Map: visitor.Node,\n Scalar: visitor.Node,\n Seq: visitor.Node\n }, visitor.Value && {\n Map: visitor.Value,\n Scalar: visitor.Value,\n Seq: visitor.Value\n }, visitor.Collection && {\n Map: visitor.Collection,\n Seq: visitor.Collection\n }, visitor);\n }\n return visitor;\n}\nfunction callVisitor(key, node, visitor, path) {\n if (typeof visitor === 'function')\n return visitor(key, node, path);\n if (Node.isMap(node))\n return visitor.Map?.(key, node, path);\n if (Node.isSeq(node))\n return visitor.Seq?.(key, node, path);\n if (Node.isPair(node))\n return visitor.Pair?.(key, node, path);\n if (Node.isScalar(node))\n return visitor.Scalar?.(key, node, path);\n if (Node.isAlias(node))\n return visitor.Alias?.(key, node, path);\n return undefined;\n}\nfunction replaceNode(key, path, node) {\n const parent = path[path.length - 1];\n if (Node.isCollection(parent)) {\n parent.items[key] = node;\n }\n else if (Node.isPair(parent)) {\n if (key === 'key')\n parent.key = node;\n else\n parent.value = node;\n }\n else if (Node.isDocument(parent)) {\n parent.contents = node;\n }\n else {\n const pt = Node.isAlias(parent) ? 
'alias' : 'scalar';\n throw new Error(`Cannot replace node with ${pt} parent`);\n }\n}\n\nexports.visit = visit;\nexports.visitAsync = visitAsync;\n","// The module cache\nvar __webpack_module_cache__ = {};\n\n// The require function\nfunction __webpack_require__(moduleId) {\n\t// Check if module is in cache\n\tvar cachedModule = __webpack_module_cache__[moduleId];\n\tif (cachedModule !== undefined) {\n\t\treturn cachedModule.exports;\n\t}\n\t// Create a new module (and put it into the cache)\n\tvar module = __webpack_module_cache__[moduleId] = {\n\t\t// no module.id needed\n\t\t// no module.loaded needed\n\t\texports: {}\n\t};\n\n\t// Execute the module function\n\tvar threw = true;\n\ttry {\n\t\t__webpack_modules__[moduleId].call(module.exports, module, module.exports, __webpack_require__);\n\t\tthrew = false;\n\t} finally {\n\t\tif(threw) delete __webpack_module_cache__[moduleId];\n\t}\n\n\t// Return the exports of the module\n\treturn module.exports;\n}\n\n","\nif (typeof __webpack_require__ !== 'undefined') __webpack_require__.ab = __dirname + \"/\";","const core = require('@actions/core');\nconst fs = require('fs');\nconst glob = require('@actions/glob');\nconst path = require('path');\nconst yaml = require('yaml');\n\nconst sha1 = /\\b[a-f0-9]{40}\\b/i;\nconst sha256 = /\\b[A-Fa-f0-9]{64}\\b/i;\n\nasync function run() {\n try {\n const allowlist = core.getInput('allowlist');\n const isDryRun = core.getInput('dry_run') === 'true';\n const workflowsPath = process.env['ZG_WORKFLOWS_PATH'] || '.github/workflows';\n const globber = await glob.create([workflowsPath + '/*.yaml', workflowsPath + '/*.yml'].join('\\n'));\n let actionHasError = false;\n\n for await (const file of globber.globGenerator()) {\n const basename = path.basename(file);\n const fileContents = fs.readFileSync(file, 'utf8');\n const yamlContents = yaml.parse(fileContents);\n const jobs = yamlContents['jobs'];\n let fileHasError = false;\n\n if (jobs === undefined) {\n core.setFailed(`The \"${basename}\" workflow does not contain jobs.`);\n }\n\n core.startGroup(workflowsPath + '/' + basename);\n\n for (const job in jobs) {\n const uses = jobs[job]['uses'];\n const steps = jobs[job]['steps'];\n\n if (assertUsesVersion(uses)) {\n if (!assertUsesSha(uses) && !assertUsesAllowlist(uses, allowlist)) {\n actionHasError = true;\n fileHasError = true;\n\n reportError(`${uses} is not pinned to a full length commit SHA.`, isDryRun);\n }\n } else if (steps !== undefined) {\n for (const step of steps) {\n const uses = step['uses'];\n\n if (assertUsesVersion(uses) && !assertUsesSha(uses) && !assertUsesAllowlist(uses, allowlist)) {\n actionHasError = true;\n fileHasError = true;\n\n reportError(`${uses} is not pinned to a full length commit SHA.`, isDryRun);\n }\n }\n } else {\n core.warning(`The \"${job}\" job of the \"${basename}\" workflow does not contain uses or steps.`);\n }\n }\n\n if (!fileHasError) {\n core.info('No issues were found.')\n }\n\n core.endGroup();\n }\n\n if (!isDryRun && actionHasError) {\n throw new Error('At least one workflow contains an unpinned GitHub Action version.');\n }\n } catch (error) {\n core.setFailed(error.message);\n }\n}\n\nrun();\n\nfunction assertUsesVersion(uses) {\n return typeof uses === 'string' && uses.includes('@');\n}\n\nfunction assertUsesSha(uses) {\n if (uses.startsWith('docker://')) {\n return sha256.test(uses.substr(uses.indexOf('sha256:') + 7));\n }\n\n return sha1.test(uses.substr(uses.indexOf('@') + 1));\n}\n\nfunction assertUsesAllowlist(uses, allowlist) {\n if (!allowlist) 
{\n return false;\n }\n\n const action = uses.substr(0, uses.indexOf('@'));\n const isAllowed = allowlist.split(/\\r?\\n/).some((allow) => action.startsWith(allow));\n\n if(isAllowed) {\n core.info(`${action} matched allowlist — ignoring action.`)\n }\n\n return isAllowed;\n}\n\nfunction reportError(message, isDryRun) {\n if (isDryRun) {\n core.warning(message);\n } else {\n core.error(message);\n }\n}\n"],"names":[],"sourceRoot":""} \ No newline at end of file diff --git a/package-lock.json b/package-lock.json index 138a840..833e13d 100644 --- a/package-lock.json +++ b/package-lock.json @@ -11,7 +11,7 @@ "dependencies": { "@actions/core": "^1.10.0", "@actions/glob": "^0.4.0", - "yaml": "^2.2.1" + "yaml": "^2.2.2" }, "devDependencies": { "@vercel/ncc": "^0.36.1", @@ -4241,9 +4241,9 @@ "dev": true }, "node_modules/yaml": { - "version": "2.2.1", - "resolved": "https://registry.npmjs.org/yaml/-/yaml-2.2.1.tgz", - "integrity": "sha512-e0WHiYql7+9wr4cWMx3TVQrNwejKaEe7/rHNmQmqRjazfOP5W8PB6Jpebb5o6fIapbz9o9+2ipcaTM2ZwDI6lw==", + "version": "2.2.2", + "resolved": "https://registry.npmjs.org/yaml/-/yaml-2.2.2.tgz", + "integrity": "sha512-CBKFWExMn46Foo4cldiChEzn7S7SRV+wqiluAb6xmueD/fGyRHIhX8m14vVGgeFWjN540nKCNVj6P21eQjgTuA==", "engines": { "node": ">= 14" } @@ -7459,9 +7459,9 @@ "dev": true }, "yaml": { - "version": "2.2.1", - "resolved": "https://registry.npmjs.org/yaml/-/yaml-2.2.1.tgz", - "integrity": "sha512-e0WHiYql7+9wr4cWMx3TVQrNwejKaEe7/rHNmQmqRjazfOP5W8PB6Jpebb5o6fIapbz9o9+2ipcaTM2ZwDI6lw==" + "version": "2.2.2", + "resolved": "https://registry.npmjs.org/yaml/-/yaml-2.2.2.tgz", + "integrity": "sha512-CBKFWExMn46Foo4cldiChEzn7S7SRV+wqiluAb6xmueD/fGyRHIhX8m14vVGgeFWjN540nKCNVj6P21eQjgTuA==" }, "yargs": { "version": "17.7.1", diff --git a/package.json b/package.json index 84f040f..ee43eed 100644 --- a/package.json +++ b/package.json @@ -21,7 +21,7 @@ "dependencies": { "@actions/core": "^1.10.0", "@actions/glob": "^0.4.0", - "yaml": "^2.2.1" + "yaml": "^2.2.2" }, "devDependencies": { "@vercel/ncc": "^0.36.1",