import fs$l, { promises as promises$1 } from 'node:fs'; import fsp from 'node:fs/promises'; import path$o, { posix as posix$1, isAbsolute as isAbsolute$2, dirname as dirname$2, join as join$2, extname as extname$1, relative as relative$2, basename as basename$2 } from 'node:path'; import { fileURLToPath, URL as URL$3, parse as parse$i, pathToFileURL } from 'node:url'; import { promisify as promisify$4, format as format$2, inspect } from 'node:util'; import { performance } from 'node:perf_hooks'; import { createRequire as createRequire$1, builtinModules } from 'node:module'; import require$$0$3 from 'tty'; import require$$0$4, { win32, posix, isAbsolute as isAbsolute$1, resolve as resolve$3, relative as relative$1, basename as basename$1, extname, dirname as dirname$1, join as join$1, sep, normalize } from 'path'; import esbuild, { transform as transform$1, formatMessages, build as build$3 } from 'esbuild'; import { CLIENT_ENTRY, OPTIMIZABLE_ENTRY_RE, wildcardHosts, loopbackHosts, FS_PREFIX, CLIENT_PUBLIC_PATH, ENV_PUBLIC_PATH, DEFAULT_ASSETS_INLINE_LIMIT, CSS_LANGS_RE, ESBUILD_MODULES_TARGET, SPECIAL_QUERY_RE, ENV_ENTRY, DEP_VERSION_RE, DEFAULT_MAIN_FIELDS, DEFAULT_EXTENSIONS, KNOWN_ASSET_TYPES, JS_TYPES_RE, METADATA_FILENAME, VITE_PACKAGE_DIR, DEFAULT_DEV_PORT, CLIENT_DIR, VERSION, DEFAULT_PREVIEW_PORT, DEFAULT_ASSETS_RE, DEFAULT_CONFIG_FILES } from '../constants.js'; import * as require$$0$2 from 'fs'; import require$$0__default, { existsSync, readFileSync, statSync as statSync$1, readdirSync } from 'fs'; import require$$0$5 from 'events'; import require$$5 from 'assert'; import { createServer as createServer$3, STATUS_CODES, get as get$2 } from 'node:http'; import { createServer as createServer$2, get as get$1 } from 'node:https'; import require$$0$6 from 'util'; import require$$4$1 from 'net'; import require$$0$9 from 'url'; import require$$1 from 'http'; import require$$0$7 from 'stream'; import require$$2 from 'os'; import require$$2$1 from 'child_process'; import os$4 from 'node:os'; import { exec } from 'node:child_process'; import { createHash as createHash$2 } from 'node:crypto'; import { promises } from 'node:dns'; import require$$0$a from 'crypto'; import require$$0$8, { createRequire as createRequire$2 } from 'module'; import assert$1 from 'node:assert'; import v8 from 'node:v8'; import { Worker as Worker$1 } from 'node:worker_threads'; import { Buffer as Buffer$1 } from 'node:buffer'; import { EventEmitter as EventEmitter$4 } from 'node:events'; import { parseAst, parseAstAsync } from 'rollup/parseAst'; import * as qs from 'querystring'; import readline from 'node:readline'; import require$$0$b from 'zlib'; import require$$0$c from 'buffer'; import require$$1$1 from 'https'; import require$$4$2 from 'tls'; import zlib$1, { gzip } from 'node:zlib'; import { fileURLToPath as __cjs_fileURLToPath } from 'node:url'; import { dirname as __cjs_dirname } from 'node:path'; import { createRequire as __cjs_createRequire } from 'node:module'; const __filename = __cjs_fileURLToPath(import.meta.url); const __dirname = __cjs_dirname(__filename); const require = __cjs_createRequire(import.meta.url); const __require = require; var commonjsGlobal = typeof globalThis !== 'undefined' ? globalThis : typeof window !== 'undefined' ? window : typeof global !== 'undefined' ? global : typeof self !== 'undefined' ? self : {}; function getDefaultExportFromCjs (x) { return x && x.__esModule && Object.prototype.hasOwnProperty.call(x, 'default') ? 
x['default'] : x; } function getAugmentedNamespace(n) { if (n.__esModule) return n; var f = n.default; if (typeof f == "function") { var a = function a () { if (this instanceof a) { return Reflect.construct(f, arguments, this.constructor); } return f.apply(this, arguments); }; a.prototype = f.prototype; } else a = {}; Object.defineProperty(a, '__esModule', {value: true}); Object.keys(n).forEach(function (k) { var d = Object.getOwnPropertyDescriptor(n, k); Object.defineProperty(a, k, d.get ? d : { enumerable: true, get: function () { return n[k]; } }); }); return a; } var picocolors = {exports: {}}; let tty = require$$0$3; let isColorSupported = !("NO_COLOR" in process.env || process.argv.includes("--no-color")) && ("FORCE_COLOR" in process.env || process.argv.includes("--color") || process.platform === "win32" || (tty.isatty(1) && process.env.TERM !== "dumb") || "CI" in process.env); let formatter = (open, close, replace = open) => input => { let string = "" + input; let index = string.indexOf(close, open.length); return ~index ? open + replaceClose(string, close, replace, index) + close : open + string + close }; let replaceClose = (string, close, replace, index) => { let start = string.substring(0, index) + replace; let end = string.substring(index + close.length); let nextIndex = end.indexOf(close); return ~nextIndex ? start + replaceClose(end, close, replace, nextIndex) : start + end }; let createColors = (enabled = isColorSupported) => ({ isColorSupported: enabled, reset: enabled ? s => `\x1b[0m${s}\x1b[0m` : String, bold: enabled ? formatter("\x1b[1m", "\x1b[22m", "\x1b[22m\x1b[1m") : String, dim: enabled ? formatter("\x1b[2m", "\x1b[22m", "\x1b[22m\x1b[2m") : String, italic: enabled ? formatter("\x1b[3m", "\x1b[23m") : String, underline: enabled ? formatter("\x1b[4m", "\x1b[24m") : String, inverse: enabled ? formatter("\x1b[7m", "\x1b[27m") : String, hidden: enabled ? formatter("\x1b[8m", "\x1b[28m") : String, strikethrough: enabled ? formatter("\x1b[9m", "\x1b[29m") : String, black: enabled ? formatter("\x1b[30m", "\x1b[39m") : String, red: enabled ? formatter("\x1b[31m", "\x1b[39m") : String, green: enabled ? formatter("\x1b[32m", "\x1b[39m") : String, yellow: enabled ? formatter("\x1b[33m", "\x1b[39m") : String, blue: enabled ? formatter("\x1b[34m", "\x1b[39m") : String, magenta: enabled ? formatter("\x1b[35m", "\x1b[39m") : String, cyan: enabled ? formatter("\x1b[36m", "\x1b[39m") : String, white: enabled ? formatter("\x1b[37m", "\x1b[39m") : String, gray: enabled ? formatter("\x1b[90m", "\x1b[39m") : String, bgBlack: enabled ? formatter("\x1b[40m", "\x1b[49m") : String, bgRed: enabled ? formatter("\x1b[41m", "\x1b[49m") : String, bgGreen: enabled ? formatter("\x1b[42m", "\x1b[49m") : String, bgYellow: enabled ? formatter("\x1b[43m", "\x1b[49m") : String, bgBlue: enabled ? formatter("\x1b[44m", "\x1b[49m") : String, bgMagenta: enabled ? formatter("\x1b[45m", "\x1b[49m") : String, bgCyan: enabled ? formatter("\x1b[46m", "\x1b[49m") : String, bgWhite: enabled ? 
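/*
 * Illustrative note (assumed usage, not from the original source): each
 * formatter re-opens its style after a nested close sequence via replaceClose,
 * so, on a color-capable terminal,
 *   createColors(true).bold(`a ${createColors(true).dim('b')} c`)
 * keeps " c" bold because the inner "\x1b[22m" is rewritten to "\x1b[22m\x1b[1m".
 */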
formatter("\x1b[47m", "\x1b[49m") : String, }); picocolors.exports = createColors(); picocolors.exports.createColors = createColors; var picocolorsExports = picocolors.exports; var colors$1 = /*@__PURE__*/getDefaultExportFromCjs(picocolorsExports); function matches$1(pattern, importee) { if (pattern instanceof RegExp) { return pattern.test(importee); } if (importee.length < pattern.length) { return false; } if (importee === pattern) { return true; } // eslint-disable-next-line prefer-template return importee.startsWith(pattern + '/'); } function getEntries({ entries, customResolver }) { if (!entries) { return []; } const resolverFunctionFromOptions = resolveCustomResolver(customResolver); if (Array.isArray(entries)) { return entries.map((entry) => { return { find: entry.find, replacement: entry.replacement, resolverFunction: resolveCustomResolver(entry.customResolver) || resolverFunctionFromOptions }; }); } return Object.entries(entries).map(([key, value]) => { return { find: key, replacement: value, resolverFunction: resolverFunctionFromOptions }; }); } function getHookFunction(hook) { if (typeof hook === 'function') { return hook; } if (hook && 'handler' in hook && typeof hook.handler === 'function') { return hook.handler; } return null; } function resolveCustomResolver(customResolver) { if (typeof customResolver === 'function') { return customResolver; } if (customResolver) { return getHookFunction(customResolver.resolveId); } return null; } function alias$1(options = {}) { const entries = getEntries(options); if (entries.length === 0) { return { name: 'alias', resolveId: () => null }; } return { name: 'alias', async buildStart(inputOptions) { await Promise.all([...(Array.isArray(options.entries) ? options.entries : []), options].map(({ customResolver }) => { var _a; return customResolver && ((_a = getHookFunction(customResolver.buildStart)) === null || _a === void 0 ? void 0 : _a.call(this, inputOptions)); })); }, resolveId(importee, importer, resolveOptions) { // First match is supposed to be the correct one const matchedEntry = entries.find((entry) => matches$1(entry.find, importee)); if (!matchedEntry) { return null; } const updatedId = importee.replace(matchedEntry.find, matchedEntry.replacement); if (matchedEntry.resolverFunction) { return matchedEntry.resolverFunction.call(this, updatedId, importer, resolveOptions); } return this.resolve(updatedId, importer, Object.assign({ skipSelf: true }, resolveOptions)).then((resolved) => { if (resolved) return resolved; if (!require$$0$4.isAbsolute(updatedId)) { this.warn(`rewrote ${importee} to ${updatedId} but was not an abolute path and was not handled by other plugins. ` + `This will lead to duplicated modules for the same path. ` + `To avoid duplicating modules, you should resolve to an absolute path.`); } return { id: updatedId }; }); } }; } /** * Prefix for resolved Ids that are not valid browser import specifiers */ const VALID_ID_PREFIX = `/@id/`; /** * Plugins that use 'virtual modules' (e.g. for helper functions), prefix the * module ID with `\0`, a convention from the rollup ecosystem. * This prevents other plugins from trying to process the id (like node resolution), * and core features like sourcemaps can use this info to differentiate between * virtual modules and regular files. * `\0` is not a permitted char in import URLs so we have to replace them during * import analysis. The id will be decoded back before entering the plugins pipeline. 
* These encoded virtual ids are also prefixed by the VALID_ID_PREFIX, so virtual * modules in the browser end up encoded as `/@id/__x00__{id}` */ const NULL_BYTE_PLACEHOLDER = `__x00__`; let SOURCEMAPPING_URL = 'sourceMa'; SOURCEMAPPING_URL += 'ppingURL'; const VITE_RUNTIME_SOURCEMAPPING_SOURCE = '//# sourceMappingSource=vite-runtime'; const isWindows$5 = typeof process !== 'undefined' && process.platform === 'win32'; /** * Prepend `/@id/` and replace null byte so the id is URL-safe. * This is prepended to resolved ids that are not valid browser * import specifiers by the importAnalysis plugin. */ function wrapId$1(id) { return id.startsWith(VALID_ID_PREFIX) ? id : VALID_ID_PREFIX + id.replace('\0', NULL_BYTE_PLACEHOLDER); } /** * Undo {@link wrapId}'s `/@id/` and null byte replacements. */ function unwrapId$1(id) { return id.startsWith(VALID_ID_PREFIX) ? id.slice(VALID_ID_PREFIX.length).replace(NULL_BYTE_PLACEHOLDER, '\0') : id; } const windowsSlashRE = /\\/g; function slash$1(p) { return p.replace(windowsSlashRE, '/'); } const postfixRE = /[?#].*$/; function cleanUrl(url) { return url.replace(postfixRE, ''); } function withTrailingSlash(path) { if (path[path.length - 1] !== '/') { return `${path}/`; } return path; } // eslint-disable-next-line @typescript-eslint/no-empty-function const AsyncFunction = async function () { }.constructor; // https://github.com/nodejs/node/issues/43047#issuecomment-1564068099 const asyncFunctionDeclarationPaddingLineCount = /** #__PURE__ */ (() => { const body = '/*code*/'; const source = new AsyncFunction('a', 'b', body).toString(); return source.slice(0, source.indexOf(body)).split('\n').length - 1; })(); // @ts-check /** @typedef { import('estree').BaseNode} BaseNode */ /** @typedef {{ skip: () => void; remove: () => void; replace: (node: BaseNode) => void; }} WalkerContext */ let WalkerBase$1 = class WalkerBase { constructor() { /** @type {boolean} */ this.should_skip = false; /** @type {boolean} */ this.should_remove = false; /** @type {BaseNode | null} */ this.replacement = null; /** @type {WalkerContext} */ this.context = { skip: () => (this.should_skip = true), remove: () => (this.should_remove = true), replace: (node) => (this.replacement = node) }; } /** * * @param {any} parent * @param {string} prop * @param {number} index * @param {BaseNode} node */ replace(parent, prop, index, node) { if (parent) { if (index !== null) { parent[prop][index] = node; } else { parent[prop] = node; } } } /** * * @param {any} parent * @param {string} prop * @param {number} index */ remove(parent, prop, index) { if (parent) { if (index !== null) { parent[prop].splice(index, 1); } else { delete parent[prop]; } } } }; // @ts-check /** @typedef { import('estree').BaseNode} BaseNode */ /** @typedef { import('./walker.js').WalkerContext} WalkerContext */ /** @typedef {( * this: WalkerContext, * node: BaseNode, * parent: BaseNode, * key: string, * index: number * ) => void} SyncHandler */ let SyncWalker$1 = class SyncWalker extends WalkerBase$1 { /** * * @param {SyncHandler} enter * @param {SyncHandler} leave */ constructor(enter, leave) { super(); /** @type {SyncHandler} */ this.enter = enter; /** @type {SyncHandler} */ this.leave = leave; } /** * * @param {BaseNode} node * @param {BaseNode} parent * @param {string} [prop] * @param {number} [index] * @returns {BaseNode} */ visit(node, parent, prop, index) { if (node) { if (this.enter) { const _should_skip = this.should_skip; const _should_remove = this.should_remove; const _replacement = this.replacement; this.should_skip = 
false; this.should_remove = false; this.replacement = null; this.enter.call(this.context, node, parent, prop, index); if (this.replacement) { node = this.replacement; this.replace(parent, prop, index, node); } if (this.should_remove) { this.remove(parent, prop, index); } const skipped = this.should_skip; const removed = this.should_remove; this.should_skip = _should_skip; this.should_remove = _should_remove; this.replacement = _replacement; if (skipped) return node; if (removed) return null; } for (const key in node) { const value = node[key]; if (typeof value !== "object") { continue; } else if (Array.isArray(value)) { for (let i = 0; i < value.length; i += 1) { if (value[i] !== null && typeof value[i].type === 'string') { if (!this.visit(value[i], node, key, i)) { // removed i--; } } } } else if (value !== null && typeof value.type === "string") { this.visit(value, node, key, null); } } if (this.leave) { const _replacement = this.replacement; const _should_remove = this.should_remove; this.replacement = null; this.should_remove = false; this.leave.call(this.context, node, parent, prop, index); if (this.replacement) { node = this.replacement; this.replace(parent, prop, index, node); } if (this.should_remove) { this.remove(parent, prop, index); } const removed = this.should_remove; this.replacement = _replacement; this.should_remove = _should_remove; if (removed) return null; } } return node; } }; // @ts-check /** @typedef { import('estree').BaseNode} BaseNode */ /** @typedef { import('./sync.js').SyncHandler} SyncHandler */ /** @typedef { import('./async.js').AsyncHandler} AsyncHandler */ /** * * @param {BaseNode} ast * @param {{ * enter?: SyncHandler * leave?: SyncHandler * }} walker * @returns {BaseNode} */ function walk$3(ast, { enter, leave }) { const instance = new SyncWalker$1(enter, leave); return instance.visit(ast, null); } var utils$k = {}; const path$n = require$$0$4; const WIN_SLASH = '\\\\/'; const WIN_NO_SLASH = `[^${WIN_SLASH}]`; /** * Posix glob regex */ const DOT_LITERAL = '\\.'; const PLUS_LITERAL = '\\+'; const QMARK_LITERAL = '\\?'; const SLASH_LITERAL = '\\/'; const ONE_CHAR = '(?=.)'; const QMARK = '[^/]'; const END_ANCHOR = `(?:${SLASH_LITERAL}|$)`; const START_ANCHOR = `(?:^|${SLASH_LITERAL})`; const DOTS_SLASH = `${DOT_LITERAL}{1,2}${END_ANCHOR}`; const NO_DOT = `(?!${DOT_LITERAL})`; const NO_DOTS = `(?!${START_ANCHOR}${DOTS_SLASH})`; const NO_DOT_SLASH = `(?!${DOT_LITERAL}{0,1}${END_ANCHOR})`; const NO_DOTS_SLASH = `(?!${DOTS_SLASH})`; const QMARK_NO_DOT = `[^.${SLASH_LITERAL}]`; const STAR$1 = `${QMARK}*?`; const POSIX_CHARS = { DOT_LITERAL, PLUS_LITERAL, QMARK_LITERAL, SLASH_LITERAL, ONE_CHAR, QMARK, END_ANCHOR, DOTS_SLASH, NO_DOT, NO_DOTS, NO_DOT_SLASH, NO_DOTS_SLASH, QMARK_NO_DOT, STAR: STAR$1, START_ANCHOR }; /** * Windows glob regex */ const WINDOWS_CHARS = { ...POSIX_CHARS, SLASH_LITERAL: `[${WIN_SLASH}]`, QMARK: WIN_NO_SLASH, STAR: `${WIN_NO_SLASH}*?`, DOTS_SLASH: `${DOT_LITERAL}{1,2}(?:[${WIN_SLASH}]|$)`, NO_DOT: `(?!${DOT_LITERAL})`, NO_DOTS: `(?!(?:^|[${WIN_SLASH}])${DOT_LITERAL}{1,2}(?:[${WIN_SLASH}]|$))`, NO_DOT_SLASH: `(?!${DOT_LITERAL}{0,1}(?:[${WIN_SLASH}]|$))`, NO_DOTS_SLASH: `(?!${DOT_LITERAL}{1,2}(?:[${WIN_SLASH}]|$))`, QMARK_NO_DOT: `[^.${WIN_SLASH}]`, START_ANCHOR: `(?:^|[${WIN_SLASH}])`, END_ANCHOR: `(?:[${WIN_SLASH}]|$)` }; /** * POSIX Bracket Regex */ const POSIX_REGEX_SOURCE$1 = { alnum: 'a-zA-Z0-9', alpha: 'a-zA-Z', ascii: '\\x00-\\x7F', blank: ' \\t', cntrl: '\\x00-\\x1F\\x7F', digit: '0-9', graph: '\\x21-\\x7E', lower: 'a-z', print: 
'\\x20-\\x7E ', punct: '\\-!"#$%&\'()\\*+,./:;<=>?@[\\]^_`{|}~', space: ' \\t\\r\\n\\v\\f', upper: 'A-Z', word: 'A-Za-z0-9_', xdigit: 'A-Fa-f0-9' }; var constants$6 = { MAX_LENGTH: 1024 * 64, POSIX_REGEX_SOURCE: POSIX_REGEX_SOURCE$1, // regular expressions REGEX_BACKSLASH: /\\(?![*+?^${}(|)[\]])/g, REGEX_NON_SPECIAL_CHARS: /^[^@![\].,$*+?^{}()|\\/]+/, REGEX_SPECIAL_CHARS: /[-*+?.^${}(|)[\]]/, REGEX_SPECIAL_CHARS_BACKREF: /(\\?)((\W)(\3*))/g, REGEX_SPECIAL_CHARS_GLOBAL: /([-*+?.^${}(|)[\]])/g, REGEX_REMOVE_BACKSLASH: /(?:\[.*?[^\\]\]|\\(?=.))/g, // Replace globs with equivalent patterns to reduce parsing time. REPLACEMENTS: { '***': '*', '**/**': '**', '**/**/**': '**' }, // Digits CHAR_0: 48, /* 0 */ CHAR_9: 57, /* 9 */ // Alphabet chars. CHAR_UPPERCASE_A: 65, /* A */ CHAR_LOWERCASE_A: 97, /* a */ CHAR_UPPERCASE_Z: 90, /* Z */ CHAR_LOWERCASE_Z: 122, /* z */ CHAR_LEFT_PARENTHESES: 40, /* ( */ CHAR_RIGHT_PARENTHESES: 41, /* ) */ CHAR_ASTERISK: 42, /* * */ // Non-alphabetic chars. CHAR_AMPERSAND: 38, /* & */ CHAR_AT: 64, /* @ */ CHAR_BACKWARD_SLASH: 92, /* \ */ CHAR_CARRIAGE_RETURN: 13, /* \r */ CHAR_CIRCUMFLEX_ACCENT: 94, /* ^ */ CHAR_COLON: 58, /* : */ CHAR_COMMA: 44, /* , */ CHAR_DOT: 46, /* . */ CHAR_DOUBLE_QUOTE: 34, /* " */ CHAR_EQUAL: 61, /* = */ CHAR_EXCLAMATION_MARK: 33, /* ! */ CHAR_FORM_FEED: 12, /* \f */ CHAR_FORWARD_SLASH: 47, /* / */ CHAR_GRAVE_ACCENT: 96, /* ` */ CHAR_HASH: 35, /* # */ CHAR_HYPHEN_MINUS: 45, /* - */ CHAR_LEFT_ANGLE_BRACKET: 60, /* < */ CHAR_LEFT_CURLY_BRACE: 123, /* { */ CHAR_LEFT_SQUARE_BRACKET: 91, /* [ */ CHAR_LINE_FEED: 10, /* \n */ CHAR_NO_BREAK_SPACE: 160, /* \u00A0 */ CHAR_PERCENT: 37, /* % */ CHAR_PLUS: 43, /* + */ CHAR_QUESTION_MARK: 63, /* ? */ CHAR_RIGHT_ANGLE_BRACKET: 62, /* > */ CHAR_RIGHT_CURLY_BRACE: 125, /* } */ CHAR_RIGHT_SQUARE_BRACKET: 93, /* ] */ CHAR_SEMICOLON: 59, /* ; */ CHAR_SINGLE_QUOTE: 39, /* ' */ CHAR_SPACE: 32, /* */ CHAR_TAB: 9, /* \t */ CHAR_UNDERSCORE: 95, /* _ */ CHAR_VERTICAL_LINE: 124, /* | */ CHAR_ZERO_WIDTH_NOBREAK_SPACE: 65279, /* \uFEFF */ SEP: path$n.sep, /** * Create EXTGLOB_CHARS */ extglobChars(chars) { return { '!': { type: 'negate', open: '(?:(?!(?:', close: `))${chars.STAR})` }, '?': { type: 'qmark', open: '(?:', close: ')?' }, '+': { type: 'plus', open: '(?:', close: ')+' }, '*': { type: 'star', open: '(?:', close: ')*' }, '@': { type: 'at', open: '(?:', close: ')' } }; }, /** * Create GLOB_CHARS */ globChars(win32) { return win32 === true ? WINDOWS_CHARS : POSIX_CHARS; } }; (function (exports) { const path = require$$0$4; const win32 = process.platform === 'win32'; const { REGEX_BACKSLASH, REGEX_REMOVE_BACKSLASH, REGEX_SPECIAL_CHARS, REGEX_SPECIAL_CHARS_GLOBAL } = constants$6; exports.isObject = val => val !== null && typeof val === 'object' && !Array.isArray(val); exports.hasRegexChars = str => REGEX_SPECIAL_CHARS.test(str); exports.isRegexChar = str => str.length === 1 && exports.hasRegexChars(str); exports.escapeRegex = str => str.replace(REGEX_SPECIAL_CHARS_GLOBAL, '\\$1'); exports.toPosixSlashes = str => str.replace(REGEX_BACKSLASH, '/'); exports.removeBackslashes = str => { return str.replace(REGEX_REMOVE_BACKSLASH, match => { return match === '\\' ? 
'' : match; }); }; exports.supportsLookbehinds = () => { const segs = process.version.slice(1).split('.').map(Number); if (segs.length === 3 && segs[0] >= 9 || (segs[0] === 8 && segs[1] >= 10)) { return true; } return false; }; exports.isWindows = options => { if (options && typeof options.windows === 'boolean') { return options.windows; } return win32 === true || path.sep === '\\'; }; exports.escapeLast = (input, char, lastIdx) => { const idx = input.lastIndexOf(char, lastIdx); if (idx === -1) return input; if (input[idx - 1] === '\\') return exports.escapeLast(input, char, idx - 1); return `${input.slice(0, idx)}\\${input.slice(idx)}`; }; exports.removePrefix = (input, state = {}) => { let output = input; if (output.startsWith('./')) { output = output.slice(2); state.prefix = './'; } return output; }; exports.wrapOutput = (input, state = {}, options = {}) => { const prepend = options.contains ? '' : '^'; const append = options.contains ? '' : '$'; let output = `${prepend}(?:${input})${append}`; if (state.negated === true) { output = `(?:^(?!${output}).*$)`; } return output; }; } (utils$k)); const utils$j = utils$k; const { CHAR_ASTERISK, /* * */ CHAR_AT, /* @ */ CHAR_BACKWARD_SLASH, /* \ */ CHAR_COMMA: CHAR_COMMA$1, /* , */ CHAR_DOT: CHAR_DOT$1, /* . */ CHAR_EXCLAMATION_MARK, /* ! */ CHAR_FORWARD_SLASH, /* / */ CHAR_LEFT_CURLY_BRACE: CHAR_LEFT_CURLY_BRACE$1, /* { */ CHAR_LEFT_PARENTHESES: CHAR_LEFT_PARENTHESES$1, /* ( */ CHAR_LEFT_SQUARE_BRACKET: CHAR_LEFT_SQUARE_BRACKET$1, /* [ */ CHAR_PLUS, /* + */ CHAR_QUESTION_MARK, /* ? */ CHAR_RIGHT_CURLY_BRACE: CHAR_RIGHT_CURLY_BRACE$1, /* } */ CHAR_RIGHT_PARENTHESES: CHAR_RIGHT_PARENTHESES$1, /* ) */ CHAR_RIGHT_SQUARE_BRACKET: CHAR_RIGHT_SQUARE_BRACKET$1 /* ] */ } = constants$6; const isPathSeparator = code => { return code === CHAR_FORWARD_SLASH || code === CHAR_BACKWARD_SLASH; }; const depth = token => { if (token.isPrefix !== true) { token.depth = token.isGlobstar ? Infinity : 1; } }; /** * Quickly scans a glob pattern and returns an object with a handful of * useful properties, like `isGlob`, `path` (the leading non-glob, if it exists), * `glob` (the actual pattern), `negated` (true if the path starts with `!` but not * with `!(`) and `negatedExtglob` (true if the path starts with `!(`). * * ```js * const pm = require('picomatch'); * console.log(pm.scan('foo/bar/*.js')); * { isGlob: true, input: 'foo/bar/*.js', base: 'foo/bar', glob: '*.js' } * ``` * @param {String} `str` * @param {Object} `options` * @return {Object} Returns an object with tokens and regex source string. 
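 * Illustrative addition: when called with `{ tokens: true, parts: true }` the
 * returned state also carries `tokens`, `parts`, `slashes` and `maxDepth`
 * (see the implementation below), e.g.
 * pm.scan('foo/bar/*.js', { parts: true }).parts //=> ['foo', 'bar', '*.js']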
* @api public */ const scan$2 = (input, options) => { const opts = options || {}; const length = input.length - 1; const scanToEnd = opts.parts === true || opts.scanToEnd === true; const slashes = []; const tokens = []; const parts = []; let str = input; let index = -1; let start = 0; let lastIndex = 0; let isBrace = false; let isBracket = false; let isGlob = false; let isExtglob = false; let isGlobstar = false; let braceEscaped = false; let backslashes = false; let negated = false; let negatedExtglob = false; let finished = false; let braces = 0; let prev; let code; let token = { value: '', depth: 0, isGlob: false }; const eos = () => index >= length; const peek = () => str.charCodeAt(index + 1); const advance = () => { prev = code; return str.charCodeAt(++index); }; while (index < length) { code = advance(); let next; if (code === CHAR_BACKWARD_SLASH) { backslashes = token.backslashes = true; code = advance(); if (code === CHAR_LEFT_CURLY_BRACE$1) { braceEscaped = true; } continue; } if (braceEscaped === true || code === CHAR_LEFT_CURLY_BRACE$1) { braces++; while (eos() !== true && (code = advance())) { if (code === CHAR_BACKWARD_SLASH) { backslashes = token.backslashes = true; advance(); continue; } if (code === CHAR_LEFT_CURLY_BRACE$1) { braces++; continue; } if (braceEscaped !== true && code === CHAR_DOT$1 && (code = advance()) === CHAR_DOT$1) { isBrace = token.isBrace = true; isGlob = token.isGlob = true; finished = true; if (scanToEnd === true) { continue; } break; } if (braceEscaped !== true && code === CHAR_COMMA$1) { isBrace = token.isBrace = true; isGlob = token.isGlob = true; finished = true; if (scanToEnd === true) { continue; } break; } if (code === CHAR_RIGHT_CURLY_BRACE$1) { braces--; if (braces === 0) { braceEscaped = false; isBrace = token.isBrace = true; finished = true; break; } } } if (scanToEnd === true) { continue; } break; } if (code === CHAR_FORWARD_SLASH) { slashes.push(index); tokens.push(token); token = { value: '', depth: 0, isGlob: false }; if (finished === true) continue; if (prev === CHAR_DOT$1 && index === (start + 1)) { start += 2; continue; } lastIndex = index + 1; continue; } if (opts.noext !== true) { const isExtglobChar = code === CHAR_PLUS || code === CHAR_AT || code === CHAR_ASTERISK || code === CHAR_QUESTION_MARK || code === CHAR_EXCLAMATION_MARK; if (isExtglobChar === true && peek() === CHAR_LEFT_PARENTHESES$1) { isGlob = token.isGlob = true; isExtglob = token.isExtglob = true; finished = true; if (code === CHAR_EXCLAMATION_MARK && index === start) { negatedExtglob = true; } if (scanToEnd === true) { while (eos() !== true && (code = advance())) { if (code === CHAR_BACKWARD_SLASH) { backslashes = token.backslashes = true; code = advance(); continue; } if (code === CHAR_RIGHT_PARENTHESES$1) { isGlob = token.isGlob = true; finished = true; break; } } continue; } break; } } if (code === CHAR_ASTERISK) { if (prev === CHAR_ASTERISK) isGlobstar = token.isGlobstar = true; isGlob = token.isGlob = true; finished = true; if (scanToEnd === true) { continue; } break; } if (code === CHAR_QUESTION_MARK) { isGlob = token.isGlob = true; finished = true; if (scanToEnd === true) { continue; } break; } if (code === CHAR_LEFT_SQUARE_BRACKET$1) { while (eos() !== true && (next = advance())) { if (next === CHAR_BACKWARD_SLASH) { backslashes = token.backslashes = true; advance(); continue; } if (next === CHAR_RIGHT_SQUARE_BRACKET$1) { isBracket = token.isBracket = true; isGlob = token.isGlob = true; finished = true; break; } } if (scanToEnd === true) { continue; } break; 
} if (opts.nonegate !== true && code === CHAR_EXCLAMATION_MARK && index === start) { negated = token.negated = true; start++; continue; } if (opts.noparen !== true && code === CHAR_LEFT_PARENTHESES$1) { isGlob = token.isGlob = true; if (scanToEnd === true) { while (eos() !== true && (code = advance())) { if (code === CHAR_LEFT_PARENTHESES$1) { backslashes = token.backslashes = true; code = advance(); continue; } if (code === CHAR_RIGHT_PARENTHESES$1) { finished = true; break; } } continue; } break; } if (isGlob === true) { finished = true; if (scanToEnd === true) { continue; } break; } } if (opts.noext === true) { isExtglob = false; isGlob = false; } let base = str; let prefix = ''; let glob = ''; if (start > 0) { prefix = str.slice(0, start); str = str.slice(start); lastIndex -= start; } if (base && isGlob === true && lastIndex > 0) { base = str.slice(0, lastIndex); glob = str.slice(lastIndex); } else if (isGlob === true) { base = ''; glob = str; } else { base = str; } if (base && base !== '' && base !== '/' && base !== str) { if (isPathSeparator(base.charCodeAt(base.length - 1))) { base = base.slice(0, -1); } } if (opts.unescape === true) { if (glob) glob = utils$j.removeBackslashes(glob); if (base && backslashes === true) { base = utils$j.removeBackslashes(base); } } const state = { prefix, input, start, base, glob, isBrace, isBracket, isGlob, isExtglob, isGlobstar, negated, negatedExtglob }; if (opts.tokens === true) { state.maxDepth = 0; if (!isPathSeparator(code)) { tokens.push(token); } state.tokens = tokens; } if (opts.parts === true || opts.tokens === true) { let prevIndex; for (let idx = 0; idx < slashes.length; idx++) { const n = prevIndex ? prevIndex + 1 : start; const i = slashes[idx]; const value = input.slice(n, i); if (opts.tokens) { if (idx === 0 && start !== 0) { tokens[idx].isPrefix = true; tokens[idx].value = prefix; } else { tokens[idx].value = value; } depth(tokens[idx]); state.maxDepth += tokens[idx].depth; } if (idx !== 0 || value !== '') { parts.push(value); } prevIndex = i; } if (prevIndex && prevIndex + 1 < input.length) { const value = input.slice(prevIndex + 1); parts.push(value); if (opts.tokens) { tokens[tokens.length - 1].value = value; depth(tokens[tokens.length - 1]); state.maxDepth += tokens[tokens.length - 1].depth; } } state.slashes = slashes; state.parts = parts; } return state; }; var scan_1 = scan$2; const constants$5 = constants$6; const utils$i = utils$k; /** * Constants */ const { MAX_LENGTH: MAX_LENGTH$1, POSIX_REGEX_SOURCE, REGEX_NON_SPECIAL_CHARS, REGEX_SPECIAL_CHARS_BACKREF, REPLACEMENTS } = constants$5; /** * Helpers */ const expandRange = (args, options) => { if (typeof options.expandRange === 'function') { return options.expandRange(...args, options); } args.sort(); const value = `[${args.join('-')}]`; return value; }; /** * Create the message for a syntax error */ const syntaxError = (type, char) => { return `Missing ${type}: "${char}" - use "\\\\${char}" to match literal characters`; }; /** * Parse the given input string. * @param {String} input * @param {Object} options * @return {Object} */ const parse$h = (input, options) => { if (typeof input !== 'string') { throw new TypeError('Expected a string'); } input = REPLACEMENTS[input] || input; const opts = { ...options }; const max = typeof opts.maxLength === 'number' ? 
Math.min(MAX_LENGTH$1, opts.maxLength) : MAX_LENGTH$1; let len = input.length; if (len > max) { throw new SyntaxError(`Input length: ${len}, exceeds maximum allowed length: ${max}`); } const bos = { type: 'bos', value: '', output: opts.prepend || '' }; const tokens = [bos]; const capture = opts.capture ? '' : '?:'; const win32 = utils$i.isWindows(options); // create constants based on platform, for windows or posix const PLATFORM_CHARS = constants$5.globChars(win32); const EXTGLOB_CHARS = constants$5.extglobChars(PLATFORM_CHARS); const { DOT_LITERAL, PLUS_LITERAL, SLASH_LITERAL, ONE_CHAR, DOTS_SLASH, NO_DOT, NO_DOT_SLASH, NO_DOTS_SLASH, QMARK, QMARK_NO_DOT, STAR, START_ANCHOR } = PLATFORM_CHARS; const globstar = opts => { return `(${capture}(?:(?!${START_ANCHOR}${opts.dot ? DOTS_SLASH : DOT_LITERAL}).)*?)`; }; const nodot = opts.dot ? '' : NO_DOT; const qmarkNoDot = opts.dot ? QMARK : QMARK_NO_DOT; let star = opts.bash === true ? globstar(opts) : STAR; if (opts.capture) { star = `(${star})`; } // minimatch options support if (typeof opts.noext === 'boolean') { opts.noextglob = opts.noext; } const state = { input, index: -1, start: 0, dot: opts.dot === true, consumed: '', output: '', prefix: '', backtrack: false, negated: false, brackets: 0, braces: 0, parens: 0, quotes: 0, globstar: false, tokens }; input = utils$i.removePrefix(input, state); len = input.length; const extglobs = []; const braces = []; const stack = []; let prev = bos; let value; /** * Tokenizing helpers */ const eos = () => state.index === len - 1; const peek = state.peek = (n = 1) => input[state.index + n]; const advance = state.advance = () => input[++state.index] || ''; const remaining = () => input.slice(state.index + 1); const consume = (value = '', num = 0) => { state.consumed += value; state.index += num; }; const append = token => { state.output += token.output != null ? token.output : token.value; consume(token.value); }; const negate = () => { let count = 1; while (peek() === '!' && (peek(2) !== '(' || peek(3) === '?')) { advance(); state.start++; count++; } if (count % 2 === 0) { return false; } state.negated = true; state.start++; return true; }; const increment = type => { state[type]++; stack.push(type); }; const decrement = type => { state[type]--; stack.pop(); }; /** * Push tokens onto the tokens array. This helper speeds up * tokenizing by 1) helping us avoid backtracking as much as possible, * and 2) helping us avoid creating extra tokens when consecutive * characters are plain text. This improves performance and simplifies * lookbehinds. 
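 * Illustrative example: when parsing a pattern like 'foo*bar', the characters
 * 'foo' and 'bar' each end up as a single merged 'text' token (see the
 * prev.type === 'text' branch below) rather than one token per character.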
*/ const push = tok => { if (prev.type === 'globstar') { const isBrace = state.braces > 0 && (tok.type === 'comma' || tok.type === 'brace'); const isExtglob = tok.extglob === true || (extglobs.length && (tok.type === 'pipe' || tok.type === 'paren')); if (tok.type !== 'slash' && tok.type !== 'paren' && !isBrace && !isExtglob) { state.output = state.output.slice(0, -prev.output.length); prev.type = 'star'; prev.value = '*'; prev.output = star; state.output += prev.output; } } if (extglobs.length && tok.type !== 'paren') { extglobs[extglobs.length - 1].inner += tok.value; } if (tok.value || tok.output) append(tok); if (prev && prev.type === 'text' && tok.type === 'text') { prev.value += tok.value; prev.output = (prev.output || '') + tok.value; return; } tok.prev = prev; tokens.push(tok); prev = tok; }; const extglobOpen = (type, value) => { const token = { ...EXTGLOB_CHARS[value], conditions: 1, inner: '' }; token.prev = prev; token.parens = state.parens; token.output = state.output; const output = (opts.capture ? '(' : '') + token.open; increment('parens'); push({ type, value, output: state.output ? '' : ONE_CHAR }); push({ type: 'paren', extglob: true, value: advance(), output }); extglobs.push(token); }; const extglobClose = token => { let output = token.close + (opts.capture ? ')' : ''); let rest; if (token.type === 'negate') { let extglobStar = star; if (token.inner && token.inner.length > 1 && token.inner.includes('/')) { extglobStar = globstar(opts); } if (extglobStar !== star || eos() || /^\)+$/.test(remaining())) { output = token.close = `)$))${extglobStar}`; } if (token.inner.includes('*') && (rest = remaining()) && /^\.[^\\/.]+$/.test(rest)) { // Any non-magical string (`.ts`) or even nested expression (`.{ts,tsx}`) can follow after the closing parenthesis. // In this case, we need to parse the string and use it in the output of the original pattern. // Suitable patterns: `/!(*.d).ts`, `/!(*.d).{ts,tsx}`, `**/!(*-dbg).@(js)`. // // Disabling the `fastpaths` option due to a problem with parsing strings as `.ts` in the pattern like `**/!(*.d).ts`. const expression = parse$h(rest, { ...options, fastpaths: false }).output; output = token.close = `)${expression})${extglobStar})`; } if (token.prev.type === 'bos') { state.negatedExtglob = true; } } push({ type: 'paren', extglob: true, value, output }); decrement('parens'); }; /** * Fast paths */ if (opts.fastpaths !== false && !/(^[*!]|[/()[\]{}"])/.test(input)) { let backslashes = false; let output = input.replace(REGEX_SPECIAL_CHARS_BACKREF, (m, esc, chars, first, rest, index) => { if (first === '\\') { backslashes = true; return m; } if (first === '?') { if (esc) { return esc + first + (rest ? QMARK.repeat(rest.length) : ''); } if (index === 0) { return qmarkNoDot + (rest ? QMARK.repeat(rest.length) : ''); } return QMARK.repeat(chars.length); } if (first === '.') { return DOT_LITERAL.repeat(chars.length); } if (first === '*') { if (esc) { return esc + first + (rest ? star : ''); } return star; } return esc ? m : `\\${m}`; }); if (backslashes === true) { if (opts.unescape === true) { output = output.replace(/\\/g, ''); } else { output = output.replace(/\\+/g, m => { return m.length % 2 === 0 ? '\\\\' : (m ? 
'\\' : ''); }); } } if (output === input && opts.contains === true) { state.output = input; return state; } state.output = utils$i.wrapOutput(output, state, options); return state; } /** * Tokenize input until we reach end-of-string */ while (!eos()) { value = advance(); if (value === '\u0000') { continue; } /** * Escaped characters */ if (value === '\\') { const next = peek(); if (next === '/' && opts.bash !== true) { continue; } if (next === '.' || next === ';') { continue; } if (!next) { value += '\\'; push({ type: 'text', value }); continue; } // collapse slashes to reduce potential for exploits const match = /^\\+/.exec(remaining()); let slashes = 0; if (match && match[0].length > 2) { slashes = match[0].length; state.index += slashes; if (slashes % 2 !== 0) { value += '\\'; } } if (opts.unescape === true) { value = advance(); } else { value += advance(); } if (state.brackets === 0) { push({ type: 'text', value }); continue; } } /** * If we're inside a regex character class, continue * until we reach the closing bracket. */ if (state.brackets > 0 && (value !== ']' || prev.value === '[' || prev.value === '[^')) { if (opts.posix !== false && value === ':') { const inner = prev.value.slice(1); if (inner.includes('[')) { prev.posix = true; if (inner.includes(':')) { const idx = prev.value.lastIndexOf('['); const pre = prev.value.slice(0, idx); const rest = prev.value.slice(idx + 2); const posix = POSIX_REGEX_SOURCE[rest]; if (posix) { prev.value = pre + posix; state.backtrack = true; advance(); if (!bos.output && tokens.indexOf(prev) === 1) { bos.output = ONE_CHAR; } continue; } } } } if ((value === '[' && peek() !== ':') || (value === '-' && peek() === ']')) { value = `\\${value}`; } if (value === ']' && (prev.value === '[' || prev.value === '[^')) { value = `\\${value}`; } if (opts.posix === true && value === '!' && prev.value === '[') { value = '^'; } prev.value += value; append({ value }); continue; } /** * If we're inside a quoted string, continue * until we reach the closing double quote. */ if (state.quotes === 1 && value !== '"') { value = utils$i.escapeRegex(value); prev.value += value; append({ value }); continue; } /** * Double quotes */ if (value === '"') { state.quotes = state.quotes === 1 ? 0 : 1; if (opts.keepQuotes === true) { push({ type: 'text', value }); } continue; } /** * Parentheses */ if (value === '(') { increment('parens'); push({ type: 'paren', value }); continue; } if (value === ')') { if (state.parens === 0 && opts.strictBrackets === true) { throw new SyntaxError(syntaxError('opening', '(')); } const extglob = extglobs[extglobs.length - 1]; if (extglob && state.parens === extglob.parens + 1) { extglobClose(extglobs.pop()); continue; } push({ type: 'paren', value, output: state.parens ? 
')' : '\\)' }); decrement('parens'); continue; } /** * Square brackets */ if (value === '[') { if (opts.nobracket === true || !remaining().includes(']')) { if (opts.nobracket !== true && opts.strictBrackets === true) { throw new SyntaxError(syntaxError('closing', ']')); } value = `\\${value}`; } else { increment('brackets'); } push({ type: 'bracket', value }); continue; } if (value === ']') { if (opts.nobracket === true || (prev && prev.type === 'bracket' && prev.value.length === 1)) { push({ type: 'text', value, output: `\\${value}` }); continue; } if (state.brackets === 0) { if (opts.strictBrackets === true) { throw new SyntaxError(syntaxError('opening', '[')); } push({ type: 'text', value, output: `\\${value}` }); continue; } decrement('brackets'); const prevValue = prev.value.slice(1); if (prev.posix !== true && prevValue[0] === '^' && !prevValue.includes('/')) { value = `/${value}`; } prev.value += value; append({ value }); // when literal brackets are explicitly disabled // assume we should match with a regex character class if (opts.literalBrackets === false || utils$i.hasRegexChars(prevValue)) { continue; } const escaped = utils$i.escapeRegex(prev.value); state.output = state.output.slice(0, -prev.value.length); // when literal brackets are explicitly enabled // assume we should escape the brackets to match literal characters if (opts.literalBrackets === true) { state.output += escaped; prev.value = escaped; continue; } // when the user specifies nothing, try to match both prev.value = `(${capture}${escaped}|${prev.value})`; state.output += prev.value; continue; } /** * Braces */ if (value === '{' && opts.nobrace !== true) { increment('braces'); const open = { type: 'brace', value, output: '(', outputIndex: state.output.length, tokensIndex: state.tokens.length }; braces.push(open); push(open); continue; } if (value === '}') { const brace = braces[braces.length - 1]; if (opts.nobrace === true || !brace) { push({ type: 'text', value, output: value }); continue; } let output = ')'; if (brace.dots === true) { const arr = tokens.slice(); const range = []; for (let i = arr.length - 1; i >= 0; i--) { tokens.pop(); if (arr[i].type === 'brace') { break; } if (arr[i].type !== 'dots') { range.unshift(arr[i].value); } } output = expandRange(range, opts); state.backtrack = true; } if (brace.comma !== true && brace.dots !== true) { const out = state.output.slice(0, brace.outputIndex); const toks = state.tokens.slice(brace.tokensIndex); brace.value = brace.output = '\\{'; value = output = '\\}'; state.output = out; for (const t of toks) { state.output += (t.output || t.value); } } push({ type: 'brace', value, output }); decrement('braces'); braces.pop(); continue; } /** * Pipes */ if (value === '|') { if (extglobs.length > 0) { extglobs[extglobs.length - 1].conditions++; } push({ type: 'text', value }); continue; } /** * Commas */ if (value === ',') { let output = value; const brace = braces[braces.length - 1]; if (brace && stack[stack.length - 1] === 'braces') { brace.comma = true; output = '|'; } push({ type: 'comma', value, output }); continue; } /** * Slashes */ if (value === '/') { // if the beginning of the glob is "./", advance the start // to the current index, and don't add the "./" characters // to the state. This greatly simplifies lookbehinds when // checking for BOS characters like "!" and "." 
(not "./") if (prev.type === 'dot' && state.index === state.start + 1) { state.start = state.index + 1; state.consumed = ''; state.output = ''; tokens.pop(); prev = bos; // reset "prev" to the first token continue; } push({ type: 'slash', value, output: SLASH_LITERAL }); continue; } /** * Dots */ if (value === '.') { if (state.braces > 0 && prev.type === 'dot') { if (prev.value === '.') prev.output = DOT_LITERAL; const brace = braces[braces.length - 1]; prev.type = 'dots'; prev.output += value; prev.value += value; brace.dots = true; continue; } if ((state.braces + state.parens) === 0 && prev.type !== 'bos' && prev.type !== 'slash') { push({ type: 'text', value, output: DOT_LITERAL }); continue; } push({ type: 'dot', value, output: DOT_LITERAL }); continue; } /** * Question marks */ if (value === '?') { const isGroup = prev && prev.value === '('; if (!isGroup && opts.noextglob !== true && peek() === '(' && peek(2) !== '?') { extglobOpen('qmark', value); continue; } if (prev && prev.type === 'paren') { const next = peek(); let output = value; if (next === '<' && !utils$i.supportsLookbehinds()) { throw new Error('Node.js v10 or higher is required for regex lookbehinds'); } if ((prev.value === '(' && !/[!=<:]/.test(next)) || (next === '<' && !/<([!=]|\w+>)/.test(remaining()))) { output = `\\${value}`; } push({ type: 'text', value, output }); continue; } if (opts.dot !== true && (prev.type === 'slash' || prev.type === 'bos')) { push({ type: 'qmark', value, output: QMARK_NO_DOT }); continue; } push({ type: 'qmark', value, output: QMARK }); continue; } /** * Exclamation */ if (value === '!') { if (opts.noextglob !== true && peek() === '(') { if (peek(2) !== '?' || !/[!=<:]/.test(peek(3))) { extglobOpen('negate', value); continue; } } if (opts.nonegate !== true && state.index === 0) { negate(); continue; } } /** * Plus */ if (value === '+') { if (opts.noextglob !== true && peek() === '(' && peek(2) !== '?') { extglobOpen('plus', value); continue; } if ((prev && prev.value === '(') || opts.regex === false) { push({ type: 'plus', value, output: PLUS_LITERAL }); continue; } if ((prev && (prev.type === 'bracket' || prev.type === 'paren' || prev.type === 'brace')) || state.parens > 0) { push({ type: 'plus', value }); continue; } push({ type: 'plus', value: PLUS_LITERAL }); continue; } /** * Plain text */ if (value === '@') { if (opts.noextglob !== true && peek() === '(' && peek(2) !== '?') { push({ type: 'at', extglob: true, value, output: '' }); continue; } push({ type: 'text', value }); continue; } /** * Plain text */ if (value !== '*') { if (value === '$' || value === '^') { value = `\\${value}`; } const match = REGEX_NON_SPECIAL_CHARS.exec(remaining()); if (match) { value += match[0]; state.index += match[0].length; } push({ type: 'text', value }); continue; } /** * Stars */ if (prev && (prev.type === 'globstar' || prev.star === true)) { prev.type = 'star'; prev.star = true; prev.value += value; prev.output = star; state.backtrack = true; state.globstar = true; consume(value); continue; } let rest = remaining(); if (opts.noextglob !== true && /^\([^?]/.test(rest)) { extglobOpen('star', value); continue; } if (prev.type === 'star') { if (opts.noglobstar === true) { consume(value); continue; } const prior = prev.prev; const before = prior.prev; const isStart = prior.type === 'slash' || prior.type === 'bos'; const afterStar = before && (before.type === 'star' || before.type === 'globstar'); if (opts.bash === true && (!isStart || (rest[0] && rest[0] !== '/'))) { push({ type: 'star', value, output: '' 
}); continue; } const isBrace = state.braces > 0 && (prior.type === 'comma' || prior.type === 'brace'); const isExtglob = extglobs.length && (prior.type === 'pipe' || prior.type === 'paren'); if (!isStart && prior.type !== 'paren' && !isBrace && !isExtglob) { push({ type: 'star', value, output: '' }); continue; } // strip consecutive `/**/` while (rest.slice(0, 3) === '/**') { const after = input[state.index + 4]; if (after && after !== '/') { break; } rest = rest.slice(3); consume('/**', 3); } if (prior.type === 'bos' && eos()) { prev.type = 'globstar'; prev.value += value; prev.output = globstar(opts); state.output = prev.output; state.globstar = true; consume(value); continue; } if (prior.type === 'slash' && prior.prev.type !== 'bos' && !afterStar && eos()) { state.output = state.output.slice(0, -(prior.output + prev.output).length); prior.output = `(?:${prior.output}`; prev.type = 'globstar'; prev.output = globstar(opts) + (opts.strictSlashes ? ')' : '|$)'); prev.value += value; state.globstar = true; state.output += prior.output + prev.output; consume(value); continue; } if (prior.type === 'slash' && prior.prev.type !== 'bos' && rest[0] === '/') { const end = rest[1] !== void 0 ? '|$' : ''; state.output = state.output.slice(0, -(prior.output + prev.output).length); prior.output = `(?:${prior.output}`; prev.type = 'globstar'; prev.output = `${globstar(opts)}${SLASH_LITERAL}|${SLASH_LITERAL}${end})`; prev.value += value; state.output += prior.output + prev.output; state.globstar = true; consume(value + advance()); push({ type: 'slash', value: '/', output: '' }); continue; } if (prior.type === 'bos' && rest[0] === '/') { prev.type = 'globstar'; prev.value += value; prev.output = `(?:^|${SLASH_LITERAL}|${globstar(opts)}${SLASH_LITERAL})`; state.output = prev.output; state.globstar = true; consume(value + advance()); push({ type: 'slash', value: '/', output: '' }); continue; } // remove single star from output state.output = state.output.slice(0, -prev.output.length); // reset previous token to globstar prev.type = 'globstar'; prev.output = globstar(opts); prev.value += value; // reset output with globstar state.output += prev.output; state.globstar = true; consume(value); continue; } const token = { type: 'star', value, output: star }; if (opts.bash === true) { token.output = '.*?'; if (prev.type === 'bos' || prev.type === 'slash') { token.output = nodot + token.output; } push(token); continue; } if (prev && (prev.type === 'bracket' || prev.type === 'paren') && opts.regex === true) { token.output = value; push(token); continue; } if (state.index === state.start || prev.type === 'slash' || prev.type === 'dot') { if (prev.type === 'dot') { state.output += NO_DOT_SLASH; prev.output += NO_DOT_SLASH; } else if (opts.dot === true) { state.output += NO_DOTS_SLASH; prev.output += NO_DOTS_SLASH; } else { state.output += nodot; prev.output += nodot; } if (peek() !== '*') { state.output += ONE_CHAR; prev.output += ONE_CHAR; } } push(token); } while (state.brackets > 0) { if (opts.strictBrackets === true) throw new SyntaxError(syntaxError('closing', ']')); state.output = utils$i.escapeLast(state.output, '['); decrement('brackets'); } while (state.parens > 0) { if (opts.strictBrackets === true) throw new SyntaxError(syntaxError('closing', ')')); state.output = utils$i.escapeLast(state.output, '('); decrement('parens'); } while (state.braces > 0) { if (opts.strictBrackets === true) throw new SyntaxError(syntaxError('closing', '}')); state.output = utils$i.escapeLast(state.output, '{'); 
decrement('braces'); } if (opts.strictSlashes !== true && (prev.type === 'star' || prev.type === 'bracket')) { push({ type: 'maybe_slash', value: '', output: `${SLASH_LITERAL}?` }); } // rebuild the output if we had to backtrack at any point if (state.backtrack === true) { state.output = ''; for (const token of state.tokens) { state.output += token.output != null ? token.output : token.value; if (token.suffix) { state.output += token.suffix; } } } return state; }; /** * Fast paths for creating regular expressions for common glob patterns. * This can significantly speed up processing and has very little downside * impact when none of the fast paths match. */ parse$h.fastpaths = (input, options) => { const opts = { ...options }; const max = typeof opts.maxLength === 'number' ? Math.min(MAX_LENGTH$1, opts.maxLength) : MAX_LENGTH$1; const len = input.length; if (len > max) { throw new SyntaxError(`Input length: ${len}, exceeds maximum allowed length: ${max}`); } input = REPLACEMENTS[input] || input; const win32 = utils$i.isWindows(options); // create constants based on platform, for windows or posix const { DOT_LITERAL, SLASH_LITERAL, ONE_CHAR, DOTS_SLASH, NO_DOT, NO_DOTS, NO_DOTS_SLASH, STAR, START_ANCHOR } = constants$5.globChars(win32); const nodot = opts.dot ? NO_DOTS : NO_DOT; const slashDot = opts.dot ? NO_DOTS_SLASH : NO_DOT; const capture = opts.capture ? '' : '?:'; const state = { negated: false, prefix: '' }; let star = opts.bash === true ? '.*?' : STAR; if (opts.capture) { star = `(${star})`; } const globstar = opts => { if (opts.noglobstar === true) return star; return `(${capture}(?:(?!${START_ANCHOR}${opts.dot ? DOTS_SLASH : DOT_LITERAL}).)*?)`; }; const create = str => { switch (str) { case '*': return `${nodot}${ONE_CHAR}${star}`; case '.*': return `${DOT_LITERAL}${ONE_CHAR}${star}`; case '*.*': return `${nodot}${star}${DOT_LITERAL}${ONE_CHAR}${star}`; case '*/*': return `${nodot}${star}${SLASH_LITERAL}${ONE_CHAR}${slashDot}${star}`; case '**': return nodot + globstar(opts); case '**/*': return `(?:${nodot}${globstar(opts)}${SLASH_LITERAL})?${slashDot}${ONE_CHAR}${star}`; case '**/*.*': return `(?:${nodot}${globstar(opts)}${SLASH_LITERAL})?${slashDot}${star}${DOT_LITERAL}${ONE_CHAR}${star}`; case '**/.*': return `(?:${nodot}${globstar(opts)}${SLASH_LITERAL})?${DOT_LITERAL}${ONE_CHAR}${star}`; default: { const match = /^(.*?)\.(\w+)$/.exec(str); if (!match) return; const source = create(match[1]); if (!source) return; return source + DOT_LITERAL + match[2]; } } }; const output = utils$i.removePrefix(input, state); let source = create(output); if (source && opts.strictSlashes !== true) { source += `${SLASH_LITERAL}?`; } return source; }; var parse_1$3 = parse$h; const path$m = require$$0$4; const scan$1 = scan_1; const parse$g = parse_1$3; const utils$h = utils$k; const constants$4 = constants$6; const isObject$3 = val => val && typeof val === 'object' && !Array.isArray(val); /** * Creates a matcher function from one or more glob patterns. The * returned function takes a string to match as its first argument, * and returns true if the string is a match. The returned matcher * function also takes a boolean as the second argument that, when true, * returns an object with additional information. * * ```js * const picomatch = require('picomatch'); * // picomatch(glob[, options]); * * const isMatch = picomatch('*.!(*a)'); * console.log(isMatch('a.a')); //=> false * console.log(isMatch('a.b')); //=> true * ``` * @name picomatch * @param {String|Array} `globs` One or more glob patterns. 
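 *   (illustrative note: a glob may also be a pre-parsed state object carrying
 *   `tokens` and `input`, as produced by `.parse()`; see the `isState` check below)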
* @param {Object=} `options` * @return {Function=} Returns a matcher function. * @api public */ const picomatch$5 = (glob, options, returnState = false) => { if (Array.isArray(glob)) { const fns = glob.map(input => picomatch$5(input, options, returnState)); const arrayMatcher = str => { for (const isMatch of fns) { const state = isMatch(str); if (state) return state; } return false; }; return arrayMatcher; } const isState = isObject$3(glob) && glob.tokens && glob.input; if (glob === '' || (typeof glob !== 'string' && !isState)) { throw new TypeError('Expected pattern to be a non-empty string'); } const opts = options || {}; const posix = utils$h.isWindows(options); const regex = isState ? picomatch$5.compileRe(glob, options) : picomatch$5.makeRe(glob, options, false, true); const state = regex.state; delete regex.state; let isIgnored = () => false; if (opts.ignore) { const ignoreOpts = { ...options, ignore: null, onMatch: null, onResult: null }; isIgnored = picomatch$5(opts.ignore, ignoreOpts, returnState); } const matcher = (input, returnObject = false) => { const { isMatch, match, output } = picomatch$5.test(input, regex, options, { glob, posix }); const result = { glob, state, regex, posix, input, output, match, isMatch }; if (typeof opts.onResult === 'function') { opts.onResult(result); } if (isMatch === false) { result.isMatch = false; return returnObject ? result : false; } if (isIgnored(input)) { if (typeof opts.onIgnore === 'function') { opts.onIgnore(result); } result.isMatch = false; return returnObject ? result : false; } if (typeof opts.onMatch === 'function') { opts.onMatch(result); } return returnObject ? result : true; }; if (returnState) { matcher.state = state; } return matcher; }; /** * Test `input` with the given `regex`. This is used by the main * `picomatch()` function to test the input string. * * ```js * const picomatch = require('picomatch'); * // picomatch.test(input, regex[, options]); * * console.log(picomatch.test('foo/bar', /^(?:([^/]*?)\/([^/]*?))$/)); * // { isMatch: true, match: [ 'foo/', 'foo', 'bar' ], output: 'foo/bar' } * ``` * @param {String} `input` String to test. * @param {RegExp} `regex` * @return {Object} Returns an object with matching info. * @api public */ picomatch$5.test = (input, regex, options, { glob, posix } = {}) => { if (typeof input !== 'string') { throw new TypeError('Expected input to be a string'); } if (input === '') { return { isMatch: false, output: '' }; } const opts = options || {}; const format = opts.format || (posix ? utils$h.toPosixSlashes : null); let match = input === glob; let output = (match && format) ? format(input) : input; if (match === false) { output = format ? format(input) : input; match = output === glob; } if (match === false || opts.capture === true) { if (opts.matchBase === true || opts.basename === true) { match = picomatch$5.matchBase(input, regex, options, posix); } else { match = regex.exec(output); } } return { isMatch: Boolean(match), match, output }; }; /** * Match the basename of a filepath. * * ```js * const picomatch = require('picomatch'); * // picomatch.matchBase(input, glob[, options]); * console.log(picomatch.matchBase('foo/bar.js', '*.js'); // true * ``` * @param {String} `input` String to test. * @param {RegExp|String} `glob` Glob pattern or regex created by [.makeRe](#makeRe). * @return {Boolean} * @api public */ picomatch$5.matchBase = (input, glob, options, posix = utils$h.isWindows(options)) => { const regex = glob instanceof RegExp ? 
glob : picomatch$5.makeRe(glob, options); return regex.test(path$m.basename(input)); }; /** * Returns true if **any** of the given glob `patterns` match the specified `string`. * * ```js * const picomatch = require('picomatch'); * // picomatch.isMatch(string, patterns[, options]); * * console.log(picomatch.isMatch('a.a', ['b.*', '*.a'])); //=> true * console.log(picomatch.isMatch('a.a', 'b.*')); //=> false * ``` * @param {String|Array} str The string to test. * @param {String|Array} patterns One or more glob patterns to use for matching. * @param {Object} [options] See available [options](#options). * @return {Boolean} Returns true if any patterns match `str` * @api public */ picomatch$5.isMatch = (str, patterns, options) => picomatch$5(patterns, options)(str); /** * Parse a glob pattern to create the source string for a regular * expression. * * ```js * const picomatch = require('picomatch'); * const result = picomatch.parse(pattern[, options]); * ``` * @param {String} `pattern` * @param {Object} `options` * @return {Object} Returns an object with useful properties and output to be used as a regex source string. * @api public */ picomatch$5.parse = (pattern, options) => { if (Array.isArray(pattern)) return pattern.map(p => picomatch$5.parse(p, options)); return parse$g(pattern, { ...options, fastpaths: false }); }; /** * Scan a glob pattern to separate the pattern into segments. * * ```js * const picomatch = require('picomatch'); * // picomatch.scan(input[, options]); * * const result = picomatch.scan('!./foo/*.js'); * console.log(result); * { prefix: '!./', * input: '!./foo/*.js', * start: 3, * base: 'foo', * glob: '*.js', * isBrace: false, * isBracket: false, * isGlob: true, * isExtglob: false, * isGlobstar: false, * negated: true } * ``` * @param {String} `input` Glob pattern to scan. * @param {Object} `options` * @return {Object} Returns an object with * @api public */ picomatch$5.scan = (input, options) => scan$1(input, options); /** * Compile a regular expression from the `state` object returned by the * [parse()](#parse) method. * * @param {Object} `state` * @param {Object} `options` * @param {Boolean} `returnOutput` Intended for implementors, this argument allows you to return the raw output from the parser. * @param {Boolean} `returnState` Adds the state to a `state` property on the returned regex. Useful for implementors and debugging. * @return {RegExp} * @api public */ picomatch$5.compileRe = (state, options, returnOutput = false, returnState = false) => { if (returnOutput === true) { return state.output; } const opts = options || {}; const prepend = opts.contains ? '' : '^'; const append = opts.contains ? '' : '$'; let source = `${prepend}(?:${state.output})${append}`; if (state && state.negated === true) { source = `^(?!${source}).*$`; } const regex = picomatch$5.toRegex(source, options); if (returnState === true) { regex.state = state; } return regex; }; /** * Create a regular expression from a parsed glob pattern. * * ```js * const picomatch = require('picomatch'); * const state = picomatch.parse('*.js'); * // picomatch.compileRe(state[, options]); * * console.log(picomatch.compileRe(state)); * //=> /^(?:(?!\.)(?=.)[^/]*?\.js)$/ * ``` * @param {String} `state` The object returned from the `.parse` method. * @param {Object} `options` * @param {Boolean} `returnOutput` Implementors may use this argument to return the compiled output, instead of a regular expression. This is not exposed on the options to prevent end-users from mutating the result. 
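 * Illustrative note: for simple patterns such as '*.js' the fastpaths branch is
 * taken (see `parse$h.fastpaths` above and the check in `makeRe` below), and the
 * resulting regex also allows an optional trailing slash unless `strictSlashes` is set.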
* @param {Boolean} `returnState` Implementors may use this argument to return the state from the parsed glob with the returned regular expression. * @return {RegExp} Returns a regex created from the given pattern. * @api public */ picomatch$5.makeRe = (input, options = {}, returnOutput = false, returnState = false) => { if (!input || typeof input !== 'string') { throw new TypeError('Expected a non-empty string'); } let parsed = { negated: false, fastpaths: true }; if (options.fastpaths !== false && (input[0] === '.' || input[0] === '*')) { parsed.output = parse$g.fastpaths(input, options); } if (!parsed.output) { parsed = parse$g(input, options); } return picomatch$5.compileRe(parsed, options, returnOutput, returnState); }; /** * Create a regular expression from the given regex source string. * * ```js * const picomatch = require('picomatch'); * // picomatch.toRegex(source[, options]); * * const { output } = picomatch.parse('*.js'); * console.log(picomatch.toRegex(output)); * //=> /^(?:(?!\.)(?=.)[^/]*?\.js)$/ * ``` * @param {String} `source` Regular expression source string. * @param {Object} `options` * @return {RegExp} * @api public */ picomatch$5.toRegex = (source, options) => { try { const opts = options || {}; return new RegExp(source, opts.flags || (opts.nocase ? 'i' : '')); } catch (err) { if (options && options.debug === true) throw err; return /$^/; } }; /** * Picomatch constants. * @return {Object} */ picomatch$5.constants = constants$4; /** * Expose "picomatch" */ var picomatch_1 = picomatch$5; var picomatch$3 = picomatch_1; var picomatch$4 = /*@__PURE__*/getDefaultExportFromCjs(picomatch$3); const extractors = { ArrayPattern(names, param) { for (const element of param.elements) { if (element) extractors[element.type](names, element); } }, AssignmentPattern(names, param) { extractors[param.left.type](names, param.left); }, Identifier(names, param) { names.push(param.name); }, MemberExpression() { }, ObjectPattern(names, param) { for (const prop of param.properties) { // @ts-ignore Typescript reports that this is not a valid type if (prop.type === 'RestElement') { extractors.RestElement(names, prop); } else { extractors[prop.value.type](names, prop.value); } } }, RestElement(names, param) { extractors[param.argument.type](names, param.argument); } }; const extractAssignedNames = function extractAssignedNames(param) { const names = []; extractors[param.type](names, param); return names; }; const blockDeclarations = { const: true, let: true }; let Scope$1 = class Scope { constructor(options = {}) { this.parent = options.parent; this.isBlockScope = !!options.block; this.declarations = Object.create(null); if (options.params) { options.params.forEach((param) => { extractAssignedNames(param).forEach((name) => { this.declarations[name] = true; }); }); } } addDeclaration(node, isBlockDeclaration, isVar) { if (!isBlockDeclaration && this.isBlockScope) { // it's a `var` or function node, and this // is a block scope, so we need to go up this.parent.addDeclaration(node, isBlockDeclaration, isVar); } else if (node.id) { extractAssignedNames(node.id).forEach((name) => { this.declarations[name] = true; }); } } contains(name) { return this.declarations[name] || (this.parent ? 
this.parent.contains(name) : false); } }; const attachScopes = function attachScopes(ast, propertyName = 'scope') { let scope = new Scope$1(); walk$3(ast, { enter(n, parent) { const node = n; // function foo () {...} // class Foo {...} if (/(Function|Class)Declaration/.test(node.type)) { scope.addDeclaration(node, false, false); } // var foo = 1 if (node.type === 'VariableDeclaration') { const { kind } = node; const isBlockDeclaration = blockDeclarations[kind]; node.declarations.forEach((declaration) => { scope.addDeclaration(declaration, isBlockDeclaration, true); }); } let newScope; // create new function scope if (/Function/.test(node.type)) { const func = node; newScope = new Scope$1({ parent: scope, block: false, params: func.params }); // named function expressions - the name is considered // part of the function's scope if (func.type === 'FunctionExpression' && func.id) { newScope.addDeclaration(func, false, false); } } // create new for scope if (/For(In|Of)?Statement/.test(node.type)) { newScope = new Scope$1({ parent: scope, block: true }); } // create new block scope if (node.type === 'BlockStatement' && !/Function/.test(parent.type)) { newScope = new Scope$1({ parent: scope, block: true }); } // catch clause has its own block scope if (node.type === 'CatchClause') { newScope = new Scope$1({ parent: scope, params: node.param ? [node.param] : [], block: true }); } if (newScope) { Object.defineProperty(node, propertyName, { value: newScope, configurable: true }); scope = newScope; } }, leave(n) { const node = n; if (node[propertyName]) scope = scope.parent; } }); return scope; }; // Helper since Typescript can't detect readonly arrays with Array.isArray function isArray$1(arg) { return Array.isArray(arg); } function ensureArray(thing) { if (isArray$1(thing)) return thing; if (thing == null) return []; return [thing]; } const normalizePath$5 = function normalizePath(filename) { return filename.split(win32.sep).join(posix.sep); }; function getMatcherString(id, resolutionBase) { if (resolutionBase === false || isAbsolute$1(id) || id.startsWith('**')) { return normalizePath$5(id); } // resolve('') is valid and will default to process.cwd() const basePath = normalizePath$5(resolve$3(resolutionBase || '')) // escape all possible (posix + win) path characters that might interfere with regex .replace(/[-^$*+?.()|[\]{}]/g, '\\$&'); // Note that we use posix.join because: // 1. the basePath has been normalized to use / // 2. the incoming glob (id) matcher, also uses / // otherwise Node will force backslash (\) on windows return posix.join(basePath, normalizePath$5(id)); } const createFilter$1 = function createFilter(include, exclude, options) { const resolutionBase = options && options.resolve; const getMatcher = (id) => id instanceof RegExp ? 
id : { test: (what) => { // this refactor is a tad overly verbose but makes for easy debugging const pattern = getMatcherString(id, resolutionBase); const fn = picomatch$4(pattern, { dot: true }); const result = fn(what); return result; } }; const includeMatchers = ensureArray(include).map(getMatcher); const excludeMatchers = ensureArray(exclude).map(getMatcher); return function result(id) { if (typeof id !== 'string') return false; if (/\0/.test(id)) return false; const pathId = normalizePath$5(id); for (let i = 0; i < excludeMatchers.length; ++i) { const matcher = excludeMatchers[i]; if (matcher.test(pathId)) return false; } for (let i = 0; i < includeMatchers.length; ++i) { const matcher = includeMatchers[i]; if (matcher.test(pathId)) return true; } return !includeMatchers.length; }; }; const reservedWords$1 = 'break case class catch const continue debugger default delete do else export extends finally for function if import in instanceof let new return super switch this throw try typeof var void while with yield enum await implements package protected static interface private public'; const builtins = 'arguments Infinity NaN undefined null true false eval uneval isFinite isNaN parseFloat parseInt decodeURI decodeURIComponent encodeURI encodeURIComponent escape unescape Object Function Boolean Symbol Error EvalError InternalError RangeError ReferenceError SyntaxError TypeError URIError Number Math Date String RegExp Array Int8Array Uint8Array Uint8ClampedArray Int16Array Uint16Array Int32Array Uint32Array Float32Array Float64Array Map Set WeakMap WeakSet SIMD ArrayBuffer DataView JSON Promise Generator GeneratorFunction Reflect Proxy Intl'; const forbiddenIdentifiers = new Set(`${reservedWords$1} ${builtins}`.split(' ')); forbiddenIdentifiers.add(''); const makeLegalIdentifier = function makeLegalIdentifier(str) { let identifier = str .replace(/-(\w)/g, (_, letter) => letter.toUpperCase()) .replace(/[^$_a-zA-Z0-9]/g, '_'); if (/\d/.test(identifier[0]) || forbiddenIdentifiers.has(identifier)) { identifier = `_${identifier}`; } return identifier || '_'; }; function stringify$8(obj) { return (JSON.stringify(obj) || 'undefined').replace(/[\u2028\u2029]/g, (char) => `\\u${`000${char.charCodeAt(0).toString(16)}`.slice(-4)}`); } function serializeArray(arr, indent, baseIndent) { let output = '['; const separator = indent ? `\n${baseIndent}${indent}` : ''; for (let i = 0; i < arr.length; i++) { const key = arr[i]; output += `${i > 0 ? ',' : ''}${separator}${serialize(key, indent, baseIndent + indent)}`; } return `${output}${indent ? `\n${baseIndent}` : ''}]`; } function serializeObject(obj, indent, baseIndent) { let output = '{'; const separator = indent ? `\n${baseIndent}${indent}` : ''; const entries = Object.entries(obj); for (let i = 0; i < entries.length; i++) { const [key, value] = entries[i]; const stringKey = makeLegalIdentifier(key) === key ? key : stringify$8(key); output += `${i > 0 ? ',' : ''}${separator}${stringKey}:${indent ? ' ' : ''}${serialize(value, indent, baseIndent + indent)}`; } return `${output}${indent ? 
`\n${baseIndent}` : ''}}`; } function serialize(obj, indent, baseIndent) { if (typeof obj === 'object' && obj !== null) { if (Array.isArray(obj)) return serializeArray(obj, indent, baseIndent); if (obj instanceof Date) return `new Date(${obj.getTime()})`; if (obj instanceof RegExp) return obj.toString(); return serializeObject(obj, indent, baseIndent); } if (typeof obj === 'number') { if (obj === Infinity) return 'Infinity'; if (obj === -Infinity) return '-Infinity'; if (obj === 0) return 1 / obj === Infinity ? '0' : '-0'; if (obj !== obj) return 'NaN'; // eslint-disable-line no-self-compare } if (typeof obj === 'symbol') { const key = Symbol.keyFor(obj); // eslint-disable-next-line no-undefined if (key !== undefined) return `Symbol.for(${stringify$8(key)})`; } if (typeof obj === 'bigint') return `${obj}n`; return stringify$8(obj); } // isWellFormed exists from Node.js 20 const hasStringIsWellFormed = 'isWellFormed' in String.prototype; function isWellFormedString(input) { // @ts-expect-error String::isWellFormed exists from ES2024. tsconfig lib is set to ES6 if (hasStringIsWellFormed) return input.isWellFormed(); // https://github.com/tc39/proposal-is-usv-string/blob/main/README.md#algorithm return !/\p{Surrogate}/u.test(input); } const dataToEsm = function dataToEsm(data, options = {}) { var _a, _b; const t = options.compact ? '' : 'indent' in options ? options.indent : '\t'; const _ = options.compact ? '' : ' '; const n = options.compact ? '' : '\n'; const declarationType = options.preferConst ? 'const' : 'var'; if (options.namedExports === false || typeof data !== 'object' || Array.isArray(data) || data instanceof Date || data instanceof RegExp || data === null) { const code = serialize(data, options.compact ? null : t, ''); const magic = _ || (/^[{[\-\/]/.test(code) ? '' : ' '); // eslint-disable-line no-useless-escape return `export default${magic}${code};`; } let maxUnderbarPrefixLength = 0; for (const key of Object.keys(data)) { const underbarPrefixLength = (_b = (_a = key.match(/^(_+)/)) === null || _a === void 0 ? void 0 : _a[0].length) !== null && _b !== void 0 ? _b : 0; if (underbarPrefixLength > maxUnderbarPrefixLength) { maxUnderbarPrefixLength = underbarPrefixLength; } } const arbitraryNamePrefix = `${'_'.repeat(maxUnderbarPrefixLength + 1)}arbitrary`; let namedExportCode = ''; const defaultExportRows = []; const arbitraryNameExportRows = []; for (const [key, value] of Object.entries(data)) { if (key === makeLegalIdentifier(key)) { if (options.objectShorthand) defaultExportRows.push(key); else defaultExportRows.push(`${key}:${_}${key}`); namedExportCode += `export ${declarationType} ${key}${_}=${_}${serialize(value, options.compact ? null : t, '')};${n}`; } else { defaultExportRows.push(`${stringify$8(key)}:${_}${serialize(value, options.compact ? null : t, '')}`); if (options.includeArbitraryNames && isWellFormedString(key)) { const variableName = `${arbitraryNamePrefix}${arbitraryNameExportRows.length}`; namedExportCode += `${declarationType} ${variableName}${_}=${_}${serialize(value, options.compact ? null : t, '')};${n}`; arbitraryNameExportRows.push(`${variableName} as ${JSON.stringify(key)}`); } } } const arbitraryExportCode = arbitraryNameExportRows.length > 0 ? 
`export${_}{${n}${t}${arbitraryNameExportRows.join(`,${n}${t}`)}${n}};${n}` : ''; const defaultExportCode = `export default${_}{${n}${t}${defaultExportRows.join(`,${n}${t}`)}${n}};${n}`; return `${namedExportCode}${arbitraryExportCode}${defaultExportCode}`; }; var path$l = require$$0$4; var commondir = function (basedir, relfiles) { if (relfiles) { var files = relfiles.map(function (r) { return path$l.resolve(basedir, r); }); } else { var files = basedir; } var res = files.slice(1).reduce(function (ps, file) { if (!file.match(/^([A-Za-z]:)?\/|\\/)) { throw new Error('relative path without a basedir'); } var xs = file.split(/\/+|\\+/); for ( var i = 0; ps[i] === xs[i] && i < Math.min(ps.length, xs.length); i++ ); return ps.slice(0, i); }, files[0].split(/\/+|\\+/)); // Windows correctly handles paths with forward-slashes return res.length > 1 ? res.join('/') : '/' }; var getCommonDir = /*@__PURE__*/getDefaultExportFromCjs(commondir); var old$1 = {}; // Copyright Joyent, Inc. and other Node contributors. // // Permission is hereby granted, free of charge, to any person obtaining a // copy of this software and associated documentation files (the // "Software"), to deal in the Software without restriction, including // without limitation the rights to use, copy, modify, merge, publish, // distribute, sublicense, and/or sell copies of the Software, and to permit // persons to whom the Software is furnished to do so, subject to the // following conditions: // // The above copyright notice and this permission notice shall be included // in all copies or substantial portions of the Software. // // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS // OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF // MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN // NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, // DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR // OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE // USE OR OTHER DEALINGS IN THE SOFTWARE. var pathModule = require$$0$4; var isWindows$4 = process.platform === 'win32'; var fs$k = require$$0__default; // JavaScript implementation of realpath, ported from node pre-v6 var DEBUG$1 = process.env.NODE_DEBUG && /fs/.test(process.env.NODE_DEBUG); function rethrow() { // Only enable in debug mode. A backtrace uses ~1000 bytes of heap space and // is fairly slow to generate. var callback; if (DEBUG$1) { var backtrace = new Error; callback = debugCallback; } else callback = missingCallback; return callback; function debugCallback(err) { if (err) { backtrace.message = err.message; err = backtrace; missingCallback(err); } } function missingCallback(err) { if (err) { if (process.throwDeprecation) throw err; // Forgot a callback but don't know where? Use NODE_DEBUG=fs else if (!process.noDeprecation) { var msg = 'fs: missing callback ' + (err.stack || err.message); if (process.traceDeprecation) console.trace(msg); else console.error(msg); } } } } function maybeCallback(cb) { return typeof cb === 'function' ? cb : rethrow(); } // Regexp that finds the next partion of a (partial) path // result is [base_with_slash, base], e.g. ['somedir/', 'somedir'] if (isWindows$4) { var nextPartRe = /(.*?)(?:[\/\\]+|$)/g; } else { var nextPartRe = /(.*?)(?:[\/]+|$)/g; } // Regex to find the device root, including trailing slash. E.g. 'c:\\'. 
if (isWindows$4) { var splitRootRe = /^(?:[a-zA-Z]:|[\\\/]{2}[^\\\/]+[\\\/][^\\\/]+)?[\\\/]*/; } else { var splitRootRe = /^[\/]*/; } old$1.realpathSync = function realpathSync(p, cache) { // make p is absolute p = pathModule.resolve(p); if (cache && Object.prototype.hasOwnProperty.call(cache, p)) { return cache[p]; } var original = p, seenLinks = {}, knownHard = {}; // current character position in p var pos; // the partial path so far, including a trailing slash if any var current; // the partial path without a trailing slash (except when pointing at a root) var base; // the partial path scanned in the previous round, with slash var previous; start(); function start() { // Skip over roots var m = splitRootRe.exec(p); pos = m[0].length; current = m[0]; base = m[0]; previous = ''; // On windows, check that the root exists. On unix there is no need. if (isWindows$4 && !knownHard[base]) { fs$k.lstatSync(base); knownHard[base] = true; } } // walk down the path, swapping out linked pathparts for their real // values // NB: p.length changes. while (pos < p.length) { // find the next part nextPartRe.lastIndex = pos; var result = nextPartRe.exec(p); previous = current; current += result[0]; base = previous + result[1]; pos = nextPartRe.lastIndex; // continue if not a symlink if (knownHard[base] || (cache && cache[base] === base)) { continue; } var resolvedLink; if (cache && Object.prototype.hasOwnProperty.call(cache, base)) { // some known symbolic link. no need to stat again. resolvedLink = cache[base]; } else { var stat = fs$k.lstatSync(base); if (!stat.isSymbolicLink()) { knownHard[base] = true; if (cache) cache[base] = base; continue; } // read the link if it wasn't read before // dev/ino always return 0 on windows, so skip the check. var linkTarget = null; if (!isWindows$4) { var id = stat.dev.toString(32) + ':' + stat.ino.toString(32); if (seenLinks.hasOwnProperty(id)) { linkTarget = seenLinks[id]; } } if (linkTarget === null) { fs$k.statSync(base); linkTarget = fs$k.readlinkSync(base); } resolvedLink = pathModule.resolve(previous, linkTarget); // track this, if given a cache. if (cache) cache[base] = resolvedLink; if (!isWindows$4) seenLinks[id] = linkTarget; } // resolve the link, then start over p = pathModule.resolve(resolvedLink, p.slice(pos)); start(); } if (cache) cache[original] = p; return p; }; old$1.realpath = function realpath(p, cache, cb) { if (typeof cb !== 'function') { cb = maybeCallback(cache); cache = null; } // make p is absolute p = pathModule.resolve(p); if (cache && Object.prototype.hasOwnProperty.call(cache, p)) { return process.nextTick(cb.bind(null, null, cache[p])); } var original = p, seenLinks = {}, knownHard = {}; // current character position in p var pos; // the partial path so far, including a trailing slash if any var current; // the partial path without a trailing slash (except when pointing at a root) var base; // the partial path scanned in the previous round, with slash var previous; start(); function start() { // Skip over roots var m = splitRootRe.exec(p); pos = m[0].length; current = m[0]; base = m[0]; previous = ''; // On windows, check that the root exists. On unix there is no need. 
if (isWindows$4 && !knownHard[base]) { fs$k.lstat(base, function(err) { if (err) return cb(err); knownHard[base] = true; LOOP(); }); } else { process.nextTick(LOOP); } } // walk down the path, swapping out linked pathparts for their real // values function LOOP() { // stop if scanned past end of path if (pos >= p.length) { if (cache) cache[original] = p; return cb(null, p); } // find the next part nextPartRe.lastIndex = pos; var result = nextPartRe.exec(p); previous = current; current += result[0]; base = previous + result[1]; pos = nextPartRe.lastIndex; // continue if not a symlink if (knownHard[base] || (cache && cache[base] === base)) { return process.nextTick(LOOP); } if (cache && Object.prototype.hasOwnProperty.call(cache, base)) { // known symbolic link. no need to stat again. return gotResolvedLink(cache[base]); } return fs$k.lstat(base, gotStat); } function gotStat(err, stat) { if (err) return cb(err); // if not a symlink, skip to the next path part if (!stat.isSymbolicLink()) { knownHard[base] = true; if (cache) cache[base] = base; return process.nextTick(LOOP); } // stat & read the link if not read before // call gotTarget as soon as the link target is known // dev/ino always return 0 on windows, so skip the check. if (!isWindows$4) { var id = stat.dev.toString(32) + ':' + stat.ino.toString(32); if (seenLinks.hasOwnProperty(id)) { return gotTarget(null, seenLinks[id], base); } } fs$k.stat(base, function(err) { if (err) return cb(err); fs$k.readlink(base, function(err, target) { if (!isWindows$4) seenLinks[id] = target; gotTarget(err, target); }); }); } function gotTarget(err, target, base) { if (err) return cb(err); var resolvedLink = pathModule.resolve(previous, target); if (cache) cache[base] = resolvedLink; gotResolvedLink(resolvedLink); } function gotResolvedLink(resolvedLink) { // resolve the link, then start over p = pathModule.resolve(resolvedLink, p.slice(pos)); start(); } }; var fs_realpath = realpath$2; realpath$2.realpath = realpath$2; realpath$2.sync = realpathSync; realpath$2.realpathSync = realpathSync; realpath$2.monkeypatch = monkeypatch; realpath$2.unmonkeypatch = unmonkeypatch; var fs$j = require$$0__default; var origRealpath = fs$j.realpath; var origRealpathSync = fs$j.realpathSync; var version$4 = process.version; var ok = /^v[0-5]\./.test(version$4); var old = old$1; function newError (er) { return er && er.syscall === 'realpath' && ( er.code === 'ELOOP' || er.code === 'ENOMEM' || er.code === 'ENAMETOOLONG' ) } function realpath$2 (p, cache, cb) { if (ok) { return origRealpath(p, cache, cb) } if (typeof cache === 'function') { cb = cache; cache = null; } origRealpath(p, cache, function (er, result) { if (newError(er)) { old.realpath(p, cache, cb); } else { cb(er, result); } }); } function realpathSync (p, cache) { if (ok) { return origRealpathSync(p, cache) } try { return origRealpathSync(p, cache) } catch (er) { if (newError(er)) { return old.realpathSync(p, cache) } else { throw er } } } function monkeypatch () { fs$j.realpath = realpath$2; fs$j.realpathSync = realpathSync; } function unmonkeypatch () { fs$j.realpath = origRealpath; fs$j.realpathSync = origRealpathSync; } const isWindows$3 = typeof process === 'object' && process && process.platform === 'win32'; var path$k = isWindows$3 ? 
{ sep: '\\' } : { sep: '/' }; var balancedMatch = balanced$1; function balanced$1(a, b, str) { if (a instanceof RegExp) a = maybeMatch(a, str); if (b instanceof RegExp) b = maybeMatch(b, str); var r = range$1(a, b, str); return r && { start: r[0], end: r[1], pre: str.slice(0, r[0]), body: str.slice(r[0] + a.length, r[1]), post: str.slice(r[1] + b.length) }; } function maybeMatch(reg, str) { var m = str.match(reg); return m ? m[0] : null; } balanced$1.range = range$1; function range$1(a, b, str) { var begs, beg, left, right, result; var ai = str.indexOf(a); var bi = str.indexOf(b, ai + 1); var i = ai; if (ai >= 0 && bi > 0) { if(a===b) { return [ai, bi]; } begs = []; left = str.length; while (i >= 0 && !result) { if (i == ai) { begs.push(i); ai = str.indexOf(a, i + 1); } else if (begs.length == 1) { result = [ begs.pop(), bi ]; } else { beg = begs.pop(); if (beg < left) { left = beg; right = bi; } bi = str.indexOf(b, i + 1); } i = ai < bi && ai >= 0 ? ai : bi; } if (begs.length) { result = [ left, right ]; } } return result; } var balanced = balancedMatch; var braceExpansion = expandTop; var escSlash = '\0SLASH'+Math.random()+'\0'; var escOpen = '\0OPEN'+Math.random()+'\0'; var escClose = '\0CLOSE'+Math.random()+'\0'; var escComma = '\0COMMA'+Math.random()+'\0'; var escPeriod = '\0PERIOD'+Math.random()+'\0'; function numeric(str) { return parseInt(str, 10) == str ? parseInt(str, 10) : str.charCodeAt(0); } function escapeBraces(str) { return str.split('\\\\').join(escSlash) .split('\\{').join(escOpen) .split('\\}').join(escClose) .split('\\,').join(escComma) .split('\\.').join(escPeriod); } function unescapeBraces(str) { return str.split(escSlash).join('\\') .split(escOpen).join('{') .split(escClose).join('}') .split(escComma).join(',') .split(escPeriod).join('.'); } // Basically just str.split(","), but handling cases // where we have nested braced sections, which should be // treated as individual members, like {a,{b,c},d} function parseCommaParts(str) { if (!str) return ['']; var parts = []; var m = balanced('{', '}', str); if (!m) return str.split(','); var pre = m.pre; var body = m.body; var post = m.post; var p = pre.split(','); p[p.length-1] += '{' + body + '}'; var postParts = parseCommaParts(post); if (post.length) { p[p.length-1] += postParts.shift(); p.push.apply(p, postParts); } parts.push.apply(parts, p); return parts; } function expandTop(str) { if (!str) return []; // I don't know why Bash 4.3 does this, but it does. // Anything starting with {} will have the first two bytes preserved // but *only* at the top level, so {},a}b will not expand to anything, // but a{},b}c will be expanded to [a}c,abc]. // One could argue that this is a bug in Bash, but since the goal of // this module is to match Bash's rules, we escape a leading {} if (str.substr(0, 2) === '{}') { str = '\\{\\}' + str.substr(2); } return expand$4(escapeBraces(str), true).map(unescapeBraces); } function embrace(str) { return '{' + str + '}'; } function isPadded(el) { return /^-?0\d/.test(el); } function lte(i, y) { return i <= y; } function gte(i, y) { return i >= y; } function expand$4(str, isTop) { var expansions = []; var m = balanced('{', '}', str); if (!m) return [str]; // no need to expand pre, since it is guaranteed to be free of brace-sets var pre = m.pre; var post = m.post.length ? 
expand$4(m.post, false) : ['']; if (/\$$/.test(m.pre)) { for (var k = 0; k < post.length; k++) { var expansion = pre+ '{' + m.body + '}' + post[k]; expansions.push(expansion); } } else { var isNumericSequence = /^-?\d+\.\.-?\d+(?:\.\.-?\d+)?$/.test(m.body); var isAlphaSequence = /^[a-zA-Z]\.\.[a-zA-Z](?:\.\.-?\d+)?$/.test(m.body); var isSequence = isNumericSequence || isAlphaSequence; var isOptions = m.body.indexOf(',') >= 0; if (!isSequence && !isOptions) { // {a},b} if (m.post.match(/,.*\}/)) { str = m.pre + '{' + m.body + escClose + m.post; return expand$4(str); } return [str]; } var n; if (isSequence) { n = m.body.split(/\.\./); } else { n = parseCommaParts(m.body); if (n.length === 1) { // x{{a,b}}y ==> x{a}y x{b}y n = expand$4(n[0], false).map(embrace); if (n.length === 1) { return post.map(function(p) { return m.pre + n[0] + p; }); } } } // at this point, n is the parts, and we know it's not a comma set // with a single entry. var N; if (isSequence) { var x = numeric(n[0]); var y = numeric(n[1]); var width = Math.max(n[0].length, n[1].length); var incr = n.length == 3 ? Math.abs(numeric(n[2])) : 1; var test = lte; var reverse = y < x; if (reverse) { incr *= -1; test = gte; } var pad = n.some(isPadded); N = []; for (var i = x; test(i, y); i += incr) { var c; if (isAlphaSequence) { c = String.fromCharCode(i); if (c === '\\') c = ''; } else { c = String(i); if (pad) { var need = width - c.length; if (need > 0) { var z = new Array(need + 1).join('0'); if (i < 0) c = '-' + z + c.slice(1); else c = z + c; } } } N.push(c); } } else { N = []; for (var j = 0; j < n.length; j++) { N.push.apply(N, expand$4(n[j], false)); } } for (var j = 0; j < N.length; j++) { for (var k = 0; k < post.length; k++) { var expansion = pre + N[j] + post[k]; if (!isTop || isSequence || expansion) expansions.push(expansion); } } } return expansions; } const minimatch$1 = minimatch_1 = (p, pattern, options = {}) => { assertValidPattern(pattern); // shortcut: comments match nothing. if (!options.nocomment && pattern.charAt(0) === '#') { return false } return new Minimatch$1(pattern, options).match(p) }; var minimatch_1 = minimatch$1; const path$j = path$k; minimatch$1.sep = path$j.sep; const GLOBSTAR$2 = Symbol('globstar **'); minimatch$1.GLOBSTAR = GLOBSTAR$2; const expand$3 = braceExpansion; const plTypes = { '!': { open: '(?:(?!(?:', close: '))[^/]*?)'}, '?': { open: '(?:', close: ')?' }, '+': { open: '(?:', close: ')+' }, '*': { open: '(?:', close: ')*' }, '@': { open: '(?:', close: ')' } }; // any single thing other than / // don't need to escape / when using new RegExp() const qmark = '[^/]'; // * => any number of characters const star = qmark + '*?'; // ** when dots are allowed. Anything goes, except .. and . // not (^ or / followed by one or two dots followed by $ or /), // followed by anything, any number of times. const twoStarDot = '(?:(?!(?:\\\/|^)(?:\\.{1,2})($|\\\/)).)*?'; // not a ^ or / followed by a dot, // followed by anything, any number of times. const twoStarNoDot = '(?:(?!(?:\\\/|^)\\.).)*?'; // "abc" -> { a:true, b:true, c:true } const charSet = s => s.split('').reduce((set, c) => { set[c] = true; return set }, {}); // characters that need to be escaped in RegExp. const reSpecials = charSet('().*{}+?[]^$\\!'); // characters that indicate we have to add the pattern start const addPatternStartSet = charSet('[.('); // normalizes slashes. 
const slashSplit = /\/+/; minimatch$1.filter = (pattern, options = {}) => (p, i, list) => minimatch$1(p, pattern, options); const ext = (a, b = {}) => { const t = {}; Object.keys(a).forEach(k => t[k] = a[k]); Object.keys(b).forEach(k => t[k] = b[k]); return t }; minimatch$1.defaults = def => { if (!def || typeof def !== 'object' || !Object.keys(def).length) { return minimatch$1 } const orig = minimatch$1; const m = (p, pattern, options) => orig(p, pattern, ext(def, options)); m.Minimatch = class Minimatch extends orig.Minimatch { constructor (pattern, options) { super(pattern, ext(def, options)); } }; m.Minimatch.defaults = options => orig.defaults(ext(def, options)).Minimatch; m.filter = (pattern, options) => orig.filter(pattern, ext(def, options)); m.defaults = options => orig.defaults(ext(def, options)); m.makeRe = (pattern, options) => orig.makeRe(pattern, ext(def, options)); m.braceExpand = (pattern, options) => orig.braceExpand(pattern, ext(def, options)); m.match = (list, pattern, options) => orig.match(list, pattern, ext(def, options)); return m }; // Brace expansion: // a{b,c}d -> abd acd // a{b,}c -> abc ac // a{0..3}d -> a0d a1d a2d a3d // a{b,c{d,e}f}g -> abg acdfg acefg // a{b,c}d{e,f}g -> abdeg acdeg abdeg abdfg // // Invalid sets are not expanded. // a{2..}b -> a{2..}b // a{b}c -> a{b}c minimatch$1.braceExpand = (pattern, options) => braceExpand(pattern, options); const braceExpand = (pattern, options = {}) => { assertValidPattern(pattern); // Thanks to Yeting Li for // improving this regexp to avoid a ReDOS vulnerability. if (options.nobrace || !/\{(?:(?!\{).)*\}/.test(pattern)) { // shortcut. no need to expand. return [pattern] } return expand$3(pattern) }; const MAX_PATTERN_LENGTH = 1024 * 64; const assertValidPattern = pattern => { if (typeof pattern !== 'string') { throw new TypeError('invalid pattern') } if (pattern.length > MAX_PATTERN_LENGTH) { throw new TypeError('pattern is too long') } }; // parse a component of the expanded set. // At this point, no pattern may contain "/" in it // so we're going to return a 2d array, where each entry is the full // pattern, split on '/', and then turned into a regular expression. // A regexp is made at the end which joins each array with an // escaped /, and another full one which joins each regexp with |. // // Following the lead of Bash 4.1, note that "**" only has special meaning // when it is the *only* thing in a path portion. Otherwise, any series // of * is equivalent to a single *. Globstar behavior is enabled by // default, and can be disabled by setting options.noglobstar. 
const SUBPARSE = Symbol('subparse'); minimatch$1.makeRe = (pattern, options) => new Minimatch$1(pattern, options || {}).makeRe(); minimatch$1.match = (list, pattern, options = {}) => { const mm = new Minimatch$1(pattern, options); list = list.filter(f => mm.match(f)); if (mm.options.nonull && !list.length) { list.push(pattern); } return list }; // replace stuff like \* with * const globUnescape = s => s.replace(/\\(.)/g, '$1'); const charUnescape = s => s.replace(/\\([^-\]])/g, '$1'); const regExpEscape = s => s.replace(/[-[\]{}()*+?.,\\^$|#\s]/g, '\\$&'); const braExpEscape = s => s.replace(/[[\]\\]/g, '\\$&'); let Minimatch$1 = class Minimatch { constructor (pattern, options) { assertValidPattern(pattern); if (!options) options = {}; this.options = options; this.set = []; this.pattern = pattern; this.windowsPathsNoEscape = !!options.windowsPathsNoEscape || options.allowWindowsEscape === false; if (this.windowsPathsNoEscape) { this.pattern = this.pattern.replace(/\\/g, '/'); } this.regexp = null; this.negate = false; this.comment = false; this.empty = false; this.partial = !!options.partial; // make the set of regexps etc. this.make(); } debug () {} make () { const pattern = this.pattern; const options = this.options; // empty patterns and comments match nothing. if (!options.nocomment && pattern.charAt(0) === '#') { this.comment = true; return } if (!pattern) { this.empty = true; return } // step 1: figure out negation, etc. this.parseNegate(); // step 2: expand braces let set = this.globSet = this.braceExpand(); if (options.debug) this.debug = (...args) => console.error(...args); this.debug(this.pattern, set); // step 3: now we have a set, so turn each one into a series of path-portion // matching patterns. // These will be regexps, except in the case of "**", which is // set to the GLOBSTAR object for globstar behavior, // and will not contain any / characters set = this.globParts = set.map(s => s.split(slashSplit)); this.debug(this.pattern, set); // glob --> regexps set = set.map((s, si, set) => s.map(this.parse, this)); this.debug(this.pattern, set); // filter out everything that didn't compile properly. set = set.filter(s => s.indexOf(false) === -1); this.debug(this.pattern, set); this.set = set; } parseNegate () { if (this.options.nonegate) return const pattern = this.pattern; let negate = false; let negateOffset = 0; for (let i = 0; i < pattern.length && pattern.charAt(i) === '!'; i++) { negate = !negate; negateOffset++; } if (negateOffset) this.pattern = pattern.slice(negateOffset); this.negate = negate; } // set partial to true to test if, for example, // "/a/b" matches the start of "/*/b/*/d" // Partial means, if you run out of file before you run // out of pattern, then that's fine, as long as all // the parts match. matchOne (file, pattern, partial) { var options = this.options; this.debug('matchOne', { 'this': this, file: file, pattern: pattern }); this.debug('matchOne', file.length, pattern.length); for (var fi = 0, pi = 0, fl = file.length, pl = pattern.length ; (fi < fl) && (pi < pl) ; fi++, pi++) { this.debug('matchOne loop'); var p = pattern[pi]; var f = file[fi]; this.debug(pattern, p, f); // should be impossible. // some invalid regexp stuff in the set. 
/* istanbul ignore if */ if (p === false) return false if (p === GLOBSTAR$2) { this.debug('GLOBSTAR', [pattern, p, f]); // "**" // a/**/b/**/c would match the following: // a/b/x/y/z/c // a/x/y/z/b/c // a/b/x/b/x/c // a/b/c // To do this, take the rest of the pattern after // the **, and see if it would match the file remainder. // If so, return success. // If not, the ** "swallows" a segment, and try again. // This is recursively awful. // // a/**/b/**/c matching a/b/x/y/z/c // - a matches a // - doublestar // - matchOne(b/x/y/z/c, b/**/c) // - b matches b // - doublestar // - matchOne(x/y/z/c, c) -> no // - matchOne(y/z/c, c) -> no // - matchOne(z/c, c) -> no // - matchOne(c, c) yes, hit var fr = fi; var pr = pi + 1; if (pr === pl) { this.debug('** at the end'); // a ** at the end will just swallow the rest. // We have found a match. // however, it will not swallow /.x, unless // options.dot is set. // . and .. are *never* matched by **, for explosively // exponential reasons. for (; fi < fl; fi++) { if (file[fi] === '.' || file[fi] === '..' || (!options.dot && file[fi].charAt(0) === '.')) return false } return true } // ok, let's see if we can swallow whatever we can. while (fr < fl) { var swallowee = file[fr]; this.debug('\nglobstar while', file, fr, pattern, pr, swallowee); // XXX remove this slice. Just pass the start index. if (this.matchOne(file.slice(fr), pattern.slice(pr), partial)) { this.debug('globstar found match!', fr, fl, swallowee); // found a match. return true } else { // can't swallow "." or ".." ever. // can only swallow ".foo" when explicitly asked. if (swallowee === '.' || swallowee === '..' || (!options.dot && swallowee.charAt(0) === '.')) { this.debug('dot detected!', file, fr, pattern, pr); break } // ** swallows a segment, and continue. this.debug('globstar swallow a segment, and continue'); fr++; } } // no match was found. // However, in partial mode, we can't say this is necessarily over. // If there's more *pattern* left, then /* istanbul ignore if */ if (partial) { // ran out of file this.debug('\n>>> no match, partial?', file, fr, pattern, pr); if (fr === fl) return true } return false } // something other than ** // non-magic patterns just have to match exactly // patterns with magic have been turned into regexps. var hit; if (typeof p === 'string') { hit = f === p; this.debug('string match', p, f, hit); } else { hit = f.match(p); this.debug('pattern match', p, f, hit); } if (!hit) return false } // Note: ending in / means that we'll get a final "" // at the end of the pattern. This can only match a // corresponding "" at the end of the file. // If the file ends in /, then it can only match a // a pattern that ends in /, unless the pattern just // doesn't have any more for it. But, a/b/ should *not* // match "a/b/*", even though "" matches against the // [^/]*? pattern, except in partial mode, where it might // simply not be reached yet. // However, a/b/ should still satisfy a/* // now either we fell off the end of the pattern, or we're done. if (fi === fl && pi === pl) { // ran out of pattern and filename at the same time. // an exact hit! return true } else if (fi === fl) { // ran out of file, but still had pattern left. // this is ok if we're doing the match as part of // a glob fs traversal. return partial } else /* istanbul ignore else */ if (pi === pl) { // ran out of pattern, still have file left. // this is only acceptable if we're on the very last // empty segment of a file with a trailing slash. 
// a/* should match a/b/ return (fi === fl - 1) && (file[fi] === '') } // should be unreachable. /* istanbul ignore next */ throw new Error('wtf?') } braceExpand () { return braceExpand(this.pattern, this.options) } parse (pattern, isSub) { assertValidPattern(pattern); const options = this.options; // shortcuts if (pattern === '**') { if (!options.noglobstar) return GLOBSTAR$2 else pattern = '*'; } if (pattern === '') return '' let re = ''; let hasMagic = false; let escaping = false; // ? => one single character const patternListStack = []; const negativeLists = []; let stateChar; let inClass = false; let reClassStart = -1; let classStart = -1; let cs; let pl; let sp; // . and .. never match anything that doesn't start with ., // even when options.dot is set. However, if the pattern // starts with ., then traversal patterns can match. let dotTravAllowed = pattern.charAt(0) === '.'; let dotFileAllowed = options.dot || dotTravAllowed; const patternStart = () => dotTravAllowed ? '' : dotFileAllowed ? '(?!(?:^|\\/)\\.{1,2}(?:$|\\/))' : '(?!\\.)'; const subPatternStart = (p) => p.charAt(0) === '.' ? '' : options.dot ? '(?!(?:^|\\/)\\.{1,2}(?:$|\\/))' : '(?!\\.)'; const clearStateChar = () => { if (stateChar) { // we had some state-tracking character // that wasn't consumed by this pass. switch (stateChar) { case '*': re += star; hasMagic = true; break case '?': re += qmark; hasMagic = true; break default: re += '\\' + stateChar; break } this.debug('clearStateChar %j %j', stateChar, re); stateChar = false; } }; for (let i = 0, c; (i < pattern.length) && (c = pattern.charAt(i)); i++) { this.debug('%s\t%s %s %j', pattern, i, re, c); // skip over any that are escaped. if (escaping) { /* istanbul ignore next - completely not allowed, even escaped. */ if (c === '/') { return false } if (reSpecials[c]) { re += '\\'; } re += c; escaping = false; continue } switch (c) { /* istanbul ignore next */ case '/': { // Should already be path-split by now. return false } case '\\': if (inClass && pattern.charAt(i + 1) === '-') { re += c; continue } clearStateChar(); escaping = true; continue // the various stateChar values // for the "extglob" stuff. case '?': case '*': case '+': case '@': case '!': this.debug('%s\t%s %s %j <-- stateChar', pattern, i, re, c); // all of those are literals inside a class, except that // the glob [!a] means [^a] in regexp if (inClass) { this.debug(' in class'); if (c === '!' && i === classStart + 1) c = '^'; re += c; continue } // if we already have a stateChar, then it means // that there was something like ** or +? in there. // Handle the stateChar, then proceed with this one. this.debug('call clearStateChar %j', stateChar); clearStateChar(); stateChar = c; // if extglob is disabled, then +(asdf|foo) isn't a thing. // just clear the statechar *now*, rather than even diving into // the patternList stuff. if (options.noext) clearStateChar(); continue case '(': { if (inClass) { re += '('; continue } if (!stateChar) { re += '\\('; continue } const plEntry = { type: stateChar, start: i - 1, reStart: re.length, open: plTypes[stateChar].open, close: plTypes[stateChar].close, }; this.debug(this.pattern, '\t', plEntry); patternListStack.push(plEntry); // negation is (?:(?!(?:js)(?:))[^/]*) re += plEntry.open; // next entry starts with a dot maybe? 
if (plEntry.start === 0 && plEntry.type !== '!') { dotTravAllowed = true; re += subPatternStart(pattern.slice(i + 1)); } this.debug('plType %j %j', stateChar, re); stateChar = false; continue } case ')': { const plEntry = patternListStack[patternListStack.length - 1]; if (inClass || !plEntry) { re += '\\)'; continue } patternListStack.pop(); // closing an extglob clearStateChar(); hasMagic = true; pl = plEntry; // negation is (?:(?!js)[^/]*) // The others are (?:) re += pl.close; if (pl.type === '!') { negativeLists.push(Object.assign(pl, { reEnd: re.length })); } continue } case '|': { const plEntry = patternListStack[patternListStack.length - 1]; if (inClass || !plEntry) { re += '\\|'; continue } clearStateChar(); re += '|'; // next subpattern can start with a dot? if (plEntry.start === 0 && plEntry.type !== '!') { dotTravAllowed = true; re += subPatternStart(pattern.slice(i + 1)); } continue } // these are mostly the same in regexp and glob case '[': // swallow any state-tracking char before the [ clearStateChar(); if (inClass) { re += '\\' + c; continue } inClass = true; classStart = i; reClassStart = re.length; re += c; continue case ']': // a right bracket shall lose its special // meaning and represent itself in // a bracket expression if it occurs // first in the list. -- POSIX.2 2.8.3.2 if (i === classStart + 1 || !inClass) { re += '\\' + c; continue } // split where the last [ was, make sure we don't have // an invalid re. if so, re-walk the contents of the // would-be class to re-translate any characters that // were passed through as-is // TODO: It would probably be faster to determine this // without a try/catch and a new RegExp, but it's tricky // to do safely. For now, this is safe and works. cs = pattern.substring(classStart + 1, i); try { RegExp('[' + braExpEscape(charUnescape(cs)) + ']'); // looks good, finish up the class. re += c; } catch (er) { // out of order ranges in JS are errors, but in glob syntax, // they're just a range that matches nothing. re = re.substring(0, reClassStart) + '(?:$.)'; // match nothing ever } hasMagic = true; inClass = false; continue default: // swallow any state char that wasn't consumed clearStateChar(); if (reSpecials[c] && !(c === '^' && inClass)) { re += '\\'; } re += c; break } // switch } // for // handle the case where we left a class open. // "[abc" is valid, equivalent to "\[abc" if (inClass) { // split where the last [ was, and escape it // this is a huge pita. We now have to re-walk // the contents of the would-be class to re-translate // any characters that were passed through as-is cs = pattern.slice(classStart + 1); sp = this.parse(cs, SUBPARSE); re = re.substring(0, reClassStart) + '\\[' + sp[0]; hasMagic = hasMagic || sp[1]; } // handle the case where we had a +( thing at the *end* // of the pattern. // each pattern list stack adds 3 chars, and we need to go through // and escape any | chars that were passed through as-is for the regexp. // Go through and escape them, taking care not to double-escape any // | chars that were already escaped. for (pl = patternListStack.pop(); pl; pl = patternListStack.pop()) { let tail; tail = re.slice(pl.reStart + pl.open.length); this.debug('setting tail', re, pl); // maybe some even number of \, then maybe 1 \, followed by a | tail = tail.replace(/((?:\\{2}){0,64})(\\?)\|/g, (_, $1, $2) => { /* istanbul ignore else - should already be done */ if (!$2) { // the | isn't already escaped, so escape it. 
$2 = '\\'; } // need to escape all those slashes *again*, without escaping the // one that we need for escaping the | character. As it works out, // escaping an even number of slashes can be done by simply repeating // it exactly after itself. That's why this trick works. // // I am sorry that you have to see this. return $1 + $1 + $2 + '|' }); this.debug('tail=%j\n %s', tail, tail, pl, re); const t = pl.type === '*' ? star : pl.type === '?' ? qmark : '\\' + pl.type; hasMagic = true; re = re.slice(0, pl.reStart) + t + '\\(' + tail; } // handle trailing things that only matter at the very end. clearStateChar(); if (escaping) { // trailing \\ re += '\\\\'; } // only need to apply the nodot start if the re starts with // something that could conceivably capture a dot const addPatternStart = addPatternStartSet[re.charAt(0)]; // Hack to work around lack of negative lookbehind in JS // A pattern like: *.!(x).!(y|z) needs to ensure that a name // like 'a.xyz.yz' doesn't match. So, the first negative // lookahead, has to look ALL the way ahead, to the end of // the pattern. for (let n = negativeLists.length - 1; n > -1; n--) { const nl = negativeLists[n]; const nlBefore = re.slice(0, nl.reStart); const nlFirst = re.slice(nl.reStart, nl.reEnd - 8); let nlAfter = re.slice(nl.reEnd); const nlLast = re.slice(nl.reEnd - 8, nl.reEnd) + nlAfter; // Handle nested stuff like *(*.js|!(*.json)), where open parens // mean that we should *not* include the ) in the bit that is considered // "after" the negated section. const closeParensBefore = nlBefore.split(')').length; const openParensBefore = nlBefore.split('(').length - closeParensBefore; let cleanAfter = nlAfter; for (let i = 0; i < openParensBefore; i++) { cleanAfter = cleanAfter.replace(/\)[+*?]?/, ''); } nlAfter = cleanAfter; const dollar = nlAfter === '' && isSub !== SUBPARSE ? '(?:$|\\/)' : ''; re = nlBefore + nlFirst + nlAfter + dollar + nlLast; } // if the re is not "" at this point, then we need to make sure // it doesn't match against an empty path part. // Otherwise a/* will match a/, which it should not. if (re !== '' && hasMagic) { re = '(?=.)' + re; } if (addPatternStart) { re = patternStart() + re; } // parsing just a piece of a larger pattern. if (isSub === SUBPARSE) { return [re, hasMagic] } // if it's nocase, and the lcase/uppercase don't match, it's magic if (options.nocase && !hasMagic) { hasMagic = pattern.toUpperCase() !== pattern.toLowerCase(); } // skip the regexp for non-magical patterns // unescape anything in it, though, so that it'll be // an exact match against a file etc. if (!hasMagic) { return globUnescape(pattern) } const flags = options.nocase ? 'i' : ''; try { return Object.assign(new RegExp('^' + re + '$', flags), { _glob: pattern, _src: re, }) } catch (er) /* istanbul ignore next - should be impossible */ { // If it was an invalid regular expression, then it can't match // anything. This trick looks for a character after the end of // the string, which is of course impossible, except in multi-line // mode, but it's not a /m regex. return new RegExp('$.') } } makeRe () { if (this.regexp || this.regexp === false) return this.regexp // at this point, this.set is a 2d array of partial // pattern strings, or "**". // // It's better to use .match(). This function shouldn't // be used, really, but it's pretty convenient sometimes, // when you just want to work with a regex. const set = this.set; if (!set.length) { this.regexp = false; return this.regexp } const options = this.options; const twoStar = options.noglobstar ? 
star : options.dot ? twoStarDot : twoStarNoDot; const flags = options.nocase ? 'i' : ''; // coalesce globstars and regexpify non-globstar patterns // if it's the only item, then we just do one twoStar // if it's the first, and there are more, prepend (\/|twoStar\/)? to next // if it's the last, append (\/twoStar|) to previous // if it's in the middle, append (\/|\/twoStar\/) to previous // then filter out GLOBSTAR symbols let re = set.map(pattern => { pattern = pattern.map(p => typeof p === 'string' ? regExpEscape(p) : p === GLOBSTAR$2 ? GLOBSTAR$2 : p._src ).reduce((set, p) => { if (!(set[set.length - 1] === GLOBSTAR$2 && p === GLOBSTAR$2)) { set.push(p); } return set }, []); pattern.forEach((p, i) => { if (p !== GLOBSTAR$2 || pattern[i-1] === GLOBSTAR$2) { return } if (i === 0) { if (pattern.length > 1) { pattern[i+1] = '(?:\\\/|' + twoStar + '\\\/)?' + pattern[i+1]; } else { pattern[i] = twoStar; } } else if (i === pattern.length - 1) { pattern[i-1] += '(?:\\\/|' + twoStar + ')?'; } else { pattern[i-1] += '(?:\\\/|\\\/' + twoStar + '\\\/)' + pattern[i+1]; pattern[i+1] = GLOBSTAR$2; } }); return pattern.filter(p => p !== GLOBSTAR$2).join('/') }).join('|'); // must match entire pattern // ending in a * or ** will make it less strict. re = '^(?:' + re + ')$'; // can match anything, as long as it's not this. if (this.negate) re = '^(?!' + re + ').*$'; try { this.regexp = new RegExp(re, flags); } catch (ex) /* istanbul ignore next - should be impossible */ { this.regexp = false; } return this.regexp } match (f, partial = this.partial) { this.debug('match', f, this.pattern); // short-circuit in the case of busted things. // comments, etc. if (this.comment) return false if (this.empty) return f === '' if (f === '/' && partial) return true const options = this.options; // windows: need to use /, not \ if (path$j.sep !== '/') { f = f.split(path$j.sep).join('/'); } // treat the test path as a set of pathparts. f = f.split(slashSplit); this.debug(this.pattern, 'split', f); // just ONE of the pattern sets in this.set needs to match // in order for it to be valid. If negating, then just one // match means that we have failed. // Either way, return on the first hit. const set = this.set; this.debug(this.pattern, 'set', set); // Find the basename of the path by looking for the last non-empty segment let filename; for (let i = f.length - 1; i >= 0; i--) { filename = f[i]; if (filename) break } for (let i = 0; i < set.length; i++) { const pattern = set[i]; let file = f; if (options.matchBase && pattern.length === 1) { file = [filename]; } const hit = this.matchOne(file, pattern, partial); if (hit) { if (options.flipNegate) return true return !this.negate } } // didn't get any hits. this is success if it's a negative // pattern, failure otherwise. 
if (options.flipNegate) return false return this.negate } static defaults (def) { return minimatch$1.defaults(def).Minimatch } }; minimatch$1.Minimatch = Minimatch$1; var inherits = {exports: {}}; var inherits_browser = {exports: {}}; var hasRequiredInherits_browser; function requireInherits_browser () { if (hasRequiredInherits_browser) return inherits_browser.exports; hasRequiredInherits_browser = 1; if (typeof Object.create === 'function') { // implementation from standard node.js 'util' module inherits_browser.exports = function inherits(ctor, superCtor) { if (superCtor) { ctor.super_ = superCtor; ctor.prototype = Object.create(superCtor.prototype, { constructor: { value: ctor, enumerable: false, writable: true, configurable: true } }); } }; } else { // old school shim for old browsers inherits_browser.exports = function inherits(ctor, superCtor) { if (superCtor) { ctor.super_ = superCtor; var TempCtor = function () {}; TempCtor.prototype = superCtor.prototype; ctor.prototype = new TempCtor(); ctor.prototype.constructor = ctor; } }; } return inherits_browser.exports; } try { var util$2 = require('util'); /* istanbul ignore next */ if (typeof util$2.inherits !== 'function') throw ''; inherits.exports = util$2.inherits; } catch (e) { /* istanbul ignore next */ inherits.exports = requireInherits_browser(); } var inheritsExports = inherits.exports; var common$c = {}; common$c.setopts = setopts; common$c.ownProp = ownProp; common$c.makeAbs = makeAbs; common$c.finish = finish; common$c.mark = mark; common$c.isIgnored = isIgnored; common$c.childrenIgnored = childrenIgnored; function ownProp (obj, field) { return Object.prototype.hasOwnProperty.call(obj, field) } var fs$i = require$$0__default; var path$i = require$$0$4; var minimatch = minimatch_1; var isAbsolute = require$$0$4.isAbsolute; var Minimatch = minimatch.Minimatch; function alphasort (a, b) { return a.localeCompare(b, 'en') } function setupIgnores (self, options) { self.ignore = options.ignore || []; if (!Array.isArray(self.ignore)) self.ignore = [self.ignore]; if (self.ignore.length) { self.ignore = self.ignore.map(ignoreMap); } } // ignore patterns are always in dot:true mode. function ignoreMap (pattern) { var gmatcher = null; if (pattern.slice(-3) === '/**') { var gpattern = pattern.replace(/(\/\*\*)+$/, ''); gmatcher = new Minimatch(gpattern, { dot: true }); } return { matcher: new Minimatch(pattern, { dot: true }), gmatcher: gmatcher } } function setopts (self, pattern, options) { if (!options) options = {}; // base-matching: just use globstar for that. 
if (options.matchBase && -1 === pattern.indexOf("/")) { if (options.noglobstar) { throw new Error("base matching requires globstar") } pattern = "**/" + pattern; } self.windowsPathsNoEscape = !!options.windowsPathsNoEscape || options.allowWindowsEscape === false; if (self.windowsPathsNoEscape) { pattern = pattern.replace(/\\/g, '/'); } self.silent = !!options.silent; self.pattern = pattern; self.strict = options.strict !== false; self.realpath = !!options.realpath; self.realpathCache = options.realpathCache || Object.create(null); self.follow = !!options.follow; self.dot = !!options.dot; self.mark = !!options.mark; self.nodir = !!options.nodir; if (self.nodir) self.mark = true; self.sync = !!options.sync; self.nounique = !!options.nounique; self.nonull = !!options.nonull; self.nosort = !!options.nosort; self.nocase = !!options.nocase; self.stat = !!options.stat; self.noprocess = !!options.noprocess; self.absolute = !!options.absolute; self.fs = options.fs || fs$i; self.maxLength = options.maxLength || Infinity; self.cache = options.cache || Object.create(null); self.statCache = options.statCache || Object.create(null); self.symlinks = options.symlinks || Object.create(null); setupIgnores(self, options); self.changedCwd = false; var cwd = process.cwd(); if (!ownProp(options, "cwd")) self.cwd = path$i.resolve(cwd); else { self.cwd = path$i.resolve(options.cwd); self.changedCwd = self.cwd !== cwd; } self.root = options.root || path$i.resolve(self.cwd, "/"); self.root = path$i.resolve(self.root); // TODO: is an absolute `cwd` supposed to be resolved against `root`? // e.g. { cwd: '/test', root: __dirname } === path.join(__dirname, '/test') self.cwdAbs = isAbsolute(self.cwd) ? self.cwd : makeAbs(self, self.cwd); self.nomount = !!options.nomount; if (process.platform === "win32") { self.root = self.root.replace(/\\/g, "/"); self.cwd = self.cwd.replace(/\\/g, "/"); self.cwdAbs = self.cwdAbs.replace(/\\/g, "/"); } // disable comments and negation in Minimatch. // Note that they are not supported in Glob itself anyway. options.nonegate = true; options.nocomment = true; self.minimatch = new Minimatch(pattern, options); self.options = self.minimatch.options; } function finish (self) { var nou = self.nounique; var all = nou ? 
[] : Object.create(null); for (var i = 0, l = self.matches.length; i < l; i ++) { var matches = self.matches[i]; if (!matches || Object.keys(matches).length === 0) { if (self.nonull) { // do like the shell, and spit out the literal glob var literal = self.minimatch.globSet[i]; if (nou) all.push(literal); else all[literal] = true; } } else { // had matches var m = Object.keys(matches); if (nou) all.push.apply(all, m); else m.forEach(function (m) { all[m] = true; }); } } if (!nou) all = Object.keys(all); if (!self.nosort) all = all.sort(alphasort); // at *some* point we statted all of these if (self.mark) { for (var i = 0; i < all.length; i++) { all[i] = self._mark(all[i]); } if (self.nodir) { all = all.filter(function (e) { var notDir = !(/\/$/.test(e)); var c = self.cache[e] || self.cache[makeAbs(self, e)]; if (notDir && c) notDir = c !== 'DIR' && !Array.isArray(c); return notDir }); } } if (self.ignore.length) all = all.filter(function(m) { return !isIgnored(self, m) }); self.found = all; } function mark (self, p) { var abs = makeAbs(self, p); var c = self.cache[abs]; var m = p; if (c) { var isDir = c === 'DIR' || Array.isArray(c); var slash = p.slice(-1) === '/'; if (isDir && !slash) m += '/'; else if (!isDir && slash) m = m.slice(0, -1); if (m !== p) { var mabs = makeAbs(self, m); self.statCache[mabs] = self.statCache[abs]; self.cache[mabs] = self.cache[abs]; } } return m } // lotta situps... function makeAbs (self, f) { var abs = f; if (f.charAt(0) === '/') { abs = path$i.join(self.root, f); } else if (isAbsolute(f) || f === '') { abs = f; } else if (self.changedCwd) { abs = path$i.resolve(self.cwd, f); } else { abs = path$i.resolve(f); } if (process.platform === 'win32') abs = abs.replace(/\\/g, '/'); return abs } // Return true, if pattern ends with globstar '**', for the accompanying parent directory. 
// Ex:- If node_modules/** is the pattern, add 'node_modules' to ignore list along with it's contents function isIgnored (self, path) { if (!self.ignore.length) return false return self.ignore.some(function(item) { return item.matcher.match(path) || !!(item.gmatcher && item.gmatcher.match(path)) }) } function childrenIgnored (self, path) { if (!self.ignore.length) return false return self.ignore.some(function(item) { return !!(item.gmatcher && item.gmatcher.match(path)) }) } var sync$9; var hasRequiredSync; function requireSync () { if (hasRequiredSync) return sync$9; hasRequiredSync = 1; sync$9 = globSync; globSync.GlobSync = GlobSync; var rp = fs_realpath; var minimatch = minimatch_1; requireGlob().Glob; var path = require$$0$4; var assert = require$$5; var isAbsolute = require$$0$4.isAbsolute; var common = common$c; var setopts = common.setopts; var ownProp = common.ownProp; var childrenIgnored = common.childrenIgnored; var isIgnored = common.isIgnored; function globSync (pattern, options) { if (typeof options === 'function' || arguments.length === 3) throw new TypeError('callback provided to sync glob\n'+ 'See: https://github.com/isaacs/node-glob/issues/167') return new GlobSync(pattern, options).found } function GlobSync (pattern, options) { if (!pattern) throw new Error('must provide pattern') if (typeof options === 'function' || arguments.length === 3) throw new TypeError('callback provided to sync glob\n'+ 'See: https://github.com/isaacs/node-glob/issues/167') if (!(this instanceof GlobSync)) return new GlobSync(pattern, options) setopts(this, pattern, options); if (this.noprocess) return this var n = this.minimatch.set.length; this.matches = new Array(n); for (var i = 0; i < n; i ++) { this._process(this.minimatch.set[i], i, false); } this._finish(); } GlobSync.prototype._finish = function () { assert.ok(this instanceof GlobSync); if (this.realpath) { var self = this; this.matches.forEach(function (matchset, index) { var set = self.matches[index] = Object.create(null); for (var p in matchset) { try { p = self._makeAbs(p); var real = rp.realpathSync(p, self.realpathCache); set[real] = true; } catch (er) { if (er.syscall === 'stat') set[self._makeAbs(p)] = true; else throw er } } }); } common.finish(this); }; GlobSync.prototype._process = function (pattern, index, inGlobStar) { assert.ok(this instanceof GlobSync); // Get the first [n] parts of pattern that are all strings. var n = 0; while (typeof pattern[n] === 'string') { n ++; } // now n is the index of the first one that is *not* a string. // See if there's anything else var prefix; switch (n) { // if not, then this is rather simple case pattern.length: this._processSimple(pattern.join('/'), index); return case 0: // pattern *starts* with some non-trivial item. // going to readdir(cwd), but not include the prefix in matches. prefix = null; break default: // pattern has some string bits in the front. // whatever it starts with, whether that's 'absolute' like /foo/bar, // or 'relative' like '../baz' prefix = pattern.slice(0, n).join('/'); break } var remain = pattern.slice(n); // get the list of entries. var read; if (prefix === null) read = '.'; else if (isAbsolute(prefix) || isAbsolute(pattern.map(function (p) { return typeof p === 'string' ? 
p : '[*]' }).join('/'))) { if (!prefix || !isAbsolute(prefix)) prefix = '/' + prefix; read = prefix; } else read = prefix; var abs = this._makeAbs(read); //if ignored, skip processing if (childrenIgnored(this, read)) return var isGlobStar = remain[0] === minimatch.GLOBSTAR; if (isGlobStar) this._processGlobStar(prefix, read, abs, remain, index, inGlobStar); else this._processReaddir(prefix, read, abs, remain, index, inGlobStar); }; GlobSync.prototype._processReaddir = function (prefix, read, abs, remain, index, inGlobStar) { var entries = this._readdir(abs, inGlobStar); // if the abs isn't a dir, then nothing can match! if (!entries) return // It will only match dot entries if it starts with a dot, or if // dot is set. Stuff like @(.foo|.bar) isn't allowed. var pn = remain[0]; var negate = !!this.minimatch.negate; var rawGlob = pn._glob; var dotOk = this.dot || rawGlob.charAt(0) === '.'; var matchedEntries = []; for (var i = 0; i < entries.length; i++) { var e = entries[i]; if (e.charAt(0) !== '.' || dotOk) { var m; if (negate && !prefix) { m = !e.match(pn); } else { m = e.match(pn); } if (m) matchedEntries.push(e); } } var len = matchedEntries.length; // If there are no matched entries, then nothing matches. if (len === 0) return // if this is the last remaining pattern bit, then no need for // an additional stat *unless* the user has specified mark or // stat explicitly. We know they exist, since readdir returned // them. if (remain.length === 1 && !this.mark && !this.stat) { if (!this.matches[index]) this.matches[index] = Object.create(null); for (var i = 0; i < len; i ++) { var e = matchedEntries[i]; if (prefix) { if (prefix.slice(-1) !== '/') e = prefix + '/' + e; else e = prefix + e; } if (e.charAt(0) === '/' && !this.nomount) { e = path.join(this.root, e); } this._emitMatch(index, e); } // This was the last one, and no stats were needed return } // now test all matched entries as stand-ins for that part // of the pattern. remain.shift(); for (var i = 0; i < len; i ++) { var e = matchedEntries[i]; var newPattern; if (prefix) newPattern = [prefix, e]; else newPattern = [e]; this._process(newPattern.concat(remain), index, inGlobStar); } }; GlobSync.prototype._emitMatch = function (index, e) { if (isIgnored(this, e)) return var abs = this._makeAbs(e); if (this.mark) e = this._mark(e); if (this.absolute) { e = abs; } if (this.matches[index][e]) return if (this.nodir) { var c = this.cache[abs]; if (c === 'DIR' || Array.isArray(c)) return } this.matches[index][e] = true; if (this.stat) this._stat(e); }; GlobSync.prototype._readdirInGlobStar = function (abs) { // follow all symlinked directories forever // just proceed as if this is a non-globstar situation if (this.follow) return this._readdir(abs, false) var entries; var lstat; try { lstat = this.fs.lstatSync(abs); } catch (er) { if (er.code === 'ENOENT') { // lstat failed, doesn't exist return null } } var isSym = lstat && lstat.isSymbolicLink(); this.symlinks[abs] = isSym; // If it's not a symlink or a dir, then it's definitely a regular file. // don't bother doing a readdir in that case. 
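// Added note: this.cache[abs] follows the walker's convention used throughout this
// file — 'FILE' (or true, when only existence is known) for files, 'DIR' or an
// array of child entries for directories, and false for paths known not to exist.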
if (!isSym && lstat && !lstat.isDirectory()) this.cache[abs] = 'FILE'; else entries = this._readdir(abs, false); return entries }; GlobSync.prototype._readdir = function (abs, inGlobStar) { if (inGlobStar && !ownProp(this.symlinks, abs)) return this._readdirInGlobStar(abs) if (ownProp(this.cache, abs)) { var c = this.cache[abs]; if (!c || c === 'FILE') return null if (Array.isArray(c)) return c } try { return this._readdirEntries(abs, this.fs.readdirSync(abs)) } catch (er) { this._readdirError(abs, er); return null } }; GlobSync.prototype._readdirEntries = function (abs, entries) { // if we haven't asked to stat everything, then just // assume that everything in there exists, so we can avoid // having to stat it a second time. if (!this.mark && !this.stat) { for (var i = 0; i < entries.length; i ++) { var e = entries[i]; if (abs === '/') e = abs + e; else e = abs + '/' + e; this.cache[e] = true; } } this.cache[abs] = entries; // mark and cache dir-ness return entries }; GlobSync.prototype._readdirError = function (f, er) { // handle errors, and cache the information switch (er.code) { case 'ENOTSUP': // https://github.com/isaacs/node-glob/issues/205 case 'ENOTDIR': // totally normal. means it *does* exist. var abs = this._makeAbs(f); this.cache[abs] = 'FILE'; if (abs === this.cwdAbs) { var error = new Error(er.code + ' invalid cwd ' + this.cwd); error.path = this.cwd; error.code = er.code; throw error } break case 'ENOENT': // not terribly unusual case 'ELOOP': case 'ENAMETOOLONG': case 'UNKNOWN': this.cache[this._makeAbs(f)] = false; break default: // some unusual error. Treat as failure. this.cache[this._makeAbs(f)] = false; if (this.strict) throw er if (!this.silent) console.error('glob error', er); break } }; GlobSync.prototype._processGlobStar = function (prefix, read, abs, remain, index, inGlobStar) { var entries = this._readdir(abs, inGlobStar); // no entries means not a dir, so it can never have matches // foo.txt/** doesn't match foo.txt if (!entries) return // test without the globstar, and with every child both below // and replacing the globstar. var remainWithoutGlobStar = remain.slice(1); var gspref = prefix ? [ prefix ] : []; var noGlobStar = gspref.concat(remainWithoutGlobStar); // the noGlobStar pattern exits the inGlobStar state this._process(noGlobStar, index, false); var len = entries.length; var isSym = this.symlinks[abs]; // If it's a symlink, and we're in a globstar, then stop if (isSym && inGlobStar) return for (var i = 0; i < len; i++) { var e = entries[i]; if (e.charAt(0) === '.' && !this.dot) continue // these two cases enter the inGlobStar state var instead = gspref.concat(entries[i], remainWithoutGlobStar); this._process(instead, index, true); var below = gspref.concat(entries[i], remain); this._process(below, index, true); } }; GlobSync.prototype._processSimple = function (prefix, index) { // XXX review this. Shouldn't it be doing the mounting etc // before doing stat? kinda weird? 
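// Added note: this branch handles a pattern with no magic characters left
// (every segment was a literal string), e.g. 'src/index.js' — it simply stats
// the joined string below and emits it as a match only if the path exists.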
var exists = this._stat(prefix); if (!this.matches[index]) this.matches[index] = Object.create(null); // If it doesn't exist, then just mark the lack of results if (!exists) return if (prefix && isAbsolute(prefix) && !this.nomount) { var trail = /[\/\\]$/.test(prefix); if (prefix.charAt(0) === '/') { prefix = path.join(this.root, prefix); } else { prefix = path.resolve(this.root, prefix); if (trail) prefix += '/'; } } if (process.platform === 'win32') prefix = prefix.replace(/\\/g, '/'); // Mark this as a match this._emitMatch(index, prefix); }; // Returns either 'DIR', 'FILE', or false GlobSync.prototype._stat = function (f) { var abs = this._makeAbs(f); var needDir = f.slice(-1) === '/'; if (f.length > this.maxLength) return false if (!this.stat && ownProp(this.cache, abs)) { var c = this.cache[abs]; if (Array.isArray(c)) c = 'DIR'; // It exists, but maybe not how we need it if (!needDir || c === 'DIR') return c if (needDir && c === 'FILE') return false // otherwise we have to stat, because maybe c=true // if we know it exists, but not what it is. } var stat = this.statCache[abs]; if (!stat) { var lstat; try { lstat = this.fs.lstatSync(abs); } catch (er) { if (er && (er.code === 'ENOENT' || er.code === 'ENOTDIR')) { this.statCache[abs] = false; return false } } if (lstat && lstat.isSymbolicLink()) { try { stat = this.fs.statSync(abs); } catch (er) { stat = lstat; } } else { stat = lstat; } } this.statCache[abs] = stat; var c = true; if (stat) c = stat.isDirectory() ? 'DIR' : 'FILE'; this.cache[abs] = this.cache[abs] || c; if (needDir && c === 'FILE') return false return c }; GlobSync.prototype._mark = function (p) { return common.mark(this, p) }; GlobSync.prototype._makeAbs = function (f) { return common.makeAbs(this, f) }; return sync$9; } // Returns a wrapper function that returns a wrapped callback // The wrapper function should do some stuff, and return a // presumably different callback function. // This makes sure that own properties are retained, so that // decorations and such are not lost along the way. 
var wrappy_1 = wrappy$2; function wrappy$2 (fn, cb) { if (fn && cb) return wrappy$2(fn)(cb) if (typeof fn !== 'function') throw new TypeError('need wrapper function') Object.keys(fn).forEach(function (k) { wrapper[k] = fn[k]; }); return wrapper function wrapper() { var args = new Array(arguments.length); for (var i = 0; i < args.length; i++) { args[i] = arguments[i]; } var ret = fn.apply(this, args); var cb = args[args.length-1]; if (typeof ret === 'function' && ret !== cb) { Object.keys(cb).forEach(function (k) { ret[k] = cb[k]; }); } return ret } } var once$2 = {exports: {}}; var wrappy$1 = wrappy_1; once$2.exports = wrappy$1(once$1); once$2.exports.strict = wrappy$1(onceStrict); once$1.proto = once$1(function () { Object.defineProperty(Function.prototype, 'once', { value: function () { return once$1(this) }, configurable: true }); Object.defineProperty(Function.prototype, 'onceStrict', { value: function () { return onceStrict(this) }, configurable: true }); }); function once$1 (fn) { var f = function () { if (f.called) return f.value f.called = true; return f.value = fn.apply(this, arguments) }; f.called = false; return f } function onceStrict (fn) { var f = function () { if (f.called) throw new Error(f.onceError) f.called = true; return f.value = fn.apply(this, arguments) }; var name = fn.name || 'Function wrapped with `once`'; f.onceError = name + " shouldn't be called more than once"; f.called = false; return f } var onceExports = once$2.exports; var wrappy = wrappy_1; var reqs = Object.create(null); var once = onceExports; var inflight_1 = wrappy(inflight); function inflight (key, cb) { if (reqs[key]) { reqs[key].push(cb); return null } else { reqs[key] = [cb]; return makeres(key) } } function makeres (key) { return once(function RES () { var cbs = reqs[key]; var len = cbs.length; var args = slice$1(arguments); // XXX It's somewhat ambiguous whether a new callback added in this // pass should be queued for later execution if something in the // list of callbacks throws, or if it should just be discarded. // However, it's such an edge case that it hardly matters, and either // choice is likely as surprising as the other. // As it happens, we do go ahead and schedule it for later execution. try { for (var i = 0; i < len; i++) { cbs[i].apply(null, args); } } finally { if (cbs.length > len) { // added more in the interim. // de-zalgo, just in case, but don't call again. cbs.splice(0, len); process.nextTick(function () { RES.apply(null, args); }); } else { delete reqs[key]; } } }) } function slice$1 (args) { var length = args.length; var array = []; for (var i = 0; i < length; i++) array[i] = args[i]; return array } var glob_1; var hasRequiredGlob; function requireGlob () { if (hasRequiredGlob) return glob_1; hasRequiredGlob = 1; // Approach: // // 1. Get the minimatch set // 2. For each pattern in the set, PROCESS(pattern, false) // 3. Store matches per-set, then uniq them // // PROCESS(pattern, inGlobStar) // Get the first [n] items from pattern that are all strings // Join these together. This is PREFIX. // If there is no more remaining, then stat(PREFIX) and // add to matches if it succeeds. END. // // If inGlobStar and PREFIX is symlink and points to dir // set ENTRIES = [] // else readdir(PREFIX) as ENTRIES // If fail, END // // with ENTRIES // If pattern[n] is GLOBSTAR // // handle the case where the globstar match is empty // // by pruning it out, and testing the resulting pattern // PROCESS(pattern[0..n] + pattern[n+1 .. $], false) // // handle other cases. 
// for ENTRY in ENTRIES (not dotfiles) // // attach globstar + tail onto the entry // // Mark that this entry is a globstar match // PROCESS(pattern[0..n] + ENTRY + pattern[n .. $], true) // // else // not globstar // for ENTRY in ENTRIES (not dotfiles, unless pattern[n] is dot) // Test ENTRY against pattern[n] // If fails, continue // If passes, PROCESS(pattern[0..n] + item + pattern[n+1 .. $]) // // Caveat: // Cache all stats and readdirs results to minimize syscall. Since all // we ever care about is existence and directory-ness, we can just keep // `true` for files, and [children,...] for directories, or `false` for // things that don't exist. glob_1 = glob; var rp = fs_realpath; var minimatch = minimatch_1; var inherits = inheritsExports; var EE = require$$0$5.EventEmitter; var path = require$$0$4; var assert = require$$5; var isAbsolute = require$$0$4.isAbsolute; var globSync = requireSync(); var common = common$c; var setopts = common.setopts; var ownProp = common.ownProp; var inflight = inflight_1; var childrenIgnored = common.childrenIgnored; var isIgnored = common.isIgnored; var once = onceExports; function glob (pattern, options, cb) { if (typeof options === 'function') cb = options, options = {}; if (!options) options = {}; if (options.sync) { if (cb) throw new TypeError('callback provided to sync glob') return globSync(pattern, options) } return new Glob(pattern, options, cb) } glob.sync = globSync; var GlobSync = glob.GlobSync = globSync.GlobSync; // old api surface glob.glob = glob; function extend (origin, add) { if (add === null || typeof add !== 'object') { return origin } var keys = Object.keys(add); var i = keys.length; while (i--) { origin[keys[i]] = add[keys[i]]; } return origin } glob.hasMagic = function (pattern, options_) { var options = extend({}, options_); options.noprocess = true; var g = new Glob(pattern, options); var set = g.minimatch.set; if (!pattern) return false if (set.length > 1) return true for (var j = 0; j < set[0].length; j++) { if (typeof set[0][j] !== 'string') return true } return false }; glob.Glob = Glob; inherits(Glob, EE); function Glob (pattern, options, cb) { if (typeof options === 'function') { cb = options; options = null; } if (options && options.sync) { if (cb) throw new TypeError('callback provided to sync glob') return new GlobSync(pattern, options) } if (!(this instanceof Glob)) return new Glob(pattern, options, cb) setopts(this, pattern, options); this._didRealPath = false; // process each pattern in the minimatch set var n = this.minimatch.set.length; // The matches are stored as {: true,...} so that // duplicates are automagically pruned. // Later, we do an Object.keys() on these. // Keep them as a list so we can fill in when nonull is set. 
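// Added note: each this.matches[i] is a null-prototype object used as a set,
// e.g. { 'src/a.js': true, 'src/b.js': true } — one per pattern in the minimatch
// set; common.finish() later flattens them with Object.keys(), sorting and
// de-duplicating unless nosort / nounique are set.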
this.matches = new Array(n); if (typeof cb === 'function') { cb = once(cb); this.on('error', cb); this.on('end', function (matches) { cb(null, matches); }); } var self = this; this._processing = 0; this._emitQueue = []; this._processQueue = []; this.paused = false; if (this.noprocess) return this if (n === 0) return done() var sync = true; for (var i = 0; i < n; i ++) { this._process(this.minimatch.set[i], i, false, done); } sync = false; function done () { --self._processing; if (self._processing <= 0) { if (sync) { process.nextTick(function () { self._finish(); }); } else { self._finish(); } } } } Glob.prototype._finish = function () { assert(this instanceof Glob); if (this.aborted) return if (this.realpath && !this._didRealpath) return this._realpath() common.finish(this); this.emit('end', this.found); }; Glob.prototype._realpath = function () { if (this._didRealpath) return this._didRealpath = true; var n = this.matches.length; if (n === 0) return this._finish() var self = this; for (var i = 0; i < this.matches.length; i++) this._realpathSet(i, next); function next () { if (--n === 0) self._finish(); } }; Glob.prototype._realpathSet = function (index, cb) { var matchset = this.matches[index]; if (!matchset) return cb() var found = Object.keys(matchset); var self = this; var n = found.length; if (n === 0) return cb() var set = this.matches[index] = Object.create(null); found.forEach(function (p, i) { // If there's a problem with the stat, then it means that // one or more of the links in the realpath couldn't be // resolved. just return the abs value in that case. p = self._makeAbs(p); rp.realpath(p, self.realpathCache, function (er, real) { if (!er) set[real] = true; else if (er.syscall === 'stat') set[p] = true; else self.emit('error', er); // srsly wtf right here if (--n === 0) { self.matches[index] = set; cb(); } }); }); }; Glob.prototype._mark = function (p) { return common.mark(this, p) }; Glob.prototype._makeAbs = function (f) { return common.makeAbs(this, f) }; Glob.prototype.abort = function () { this.aborted = true; this.emit('abort'); }; Glob.prototype.pause = function () { if (!this.paused) { this.paused = true; this.emit('pause'); } }; Glob.prototype.resume = function () { if (this.paused) { this.emit('resume'); this.paused = false; if (this._emitQueue.length) { var eq = this._emitQueue.slice(0); this._emitQueue.length = 0; for (var i = 0; i < eq.length; i ++) { var e = eq[i]; this._emitMatch(e[0], e[1]); } } if (this._processQueue.length) { var pq = this._processQueue.slice(0); this._processQueue.length = 0; for (var i = 0; i < pq.length; i ++) { var p = pq[i]; this._processing--; this._process(p[0], p[1], p[2], p[3]); } } } }; Glob.prototype._process = function (pattern, index, inGlobStar, cb) { assert(this instanceof Glob); assert(typeof cb === 'function'); if (this.aborted) return this._processing++; if (this.paused) { this._processQueue.push([pattern, index, inGlobStar, cb]); return } //console.error('PROCESS %d', this._processing, pattern) // Get the first [n] parts of pattern that are all strings. var n = 0; while (typeof pattern[n] === 'string') { n ++; } // now n is the index of the first one that is *not* a string. // see if there's anything else var prefix; switch (n) { // if not, then this is rather simple case pattern.length: this._processSimple(pattern.join('/'), index, cb); return case 0: // pattern *starts* with some non-trivial item. // going to readdir(cwd), but not include the prefix in matches. 
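// Added example: n counts the leading literal segments of the parsed pattern.
// For a set like ['src', 'lib', <'*.js' regex>] the default branch below joins
// them into prefix 'src/lib' and remain holds the regex part; here (n === 0,
// e.g. '*.js' or '**/foo') there is no literal prefix, so the walk starts
// from readdir('.').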
prefix = null; break default: // pattern has some string bits in the front. // whatever it starts with, whether that's 'absolute' like /foo/bar, // or 'relative' like '../baz' prefix = pattern.slice(0, n).join('/'); break } var remain = pattern.slice(n); // get the list of entries. var read; if (prefix === null) read = '.'; else if (isAbsolute(prefix) || isAbsolute(pattern.map(function (p) { return typeof p === 'string' ? p : '[*]' }).join('/'))) { if (!prefix || !isAbsolute(prefix)) prefix = '/' + prefix; read = prefix; } else read = prefix; var abs = this._makeAbs(read); //if ignored, skip _processing if (childrenIgnored(this, read)) return cb() var isGlobStar = remain[0] === minimatch.GLOBSTAR; if (isGlobStar) this._processGlobStar(prefix, read, abs, remain, index, inGlobStar, cb); else this._processReaddir(prefix, read, abs, remain, index, inGlobStar, cb); }; Glob.prototype._processReaddir = function (prefix, read, abs, remain, index, inGlobStar, cb) { var self = this; this._readdir(abs, inGlobStar, function (er, entries) { return self._processReaddir2(prefix, read, abs, remain, index, inGlobStar, entries, cb) }); }; Glob.prototype._processReaddir2 = function (prefix, read, abs, remain, index, inGlobStar, entries, cb) { // if the abs isn't a dir, then nothing can match! if (!entries) return cb() // It will only match dot entries if it starts with a dot, or if // dot is set. Stuff like @(.foo|.bar) isn't allowed. var pn = remain[0]; var negate = !!this.minimatch.negate; var rawGlob = pn._glob; var dotOk = this.dot || rawGlob.charAt(0) === '.'; var matchedEntries = []; for (var i = 0; i < entries.length; i++) { var e = entries[i]; if (e.charAt(0) !== '.' || dotOk) { var m; if (negate && !prefix) { m = !e.match(pn); } else { m = e.match(pn); } if (m) matchedEntries.push(e); } } //console.error('prd2', prefix, entries, remain[0]._glob, matchedEntries) var len = matchedEntries.length; // If there are no matched entries, then nothing matches. if (len === 0) return cb() // if this is the last remaining pattern bit, then no need for // an additional stat *unless* the user has specified mark or // stat explicitly. We know they exist, since readdir returned // them. if (remain.length === 1 && !this.mark && !this.stat) { if (!this.matches[index]) this.matches[index] = Object.create(null); for (var i = 0; i < len; i ++) { var e = matchedEntries[i]; if (prefix) { if (prefix !== '/') e = prefix + '/' + e; else e = prefix + e; } if (e.charAt(0) === '/' && !this.nomount) { e = path.join(this.root, e); } this._emitMatch(index, e); } // This was the last one, and no stats were needed return cb() } // now test all matched entries as stand-ins for that part // of the pattern. remain.shift(); for (var i = 0; i < len; i ++) { var e = matchedEntries[i]; if (prefix) { if (prefix !== '/') e = prefix + '/' + e; else e = prefix + e; } this._process([e].concat(remain), index, inGlobStar, cb); } cb(); }; Glob.prototype._emitMatch = function (index, e) { if (this.aborted) return if (isIgnored(this, e)) return if (this.paused) { this._emitQueue.push([index, e]); return } var abs = isAbsolute(e) ? 
e : this._makeAbs(e); if (this.mark) e = this._mark(e); if (this.absolute) e = abs; if (this.matches[index][e]) return if (this.nodir) { var c = this.cache[abs]; if (c === 'DIR' || Array.isArray(c)) return } this.matches[index][e] = true; var st = this.statCache[abs]; if (st) this.emit('stat', e, st); this.emit('match', e); }; Glob.prototype._readdirInGlobStar = function (abs, cb) { if (this.aborted) return // follow all symlinked directories forever // just proceed as if this is a non-globstar situation if (this.follow) return this._readdir(abs, false, cb) var lstatkey = 'lstat\0' + abs; var self = this; var lstatcb = inflight(lstatkey, lstatcb_); if (lstatcb) self.fs.lstat(abs, lstatcb); function lstatcb_ (er, lstat) { if (er && er.code === 'ENOENT') return cb() var isSym = lstat && lstat.isSymbolicLink(); self.symlinks[abs] = isSym; // If it's not a symlink or a dir, then it's definitely a regular file. // don't bother doing a readdir in that case. if (!isSym && lstat && !lstat.isDirectory()) { self.cache[abs] = 'FILE'; cb(); } else self._readdir(abs, false, cb); } }; Glob.prototype._readdir = function (abs, inGlobStar, cb) { if (this.aborted) return cb = inflight('readdir\0'+abs+'\0'+inGlobStar, cb); if (!cb) return //console.error('RD %j %j', +inGlobStar, abs) if (inGlobStar && !ownProp(this.symlinks, abs)) return this._readdirInGlobStar(abs, cb) if (ownProp(this.cache, abs)) { var c = this.cache[abs]; if (!c || c === 'FILE') return cb() if (Array.isArray(c)) return cb(null, c) } var self = this; self.fs.readdir(abs, readdirCb(this, abs, cb)); }; function readdirCb (self, abs, cb) { return function (er, entries) { if (er) self._readdirError(abs, er, cb); else self._readdirEntries(abs, entries, cb); } } Glob.prototype._readdirEntries = function (abs, entries, cb) { if (this.aborted) return // if we haven't asked to stat everything, then just // assume that everything in there exists, so we can avoid // having to stat it a second time. if (!this.mark && !this.stat) { for (var i = 0; i < entries.length; i ++) { var e = entries[i]; if (abs === '/') e = abs + e; else e = abs + '/' + e; this.cache[e] = true; } } this.cache[abs] = entries; return cb(null, entries) }; Glob.prototype._readdirError = function (f, er, cb) { if (this.aborted) return // handle errors, and cache the information switch (er.code) { case 'ENOTSUP': // https://github.com/isaacs/node-glob/issues/205 case 'ENOTDIR': // totally normal. means it *does* exist. var abs = this._makeAbs(f); this.cache[abs] = 'FILE'; if (abs === this.cwdAbs) { var error = new Error(er.code + ' invalid cwd ' + this.cwd); error.path = this.cwd; error.code = er.code; this.emit('error', error); this.abort(); } break case 'ENOENT': // not terribly unusual case 'ELOOP': case 'ENAMETOOLONG': case 'UNKNOWN': this.cache[this._makeAbs(f)] = false; break default: // some unusual error. Treat as failure. 
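// Added note: for unexpected readdir errors the path is cached as nonexistent;
// with options.strict the error is emitted on the Glob instance (which throws
// if unhandled) and the walk aborts, and unless options.silent is set it is
// also logged to console.error.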
this.cache[this._makeAbs(f)] = false; if (this.strict) { this.emit('error', er); // If the error is handled, then we abort // if not, we threw out of here this.abort(); } if (!this.silent) console.error('glob error', er); break } return cb() }; Glob.prototype._processGlobStar = function (prefix, read, abs, remain, index, inGlobStar, cb) { var self = this; this._readdir(abs, inGlobStar, function (er, entries) { self._processGlobStar2(prefix, read, abs, remain, index, inGlobStar, entries, cb); }); }; Glob.prototype._processGlobStar2 = function (prefix, read, abs, remain, index, inGlobStar, entries, cb) { //console.error('pgs2', prefix, remain[0], entries) // no entries means not a dir, so it can never have matches // foo.txt/** doesn't match foo.txt if (!entries) return cb() // test without the globstar, and with every child both below // and replacing the globstar. var remainWithoutGlobStar = remain.slice(1); var gspref = prefix ? [ prefix ] : []; var noGlobStar = gspref.concat(remainWithoutGlobStar); // the noGlobStar pattern exits the inGlobStar state this._process(noGlobStar, index, false, cb); var isSym = this.symlinks[abs]; var len = entries.length; // If it's a symlink, and we're in a globstar, then stop if (isSym && inGlobStar) return cb() for (var i = 0; i < len; i++) { var e = entries[i]; if (e.charAt(0) === '.' && !this.dot) continue // these two cases enter the inGlobStar state var instead = gspref.concat(entries[i], remainWithoutGlobStar); this._process(instead, index, true, cb); var below = gspref.concat(entries[i], remain); this._process(below, index, true, cb); } cb(); }; Glob.prototype._processSimple = function (prefix, index, cb) { // XXX review this. Shouldn't it be doing the mounting etc // before doing stat? kinda weird? var self = this; this._stat(prefix, function (er, exists) { self._processSimple2(prefix, index, er, exists, cb); }); }; Glob.prototype._processSimple2 = function (prefix, index, er, exists, cb) { //console.error('ps2', prefix, exists) if (!this.matches[index]) this.matches[index] = Object.create(null); // If it doesn't exist, then just mark the lack of results if (!exists) return cb() if (prefix && isAbsolute(prefix) && !this.nomount) { var trail = /[\/\\]$/.test(prefix); if (prefix.charAt(0) === '/') { prefix = path.join(this.root, prefix); } else { prefix = path.resolve(this.root, prefix); if (trail) prefix += '/'; } } if (process.platform === 'win32') prefix = prefix.replace(/\\/g, '/'); // Mark this as a match this._emitMatch(index, prefix); cb(); }; // Returns either 'DIR', 'FILE', or false Glob.prototype._stat = function (f, cb) { var abs = this._makeAbs(f); var needDir = f.slice(-1) === '/'; if (f.length > this.maxLength) return cb() if (!this.stat && ownProp(this.cache, abs)) { var c = this.cache[abs]; if (Array.isArray(c)) c = 'DIR'; // It exists, but maybe not how we need it if (!needDir || c === 'DIR') return cb(null, c) if (needDir && c === 'FILE') return cb() // otherwise we have to stat, because maybe c=true // if we know it exists, but not what it is. } var stat = this.statCache[abs]; if (stat !== undefined) { if (stat === false) return cb(null, stat) else { var type = stat.isDirectory() ? 
'DIR' : 'FILE'; if (needDir && type === 'FILE') return cb() else return cb(null, type, stat) } } var self = this; var statcb = inflight('stat\0' + abs, lstatcb_); if (statcb) self.fs.lstat(abs, statcb); function lstatcb_ (er, lstat) { if (lstat && lstat.isSymbolicLink()) { // If it's a symlink, then treat it as the target, unless // the target does not exist, then treat it as a file. return self.fs.stat(abs, function (er, stat) { if (er) self._stat2(f, abs, null, lstat, cb); else self._stat2(f, abs, er, stat, cb); }) } else { self._stat2(f, abs, er, lstat, cb); } } }; Glob.prototype._stat2 = function (f, abs, er, stat, cb) { if (er && (er.code === 'ENOENT' || er.code === 'ENOTDIR')) { this.statCache[abs] = false; return cb() } var needDir = f.slice(-1) === '/'; this.statCache[abs] = stat; if (abs.slice(-1) === '/' && stat && !stat.isDirectory()) return cb(null, false, stat) var c = true; if (stat) c = stat.isDirectory() ? 'DIR' : 'FILE'; this.cache[abs] = this.cache[abs] || c; if (needDir && c === 'FILE') return cb() return cb(null, c, stat) }; return glob_1; } var globExports = requireGlob(); var glob$1 = /*@__PURE__*/getDefaultExportFromCjs(globExports); const comma = ','.charCodeAt(0); const semicolon = ';'.charCodeAt(0); const chars$1 = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/'; const intToChar = new Uint8Array(64); // 64 possible chars. const charToInt = new Uint8Array(128); // z is 122 in ASCII for (let i = 0; i < chars$1.length; i++) { const c = chars$1.charCodeAt(i); intToChar[i] = c; charToInt[c] = i; } // Provide a fallback for older environments. const td = typeof TextDecoder !== 'undefined' ? /* #__PURE__ */ new TextDecoder() : typeof Buffer !== 'undefined' ? { decode(buf) { const out = Buffer.from(buf.buffer, buf.byteOffset, buf.byteLength); return out.toString(); }, } : { decode(buf) { let out = ''; for (let i = 0; i < buf.length; i++) { out += String.fromCharCode(buf[i]); } return out; }, }; function decode(mappings) { const state = new Int32Array(5); const decoded = []; let index = 0; do { const semi = indexOf(mappings, index); const line = []; let sorted = true; let lastCol = 0; state[0] = 0; for (let i = index; i < semi; i++) { let seg; i = decodeInteger(mappings, i, state, 0); // genColumn const col = state[0]; if (col < lastCol) sorted = false; lastCol = col; if (hasMoreVlq(mappings, i, semi)) { i = decodeInteger(mappings, i, state, 1); // sourcesIndex i = decodeInteger(mappings, i, state, 2); // sourceLine i = decodeInteger(mappings, i, state, 3); // sourceColumn if (hasMoreVlq(mappings, i, semi)) { i = decodeInteger(mappings, i, state, 4); // namesIndex seg = [col, state[1], state[2], state[3], state[4]]; } else { seg = [col, state[1], state[2], state[3]]; } } else { seg = [col]; } line.push(seg); } if (!sorted) sort(line); decoded.push(line); index = semi + 1; } while (index <= mappings.length); return decoded; } function indexOf(mappings, index) { const idx = mappings.indexOf(';', index); return idx === -1 ? 
mappings.length : idx; } function decodeInteger(mappings, pos, state, j) { let value = 0; let shift = 0; let integer = 0; do { const c = mappings.charCodeAt(pos++); integer = charToInt[c]; value |= (integer & 31) << shift; shift += 5; } while (integer & 32); const shouldNegate = value & 1; value >>>= 1; if (shouldNegate) { value = -0x80000000 | -value; } state[j] += value; return pos; } function hasMoreVlq(mappings, i, length) { if (i >= length) return false; return mappings.charCodeAt(i) !== comma; } function sort(line) { line.sort(sortComparator$1); } function sortComparator$1(a, b) { return a[0] - b[0]; } function encode$1(decoded) { const state = new Int32Array(5); const bufLength = 1024 * 16; const subLength = bufLength - 36; const buf = new Uint8Array(bufLength); const sub = buf.subarray(0, subLength); let pos = 0; let out = ''; for (let i = 0; i < decoded.length; i++) { const line = decoded[i]; if (i > 0) { if (pos === bufLength) { out += td.decode(buf); pos = 0; } buf[pos++] = semicolon; } if (line.length === 0) continue; state[0] = 0; for (let j = 0; j < line.length; j++) { const segment = line[j]; // We can push up to 5 ints, each int can take at most 7 chars, and we // may push a comma. if (pos > subLength) { out += td.decode(sub); buf.copyWithin(0, subLength, pos); pos -= subLength; } if (j > 0) buf[pos++] = comma; pos = encodeInteger(buf, pos, state, segment, 0); // genColumn if (segment.length === 1) continue; pos = encodeInteger(buf, pos, state, segment, 1); // sourcesIndex pos = encodeInteger(buf, pos, state, segment, 2); // sourceLine pos = encodeInteger(buf, pos, state, segment, 3); // sourceColumn if (segment.length === 4) continue; pos = encodeInteger(buf, pos, state, segment, 4); // namesIndex } } return out + td.decode(buf.subarray(0, pos)); } function encodeInteger(buf, pos, state, segment, j) { const next = segment[j]; let num = next - state[j]; state[j] = next; num = num < 0 ? (-num << 1) | 1 : num << 1; do { let clamped = num & 0b011111; num >>>= 5; if (num > 0) clamped |= 0b100000; buf[pos++] = intToChar[clamped]; } while (num > 0); return pos; } class BitSet { constructor(arg) { this.bits = arg instanceof BitSet ? 
arg.bits.slice() : []; } add(n) { this.bits[n >> 5] |= 1 << (n & 31); } has(n) { return !!(this.bits[n >> 5] & (1 << (n & 31))); } } class Chunk { constructor(start, end, content) { this.start = start; this.end = end; this.original = content; this.intro = ''; this.outro = ''; this.content = content; this.storeName = false; this.edited = false; { this.previous = null; this.next = null; } } appendLeft(content) { this.outro += content; } appendRight(content) { this.intro = this.intro + content; } clone() { const chunk = new Chunk(this.start, this.end, this.original); chunk.intro = this.intro; chunk.outro = this.outro; chunk.content = this.content; chunk.storeName = this.storeName; chunk.edited = this.edited; return chunk; } contains(index) { return this.start < index && index < this.end; } eachNext(fn) { let chunk = this; while (chunk) { fn(chunk); chunk = chunk.next; } } eachPrevious(fn) { let chunk = this; while (chunk) { fn(chunk); chunk = chunk.previous; } } edit(content, storeName, contentOnly) { this.content = content; if (!contentOnly) { this.intro = ''; this.outro = ''; } this.storeName = storeName; this.edited = true; return this; } prependLeft(content) { this.outro = content + this.outro; } prependRight(content) { this.intro = content + this.intro; } reset() { this.intro = ''; this.outro = ''; if (this.edited) { this.content = this.original; this.storeName = false; this.edited = false; } } split(index) { const sliceIndex = index - this.start; const originalBefore = this.original.slice(0, sliceIndex); const originalAfter = this.original.slice(sliceIndex); this.original = originalBefore; const newChunk = new Chunk(index, this.end, originalAfter); newChunk.outro = this.outro; this.outro = ''; this.end = index; if (this.edited) { // after split we should save the edit content record into the correct chunk // to make sure sourcemap correct // For example: // ' test'.trim() // split -> ' ' + 'test' // ✔️ edit -> '' + 'test' // ✖️ edit -> 'test' + '' // TODO is this block necessary?... 
newChunk.edit('', false); this.content = ''; } else { this.content = originalBefore; } newChunk.next = this.next; if (newChunk.next) newChunk.next.previous = newChunk; newChunk.previous = this; this.next = newChunk; return newChunk; } toString() { return this.intro + this.content + this.outro; } trimEnd(rx) { this.outro = this.outro.replace(rx, ''); if (this.outro.length) return true; const trimmed = this.content.replace(rx, ''); if (trimmed.length) { if (trimmed !== this.content) { this.split(this.start + trimmed.length).edit('', undefined, true); if (this.edited) { // save the change, if it has been edited this.edit(trimmed, this.storeName, true); } } return true; } else { this.edit('', undefined, true); this.intro = this.intro.replace(rx, ''); if (this.intro.length) return true; } } trimStart(rx) { this.intro = this.intro.replace(rx, ''); if (this.intro.length) return true; const trimmed = this.content.replace(rx, ''); if (trimmed.length) { if (trimmed !== this.content) { const newChunk = this.split(this.end - trimmed.length); if (this.edited) { // save the change, if it has been edited newChunk.edit(trimmed, this.storeName, true); } this.edit('', undefined, true); } return true; } else { this.edit('', undefined, true); this.outro = this.outro.replace(rx, ''); if (this.outro.length) return true; } } } function getBtoa() { if (typeof globalThis !== 'undefined' && typeof globalThis.btoa === 'function') { return (str) => globalThis.btoa(unescape(encodeURIComponent(str))); } else if (typeof Buffer === 'function') { return (str) => Buffer.from(str, 'utf-8').toString('base64'); } else { return () => { throw new Error('Unsupported environment: `window.btoa` or `Buffer` should be supported.'); }; } } const btoa$1 = /*#__PURE__*/ getBtoa(); let SourceMap$1 = class SourceMap { constructor(properties) { this.version = 3; this.file = properties.file; this.sources = properties.sources; this.sourcesContent = properties.sourcesContent; this.names = properties.names; this.mappings = encode$1(properties.mappings); if (typeof properties.x_google_ignoreList !== 'undefined') { this.x_google_ignoreList = properties.x_google_ignoreList; } } toString() { return JSON.stringify(this); } toUrl() { return 'data:application/json;charset=utf-8;base64,' + btoa$1(this.toString()); } }; function guessIndent(code) { const lines = code.split('\n'); const tabbed = lines.filter((line) => /^\t+/.test(line)); const spaced = lines.filter((line) => /^ {2,}/.test(line)); if (tabbed.length === 0 && spaced.length === 0) { return null; } // More lines tabbed than spaced? 
Assume tabs, and // default to tabs in the case of a tie (or nothing // to go on) if (tabbed.length >= spaced.length) { return '\t'; } // Otherwise, we need to guess the multiple const min = spaced.reduce((previous, current) => { const numSpaces = /^ +/.exec(current)[0].length; return Math.min(numSpaces, previous); }, Infinity); return new Array(min + 1).join(' '); } function getRelativePath(from, to) { const fromParts = from.split(/[/\\]/); const toParts = to.split(/[/\\]/); fromParts.pop(); // get dirname while (fromParts[0] === toParts[0]) { fromParts.shift(); toParts.shift(); } if (fromParts.length) { let i = fromParts.length; while (i--) fromParts[i] = '..'; } return fromParts.concat(toParts).join('/'); } const toString$2 = Object.prototype.toString; function isObject$2(thing) { return toString$2.call(thing) === '[object Object]'; } function getLocator(source) { const originalLines = source.split('\n'); const lineOffsets = []; for (let i = 0, pos = 0; i < originalLines.length; i++) { lineOffsets.push(pos); pos += originalLines[i].length + 1; } return function locate(index) { let i = 0; let j = lineOffsets.length; while (i < j) { const m = (i + j) >> 1; if (index < lineOffsets[m]) { j = m; } else { i = m + 1; } } const line = i - 1; const column = index - lineOffsets[line]; return { line, column }; }; } const wordRegex = /\w/; class Mappings { constructor(hires) { this.hires = hires; this.generatedCodeLine = 0; this.generatedCodeColumn = 0; this.raw = []; this.rawSegments = this.raw[this.generatedCodeLine] = []; this.pending = null; } addEdit(sourceIndex, content, loc, nameIndex) { if (content.length) { const contentLengthMinusOne = content.length - 1; let contentLineEnd = content.indexOf('\n', 0); let previousContentLineEnd = -1; // Loop through each line in the content and add a segment, but stop if the last line is empty, // else code afterwards would fill one line too many while (contentLineEnd >= 0 && contentLengthMinusOne > contentLineEnd) { const segment = [this.generatedCodeColumn, sourceIndex, loc.line, loc.column]; if (nameIndex >= 0) { segment.push(nameIndex); } this.rawSegments.push(segment); this.generatedCodeLine += 1; this.raw[this.generatedCodeLine] = this.rawSegments = []; this.generatedCodeColumn = 0; previousContentLineEnd = contentLineEnd; contentLineEnd = content.indexOf('\n', contentLineEnd + 1); } const segment = [this.generatedCodeColumn, sourceIndex, loc.line, loc.column]; if (nameIndex >= 0) { segment.push(nameIndex); } this.rawSegments.push(segment); this.advance(content.slice(previousContentLineEnd + 1)); } else if (this.pending) { this.rawSegments.push(this.pending); this.advance(content); } this.pending = null; } addUneditedChunk(sourceIndex, chunk, original, loc, sourcemapLocations) { let originalCharIndex = chunk.start; let first = true; // when iterating each char, check if it's in a word boundary let charInHiresBoundary = false; while (originalCharIndex < chunk.end) { if (this.hires || first || sourcemapLocations.has(originalCharIndex)) { const segment = [this.generatedCodeColumn, sourceIndex, loc.line, loc.column]; if (this.hires === 'boundary') { // in hires "boundary", group segments per word boundary than per char if (wordRegex.test(original[originalCharIndex])) { // for first char in the boundary found, start the boundary by pushing a segment if (!charInHiresBoundary) { this.rawSegments.push(segment); charInHiresBoundary = true; } } else { // for non-word char, end the boundary by pushing a segment this.rawSegments.push(segment); 
charInHiresBoundary = false; } } else { this.rawSegments.push(segment); } } if (original[originalCharIndex] === '\n') { loc.line += 1; loc.column = 0; this.generatedCodeLine += 1; this.raw[this.generatedCodeLine] = this.rawSegments = []; this.generatedCodeColumn = 0; first = true; } else { loc.column += 1; this.generatedCodeColumn += 1; first = false; } originalCharIndex += 1; } this.pending = null; } advance(str) { if (!str) return; const lines = str.split('\n'); if (lines.length > 1) { for (let i = 0; i < lines.length - 1; i++) { this.generatedCodeLine++; this.raw[this.generatedCodeLine] = this.rawSegments = []; } this.generatedCodeColumn = 0; } this.generatedCodeColumn += lines[lines.length - 1].length; } } const n$1 = '\n'; const warned = { insertLeft: false, insertRight: false, storeName: false, }; class MagicString { constructor(string, options = {}) { const chunk = new Chunk(0, string.length, string); Object.defineProperties(this, { original: { writable: true, value: string }, outro: { writable: true, value: '' }, intro: { writable: true, value: '' }, firstChunk: { writable: true, value: chunk }, lastChunk: { writable: true, value: chunk }, lastSearchedChunk: { writable: true, value: chunk }, byStart: { writable: true, value: {} }, byEnd: { writable: true, value: {} }, filename: { writable: true, value: options.filename }, indentExclusionRanges: { writable: true, value: options.indentExclusionRanges }, sourcemapLocations: { writable: true, value: new BitSet() }, storedNames: { writable: true, value: {} }, indentStr: { writable: true, value: undefined }, ignoreList: { writable: true, value: options.ignoreList }, }); this.byStart[0] = chunk; this.byEnd[string.length] = chunk; } addSourcemapLocation(char) { this.sourcemapLocations.add(char); } append(content) { if (typeof content !== 'string') throw new TypeError('outro content must be a string'); this.outro += content; return this; } appendLeft(index, content) { if (typeof content !== 'string') throw new TypeError('inserted content must be a string'); this._split(index); const chunk = this.byEnd[index]; if (chunk) { chunk.appendLeft(content); } else { this.intro += content; } return this; } appendRight(index, content) { if (typeof content !== 'string') throw new TypeError('inserted content must be a string'); this._split(index); const chunk = this.byStart[index]; if (chunk) { chunk.appendRight(content); } else { this.outro += content; } return this; } clone() { const cloned = new MagicString(this.original, { filename: this.filename }); let originalChunk = this.firstChunk; let clonedChunk = (cloned.firstChunk = cloned.lastSearchedChunk = originalChunk.clone()); while (originalChunk) { cloned.byStart[clonedChunk.start] = clonedChunk; cloned.byEnd[clonedChunk.end] = clonedChunk; const nextOriginalChunk = originalChunk.next; const nextClonedChunk = nextOriginalChunk && nextOriginalChunk.clone(); if (nextClonedChunk) { clonedChunk.next = nextClonedChunk; nextClonedChunk.previous = clonedChunk; clonedChunk = nextClonedChunk; } originalChunk = nextOriginalChunk; } cloned.lastChunk = clonedChunk; if (this.indentExclusionRanges) { cloned.indentExclusionRanges = this.indentExclusionRanges.slice(); } cloned.sourcemapLocations = new BitSet(this.sourcemapLocations); cloned.intro = this.intro; cloned.outro = this.outro; return cloned; } generateDecodedMap(options) { options = options || {}; const sourceIndex = 0; const names = Object.keys(this.storedNames); const mappings = new Mappings(options.hires); const locate = getLocator(this.original); if 
(this.intro) { mappings.advance(this.intro); } this.firstChunk.eachNext((chunk) => { const loc = locate(chunk.start); if (chunk.intro.length) mappings.advance(chunk.intro); if (chunk.edited) { mappings.addEdit( sourceIndex, chunk.content, loc, chunk.storeName ? names.indexOf(chunk.original) : -1, ); } else { mappings.addUneditedChunk(sourceIndex, chunk, this.original, loc, this.sourcemapLocations); } if (chunk.outro.length) mappings.advance(chunk.outro); }); return { file: options.file ? options.file.split(/[/\\]/).pop() : undefined, sources: [ options.source ? getRelativePath(options.file || '', options.source) : options.file || '', ], sourcesContent: options.includeContent ? [this.original] : undefined, names, mappings: mappings.raw, x_google_ignoreList: this.ignoreList ? [sourceIndex] : undefined, }; } generateMap(options) { return new SourceMap$1(this.generateDecodedMap(options)); } _ensureindentStr() { if (this.indentStr === undefined) { this.indentStr = guessIndent(this.original); } } _getRawIndentString() { this._ensureindentStr(); return this.indentStr; } getIndentString() { this._ensureindentStr(); return this.indentStr === null ? '\t' : this.indentStr; } indent(indentStr, options) { const pattern = /^[^\r\n]/gm; if (isObject$2(indentStr)) { options = indentStr; indentStr = undefined; } if (indentStr === undefined) { this._ensureindentStr(); indentStr = this.indentStr || '\t'; } if (indentStr === '') return this; // noop options = options || {}; // Process exclusion ranges const isExcluded = {}; if (options.exclude) { const exclusions = typeof options.exclude[0] === 'number' ? [options.exclude] : options.exclude; exclusions.forEach((exclusion) => { for (let i = exclusion[0]; i < exclusion[1]; i += 1) { isExcluded[i] = true; } }); } let shouldIndentNextCharacter = options.indentStart !== false; const replacer = (match) => { if (shouldIndentNextCharacter) return `${indentStr}${match}`; shouldIndentNextCharacter = true; return match; }; this.intro = this.intro.replace(pattern, replacer); let charIndex = 0; let chunk = this.firstChunk; while (chunk) { const end = chunk.end; if (chunk.edited) { if (!isExcluded[charIndex]) { chunk.content = chunk.content.replace(pattern, replacer); if (chunk.content.length) { shouldIndentNextCharacter = chunk.content[chunk.content.length - 1] === '\n'; } } } else { charIndex = chunk.start; while (charIndex < end) { if (!isExcluded[charIndex]) { const char = this.original[charIndex]; if (char === '\n') { shouldIndentNextCharacter = true; } else if (char !== '\r' && shouldIndentNextCharacter) { shouldIndentNextCharacter = false; if (charIndex === chunk.start) { chunk.prependRight(indentStr); } else { this._splitChunk(chunk, charIndex); chunk = chunk.next; chunk.prependRight(indentStr); } } } charIndex += 1; } } charIndex = chunk.end; chunk = chunk.next; } this.outro = this.outro.replace(pattern, replacer); return this; } insert() { throw new Error( 'magicString.insert(...) is deprecated. Use prependRight(...) or appendLeft(...)', ); } insertLeft(index, content) { if (!warned.insertLeft) { console.warn( 'magicString.insertLeft(...) is deprecated. Use magicString.appendLeft(...) instead', ); // eslint-disable-line no-console warned.insertLeft = true; } return this.appendLeft(index, content); } insertRight(index, content) { if (!warned.insertRight) { console.warn( 'magicString.insertRight(...) is deprecated. Use magicString.prependRight(...) 
instead', ); // eslint-disable-line no-console warned.insertRight = true; } return this.prependRight(index, content); } move(start, end, index) { if (index >= start && index <= end) throw new Error('Cannot move a selection inside itself'); this._split(start); this._split(end); this._split(index); const first = this.byStart[start]; const last = this.byEnd[end]; const oldLeft = first.previous; const oldRight = last.next; const newRight = this.byStart[index]; if (!newRight && last === this.lastChunk) return this; const newLeft = newRight ? newRight.previous : this.lastChunk; if (oldLeft) oldLeft.next = oldRight; if (oldRight) oldRight.previous = oldLeft; if (newLeft) newLeft.next = first; if (newRight) newRight.previous = last; if (!first.previous) this.firstChunk = last.next; if (!last.next) { this.lastChunk = first.previous; this.lastChunk.next = null; } first.previous = newLeft; last.next = newRight || null; if (!newLeft) this.firstChunk = first; if (!newRight) this.lastChunk = last; return this; } overwrite(start, end, content, options) { options = options || {}; return this.update(start, end, content, { ...options, overwrite: !options.contentOnly }); } update(start, end, content, options) { if (typeof content !== 'string') throw new TypeError('replacement content must be a string'); while (start < 0) start += this.original.length; while (end < 0) end += this.original.length; if (end > this.original.length) throw new Error('end is out of bounds'); if (start === end) throw new Error( 'Cannot overwrite a zero-length range – use appendLeft or prependRight instead', ); this._split(start); this._split(end); if (options === true) { if (!warned.storeName) { console.warn( 'The final argument to magicString.overwrite(...) should be an options object. See https://github.com/rich-harris/magic-string', ); // eslint-disable-line no-console warned.storeName = true; } options = { storeName: true }; } const storeName = options !== undefined ? options.storeName : false; const overwrite = options !== undefined ? options.overwrite : false; if (storeName) { const original = this.original.slice(start, end); Object.defineProperty(this.storedNames, original, { writable: true, value: true, enumerable: true, }); } const first = this.byStart[start]; const last = this.byEnd[end]; if (first) { let chunk = first; while (chunk !== last) { if (chunk.next !== this.byStart[chunk.end]) { throw new Error('Cannot overwrite across a split point'); } chunk = chunk.next; chunk.edit('', false); } first.edit(content, storeName, !overwrite); } else { // must be inserting at the end const newChunk = new Chunk(start, end, '').edit(content, storeName); // TODO last chunk in the array may not be the last chunk, if it's moved... 
last.next = newChunk; newChunk.previous = last; } return this; } prepend(content) { if (typeof content !== 'string') throw new TypeError('outro content must be a string'); this.intro = content + this.intro; return this; } prependLeft(index, content) { if (typeof content !== 'string') throw new TypeError('inserted content must be a string'); this._split(index); const chunk = this.byEnd[index]; if (chunk) { chunk.prependLeft(content); } else { this.intro = content + this.intro; } return this; } prependRight(index, content) { if (typeof content !== 'string') throw new TypeError('inserted content must be a string'); this._split(index); const chunk = this.byStart[index]; if (chunk) { chunk.prependRight(content); } else { this.outro = content + this.outro; } return this; } remove(start, end) { while (start < 0) start += this.original.length; while (end < 0) end += this.original.length; if (start === end) return this; if (start < 0 || end > this.original.length) throw new Error('Character is out of bounds'); if (start > end) throw new Error('end must be greater than start'); this._split(start); this._split(end); let chunk = this.byStart[start]; while (chunk) { chunk.intro = ''; chunk.outro = ''; chunk.edit(''); chunk = end > chunk.end ? this.byStart[chunk.end] : null; } return this; } reset(start, end) { while (start < 0) start += this.original.length; while (end < 0) end += this.original.length; if (start === end) return this; if (start < 0 || end > this.original.length) throw new Error('Character is out of bounds'); if (start > end) throw new Error('end must be greater than start'); this._split(start); this._split(end); let chunk = this.byStart[start]; while (chunk) { chunk.reset(); chunk = end > chunk.end ? this.byStart[chunk.end] : null; } return this; } lastChar() { if (this.outro.length) return this.outro[this.outro.length - 1]; let chunk = this.lastChunk; do { if (chunk.outro.length) return chunk.outro[chunk.outro.length - 1]; if (chunk.content.length) return chunk.content[chunk.content.length - 1]; if (chunk.intro.length) return chunk.intro[chunk.intro.length - 1]; } while ((chunk = chunk.previous)); if (this.intro.length) return this.intro[this.intro.length - 1]; return ''; } lastLine() { let lineIndex = this.outro.lastIndexOf(n$1); if (lineIndex !== -1) return this.outro.substr(lineIndex + 1); let lineStr = this.outro; let chunk = this.lastChunk; do { if (chunk.outro.length > 0) { lineIndex = chunk.outro.lastIndexOf(n$1); if (lineIndex !== -1) return chunk.outro.substr(lineIndex + 1) + lineStr; lineStr = chunk.outro + lineStr; } if (chunk.content.length > 0) { lineIndex = chunk.content.lastIndexOf(n$1); if (lineIndex !== -1) return chunk.content.substr(lineIndex + 1) + lineStr; lineStr = chunk.content + lineStr; } if (chunk.intro.length > 0) { lineIndex = chunk.intro.lastIndexOf(n$1); if (lineIndex !== -1) return chunk.intro.substr(lineIndex + 1) + lineStr; lineStr = chunk.intro + lineStr; } } while ((chunk = chunk.previous)); lineIndex = this.intro.lastIndexOf(n$1); if (lineIndex !== -1) return this.intro.substr(lineIndex + 1) + lineStr; return this.intro + lineStr; } slice(start = 0, end = this.original.length) { while (start < 0) start += this.original.length; while (end < 0) end += this.original.length; let result = ''; // find start chunk let chunk = this.firstChunk; while (chunk && (chunk.start > start || chunk.end <= start)) { // found end chunk before start if (chunk.start < end && chunk.end >= end) { return result; } chunk = chunk.next; } if (chunk && chunk.edited && 
chunk.start !== start) throw new Error(`Cannot use replaced character ${start} as slice start anchor.`); const startChunk = chunk; while (chunk) { if (chunk.intro && (startChunk !== chunk || chunk.start === start)) { result += chunk.intro; } const containsEnd = chunk.start < end && chunk.end >= end; if (containsEnd && chunk.edited && chunk.end !== end) throw new Error(`Cannot use replaced character ${end} as slice end anchor.`); const sliceStart = startChunk === chunk ? start - chunk.start : 0; const sliceEnd = containsEnd ? chunk.content.length + end - chunk.end : chunk.content.length; result += chunk.content.slice(sliceStart, sliceEnd); if (chunk.outro && (!containsEnd || chunk.end === end)) { result += chunk.outro; } if (containsEnd) { break; } chunk = chunk.next; } return result; } // TODO deprecate this? not really very useful snip(start, end) { const clone = this.clone(); clone.remove(0, start); clone.remove(end, clone.original.length); return clone; } _split(index) { if (this.byStart[index] || this.byEnd[index]) return; let chunk = this.lastSearchedChunk; const searchForward = index > chunk.end; while (chunk) { if (chunk.contains(index)) return this._splitChunk(chunk, index); chunk = searchForward ? this.byStart[chunk.end] : this.byEnd[chunk.start]; } } _splitChunk(chunk, index) { if (chunk.edited && chunk.content.length) { // zero-length edited chunks are a special case (overlapping replacements) const loc = getLocator(this.original)(index); throw new Error( `Cannot split a chunk that has already been edited (${loc.line}:${loc.column} – "${chunk.original}")`, ); } const newChunk = chunk.split(index); this.byEnd[index] = chunk; this.byStart[index] = newChunk; this.byEnd[newChunk.end] = newChunk; if (chunk === this.lastChunk) this.lastChunk = newChunk; this.lastSearchedChunk = chunk; return true; } toString() { let str = this.intro; let chunk = this.firstChunk; while (chunk) { str += chunk.toString(); chunk = chunk.next; } return str + this.outro; } isEmpty() { let chunk = this.firstChunk; do { if ( (chunk.intro.length && chunk.intro.trim()) || (chunk.content.length && chunk.content.trim()) || (chunk.outro.length && chunk.outro.trim()) ) return false; } while ((chunk = chunk.next)); return true; } length() { let chunk = this.firstChunk; let length = 0; do { length += chunk.intro.length + chunk.content.length + chunk.outro.length; } while ((chunk = chunk.next)); return length; } trimLines() { return this.trim('[\\r\\n]'); } trim(charType) { return this.trimStart(charType).trimEnd(charType); } trimEndAborted(charType) { const rx = new RegExp((charType || '\\s') + '+$'); this.outro = this.outro.replace(rx, ''); if (this.outro.length) return true; let chunk = this.lastChunk; do { const end = chunk.end; const aborted = chunk.trimEnd(rx); // if chunk was trimmed, we have a new lastChunk if (chunk.end !== end) { if (this.lastChunk === chunk) { this.lastChunk = chunk.next; } this.byEnd[chunk.end] = chunk; this.byStart[chunk.next.start] = chunk.next; this.byEnd[chunk.next.end] = chunk.next; } if (aborted) return true; chunk = chunk.previous; } while (chunk); return false; } trimEnd(charType) { this.trimEndAborted(charType); return this; } trimStartAborted(charType) { const rx = new RegExp('^' + (charType || '\\s') + '+'); this.intro = this.intro.replace(rx, ''); if (this.intro.length) return true; let chunk = this.firstChunk; do { const end = chunk.end; const aborted = chunk.trimStart(rx); if (chunk.end !== end) { // special case... 
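// Small sketch of the insertion/removal helpers defined above (illustrative only).
// prependLeft/prependRight insert at an index without consuming characters, remove()
// blanks a range of the original, and slice()/toString() read the edited result back out;
// the replace()/replaceAll() helpers further down rewrite matches of the original text
// the same way, via overwrite().
//
//   const s = new MagicString('a b c d');
//   s.remove(2, 4);          // toString() === 'a c d'
//   s.prependLeft(2, '[');   // toString() === 'a [c d'
//   s.prependRight(4, ']');  // toString() === 'a []c d'
//   s.slice(4, 7);           // ']c d' – slice() uses original coordinates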
if (chunk === this.lastChunk) this.lastChunk = chunk.next; this.byEnd[chunk.end] = chunk; this.byStart[chunk.next.start] = chunk.next; this.byEnd[chunk.next.end] = chunk.next; } if (aborted) return true; chunk = chunk.next; } while (chunk); return false; } trimStart(charType) { this.trimStartAborted(charType); return this; } hasChanged() { return this.original !== this.toString(); } _replaceRegexp(searchValue, replacement) { function getReplacement(match, str) { if (typeof replacement === 'string') { return replacement.replace(/\$(\$|&|\d+)/g, (_, i) => { // https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/replace#specifying_a_string_as_a_parameter if (i === '$') return '$'; if (i === '&') return match[0]; const num = +i; if (num < match.length) return match[+i]; return `$${i}`; }); } else { return replacement(...match, match.index, str, match.groups); } } function matchAll(re, str) { let match; const matches = []; while ((match = re.exec(str))) { matches.push(match); } return matches; } if (searchValue.global) { const matches = matchAll(searchValue, this.original); matches.forEach((match) => { if (match.index != null) this.overwrite( match.index, match.index + match[0].length, getReplacement(match, this.original), ); }); } else { const match = this.original.match(searchValue); if (match && match.index != null) this.overwrite( match.index, match.index + match[0].length, getReplacement(match, this.original), ); } return this; } _replaceString(string, replacement) { const { original } = this; const index = original.indexOf(string); if (index !== -1) { this.overwrite(index, index + string.length, replacement); } return this; } replace(searchValue, replacement) { if (typeof searchValue === 'string') { return this._replaceString(searchValue, replacement); } return this._replaceRegexp(searchValue, replacement); } _replaceAllString(string, replacement) { const { original } = this; const stringLength = string.length; for ( let index = original.indexOf(string); index !== -1; index = original.indexOf(string, index + stringLength) ) { this.overwrite(index, index + stringLength, replacement); } return this; } replaceAll(searchValue, replacement) { if (typeof searchValue === 'string') { return this._replaceAllString(searchValue, replacement); } if (!searchValue.global) { throw new TypeError( 'MagicString.prototype.replaceAll called with a non-global RegExp argument', ); } return this._replaceRegexp(searchValue, replacement); } } function isReference(node, parent) { if (node.type === 'MemberExpression') { return !node.computed && isReference(node.object, node); } if (node.type === 'Identifier') { if (!parent) return true; switch (parent.type) { // disregard `bar` in `foo.bar` case 'MemberExpression': return parent.computed || node === parent.object; // disregard the `foo` in `class {foo(){}}` but keep it in `class {[foo](){}}` case 'MethodDefinition': return parent.computed; // disregard the `foo` in `class {foo=bar}` but keep it in `class {[foo]=bar}` and `class {bar=foo}` case 'FieldDefinition': return parent.computed || node === parent.value; // disregard the `bar` in `{ bar: foo }`, but keep it in `{ [bar]: foo }` case 'Property': return parent.computed || node === parent.value; // disregard the `bar` in `export { foo as bar }` or // the foo in `import { foo as bar }` case 'ExportSpecifier': case 'ImportSpecifier': return node === parent.local; // disregard the `foo` in `foo: while (...) { ... break foo; ... 
continue foo;}` case 'LabeledStatement': case 'BreakStatement': case 'ContinueStatement': return false; default: return true; } } return false; } var version$3 = "25.0.7"; var peerDependencies = { rollup: "^2.68.0||^3.0.0||^4.0.0" }; function tryParse(parse, code, id) { try { return parse(code, { allowReturnOutsideFunction: true }); } catch (err) { err.message += ` in ${id}`; throw err; } } const firstpassGlobal = /\b(?:require|module|exports|global)\b/; const firstpassNoGlobal = /\b(?:require|module|exports)\b/; function hasCjsKeywords(code, ignoreGlobal) { const firstpass = ignoreGlobal ? firstpassNoGlobal : firstpassGlobal; return firstpass.test(code); } /* eslint-disable no-underscore-dangle */ function analyzeTopLevelStatements(parse, code, id) { const ast = tryParse(parse, code, id); let isEsModule = false; let hasDefaultExport = false; let hasNamedExports = false; for (const node of ast.body) { switch (node.type) { case 'ExportDefaultDeclaration': isEsModule = true; hasDefaultExport = true; break; case 'ExportNamedDeclaration': isEsModule = true; if (node.declaration) { hasNamedExports = true; } else { for (const specifier of node.specifiers) { if (specifier.exported.name === 'default') { hasDefaultExport = true; } else { hasNamedExports = true; } } } break; case 'ExportAllDeclaration': isEsModule = true; if (node.exported && node.exported.name === 'default') { hasDefaultExport = true; } else { hasNamedExports = true; } break; case 'ImportDeclaration': isEsModule = true; break; } } return { isEsModule, hasDefaultExport, hasNamedExports, ast }; } /* eslint-disable import/prefer-default-export */ function deconflict(scopes, globals, identifier) { let i = 1; let deconflicted = makeLegalIdentifier(identifier); const hasConflicts = () => scopes.some((scope) => scope.contains(deconflicted)) || globals.has(deconflicted); while (hasConflicts()) { deconflicted = makeLegalIdentifier(`${identifier}_${i}`); i += 1; } for (const scope of scopes) { scope.declarations[deconflicted] = true; } return deconflicted; } function getName(id) { const name = makeLegalIdentifier(basename$1(id, extname(id))); if (name !== 'index') { return name; } return makeLegalIdentifier(basename$1(dirname$1(id))); } function normalizePathSlashes(path) { return path.replace(/\\/g, '/'); } const getVirtualPathForDynamicRequirePath = (path, commonDir) => `/${normalizePathSlashes(relative$1(commonDir, path))}`; function capitalize(name) { return name[0].toUpperCase() + name.slice(1); } function getStrictRequiresFilter({ strictRequires }) { switch (strictRequires) { case true: return { strictRequiresFilter: () => true, detectCyclesAndConditional: false }; // eslint-disable-next-line no-undefined case undefined: case 'auto': case 'debug': case null: return { strictRequiresFilter: () => false, detectCyclesAndConditional: true }; case false: return { strictRequiresFilter: () => false, detectCyclesAndConditional: false }; default: if (typeof strictRequires === 'string' || Array.isArray(strictRequires)) { return { strictRequiresFilter: createFilter$1(strictRequires), detectCyclesAndConditional: false }; } throw new Error('Unexpected value for "strictRequires" option.'); } } function getPackageEntryPoint(dirPath) { let entryPoint = 'index.js'; try { if (existsSync(join$1(dirPath, 'package.json'))) { entryPoint = JSON.parse(readFileSync(join$1(dirPath, 'package.json'), { encoding: 'utf8' })).main || entryPoint; } } catch (ignored) { // ignored } return entryPoint; } function isDirectory$1(path) { try { if 
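// How the strictRequires option handled by getStrictRequiresFilter() above maps onto
// behaviour (values shown are illustrative):
//   true                              -> every CommonJS module is wrapped in a lazy __require()
//   undefined / 'auto' / 'debug' / null -> nothing is wrapped up front, but cyclic or only
//                                        conditionally required modules are detected and wrapped
//   false                             -> nothing is wrapped and no cycle detection runs
//   'glob' or [globs]                 -> modules matching the filter are wrapped
//
//   commonjs({ strictRequires: 'auto' })   // hypothetical call, shown for shape only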
(statSync$1(path).isDirectory()) return true; } catch (ignored) { // Nothing to do here } return false; } function getDynamicRequireModules(patterns, dynamicRequireRoot) { const dynamicRequireModules = new Map(); const dirNames = new Set(); for (const pattern of !patterns || Array.isArray(patterns) ? patterns || [] : [patterns]) { const isNegated = pattern.startsWith('!'); const modifyMap = (targetPath, resolvedPath) => isNegated ? dynamicRequireModules.delete(targetPath) : dynamicRequireModules.set(targetPath, resolvedPath); for (const path of glob$1.sync(isNegated ? pattern.substr(1) : pattern)) { const resolvedPath = resolve$3(path); const requirePath = normalizePathSlashes(resolvedPath); if (isDirectory$1(resolvedPath)) { dirNames.add(resolvedPath); const modulePath = resolve$3(join$1(resolvedPath, getPackageEntryPoint(path))); modifyMap(requirePath, modulePath); modifyMap(normalizePathSlashes(modulePath), modulePath); } else { dirNames.add(dirname$1(resolvedPath)); modifyMap(requirePath, resolvedPath); } } } return { commonDir: dirNames.size ? getCommonDir([...dirNames, dynamicRequireRoot]) : null, dynamicRequireModules }; } const FAILED_REQUIRE_ERROR = `throw new Error('Could not dynamically require "' + path + '". Please configure the dynamicRequireTargets or/and ignoreDynamicRequires option of @rollup/plugin-commonjs appropriately for this require call to work.');`; const COMMONJS_REQUIRE_EXPORT = 'commonjsRequire'; const CREATE_COMMONJS_REQUIRE_EXPORT = 'createCommonjsRequire'; function getDynamicModuleRegistry( isDynamicRequireModulesEnabled, dynamicRequireModules, commonDir, ignoreDynamicRequires ) { if (!isDynamicRequireModulesEnabled) { return `export function ${COMMONJS_REQUIRE_EXPORT}(path) { ${FAILED_REQUIRE_ERROR} }`; } const dynamicModuleImports = [...dynamicRequireModules.values()] .map( (id, index) => `import ${ id.endsWith('.json') ? `json${index}` : `{ __require as require${index} }` } from ${JSON.stringify(id)};` ) .join('\n'); const dynamicModuleProps = [...dynamicRequireModules.keys()] .map( (id, index) => `\t\t${JSON.stringify(getVirtualPathForDynamicRequirePath(id, commonDir))}: ${ id.endsWith('.json') ? `function () { return json${index}; }` : `require${index}` }` ) .join(',\n'); return `${dynamicModuleImports} var dynamicModules; function getDynamicModules() { return dynamicModules || (dynamicModules = { ${dynamicModuleProps} }); } export function ${CREATE_COMMONJS_REQUIRE_EXPORT}(originalModuleDir) { function handleRequire(path) { var resolvedPath = commonjsResolve(path, originalModuleDir); if (resolvedPath !== null) { return getDynamicModules()[resolvedPath](); } ${ignoreDynamicRequires ? 
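// dynamicRequireTargets, resolved by getDynamicRequireModules() above, takes glob
// patterns; a leading '!' removes earlier matches again. A sketch with made-up paths:
//
//   commonjs({
//     dynamicRequireTargets: [
//       'node_modules/some-lib/locales/*.js',     // hypothetical files
//       '!node_modules/some-lib/locales/dev.js'
//     ],
//     ignoreDynamicRequires: false
//   })
//
// Matched files end up in the generated '\0commonjs-dynamic-modules' registry; a dynamic
// require(path) that cannot be resolved there either falls back to the native require
// (ignoreDynamicRequires: true) or throws the error text above.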
'return require(path);' : FAILED_REQUIRE_ERROR} } handleRequire.resolve = function (path) { var resolvedPath = commonjsResolve(path, originalModuleDir); if (resolvedPath !== null) { return resolvedPath; } return require.resolve(path); } return handleRequire; } function commonjsResolve (path, originalModuleDir) { var shouldTryNodeModules = isPossibleNodeModulesPath(path); path = normalize(path); var relPath; if (path[0] === '/') { originalModuleDir = ''; } var modules = getDynamicModules(); var checkedExtensions = ['', '.js', '.json']; while (true) { if (!shouldTryNodeModules) { relPath = normalize(originalModuleDir + '/' + path); } else { relPath = normalize(originalModuleDir + '/node_modules/' + path); } if (relPath.endsWith('/..')) { break; // Travelled too far up, avoid infinite loop } for (var extensionIndex = 0; extensionIndex < checkedExtensions.length; extensionIndex++) { var resolvedPath = relPath + checkedExtensions[extensionIndex]; if (modules[resolvedPath]) { return resolvedPath; } } if (!shouldTryNodeModules) break; var nextDir = normalize(originalModuleDir + '/..'); if (nextDir === originalModuleDir) break; originalModuleDir = nextDir; } return null; } function isPossibleNodeModulesPath (modulePath) { var c0 = modulePath[0]; if (c0 === '/' || c0 === '\\\\') return false; var c1 = modulePath[1], c2 = modulePath[2]; if ((c0 === '.' && (!c1 || c1 === '/' || c1 === '\\\\')) || (c0 === '.' && c1 === '.' && (!c2 || c2 === '/' || c2 === '\\\\'))) return false; if (c1 === ':' && (c2 === '/' || c2 === '\\\\')) return false; return true; } function normalize (path) { path = path.replace(/\\\\/g, '/'); var parts = path.split('/'); var slashed = parts[0] === ''; for (var i = 1; i < parts.length; i++) { if (parts[i] === '.' || parts[i] === '') { parts.splice(i--, 1); } } for (var i = 1; i < parts.length; i++) { if (parts[i] !== '..') continue; if (i > 0 && parts[i - 1] !== '..' && parts[i - 1] !== '.') { parts.splice(--i, 2); i--; } } path = parts.join('/'); if (slashed && path[0] !== '/') path = '/' + path; else if (path.length === 0) path = '.'; return path; }`; } const isWrappedId = (id, suffix) => id.endsWith(suffix); const wrapId = (id, suffix) => `\0${id}${suffix}`; const unwrapId = (wrappedId, suffix) => wrappedId.slice(1, -suffix.length); const PROXY_SUFFIX = '?commonjs-proxy'; const WRAPPED_SUFFIX = '?commonjs-wrapped'; const EXTERNAL_SUFFIX = '?commonjs-external'; const EXPORTS_SUFFIX = '?commonjs-exports'; const MODULE_SUFFIX = '?commonjs-module'; const ENTRY_SUFFIX = '?commonjs-entry'; const ES_IMPORT_SUFFIX = '?commonjs-es-import'; const DYNAMIC_MODULES_ID = '\0commonjs-dynamic-modules'; const HELPERS_ID = '\0commonjsHelpers.js'; const IS_WRAPPED_COMMONJS = 'withRequireFunction'; // `x['default']` is used instead of `x.default` for backward compatibility with ES3 browsers. // Minifiers like uglify will usually transpile it back if compatibility with ES3 is not enabled. // This could be improved by inspecting Rollup's "generatedCode" option const HELPERS = ` export var commonjsGlobal = typeof globalThis !== 'undefined' ? globalThis : typeof window !== 'undefined' ? window : typeof global !== 'undefined' ? global : typeof self !== 'undefined' ? self : {}; export function getDefaultExportFromCjs (x) { return x && x.__esModule && Object.prototype.hasOwnProperty.call(x, 'default') ? x['default'] : x; } export function getDefaultExportFromNamespaceIfPresent (n) { return n && Object.prototype.hasOwnProperty.call(n, 'default') ? 
n['default'] : n; } export function getDefaultExportFromNamespaceIfNotNamed (n) { return n && Object.prototype.hasOwnProperty.call(n, 'default') && Object.keys(n).length === 1 ? n['default'] : n; } export function getAugmentedNamespace(n) { if (n.__esModule) return n; var f = n.default; if (typeof f == "function") { var a = function a () { if (this instanceof a) { return Reflect.construct(f, arguments, this.constructor); } return f.apply(this, arguments); }; a.prototype = f.prototype; } else a = {}; Object.defineProperty(a, '__esModule', {value: true}); Object.keys(n).forEach(function (k) { var d = Object.getOwnPropertyDescriptor(n, k); Object.defineProperty(a, k, d.get ? d : { enumerable: true, get: function () { return n[k]; } }); }); return a; } `; function getHelpersModule() { return HELPERS; } function getUnknownRequireProxy(id, requireReturnsDefault) { if (requireReturnsDefault === true || id.endsWith('.json')) { return `export { default } from ${JSON.stringify(id)};`; } const name = getName(id); const exported = requireReturnsDefault === 'auto' ? `import { getDefaultExportFromNamespaceIfNotNamed } from "${HELPERS_ID}"; export default /*@__PURE__*/getDefaultExportFromNamespaceIfNotNamed(${name});` : requireReturnsDefault === 'preferred' ? `import { getDefaultExportFromNamespaceIfPresent } from "${HELPERS_ID}"; export default /*@__PURE__*/getDefaultExportFromNamespaceIfPresent(${name});` : !requireReturnsDefault ? `import { getAugmentedNamespace } from "${HELPERS_ID}"; export default /*@__PURE__*/getAugmentedNamespace(${name});` : `export default ${name};`; return `import * as ${name} from ${JSON.stringify(id)}; ${exported}`; } async function getStaticRequireProxy(id, requireReturnsDefault, loadModule) { const name = getName(id); const { meta: { commonjs: commonjsMeta } } = await loadModule({ id }); if (!commonjsMeta) { return getUnknownRequireProxy(id, requireReturnsDefault); } if (commonjsMeta.isCommonJS) { return `export { __moduleExports as default } from ${JSON.stringify(id)};`; } if (!requireReturnsDefault) { return `import { getAugmentedNamespace } from "${HELPERS_ID}"; import * as ${name} from ${JSON.stringify( id )}; export default /*@__PURE__*/getAugmentedNamespace(${name});`; } if ( requireReturnsDefault !== true && (requireReturnsDefault === 'namespace' || !commonjsMeta.hasDefaultExport || (requireReturnsDefault === 'auto' && commonjsMeta.hasNamedExports)) ) { return `import * as ${name} from ${JSON.stringify(id)}; export default ${name};`; } return `export { default } from ${JSON.stringify(id)};`; } function getEntryProxy(id, defaultIsModuleExports, getModuleInfo, shebang) { const { meta: { commonjs: commonjsMeta }, hasDefaultExport } = getModuleInfo(id); if (!commonjsMeta || commonjsMeta.isCommonJS !== IS_WRAPPED_COMMONJS) { const stringifiedId = JSON.stringify(id); let code = `export * from ${stringifiedId};`; if (hasDefaultExport) { code += `export { default } from ${stringifiedId};`; } return shebang + code; } const result = getEsImportProxy(id, defaultIsModuleExports); return { ...result, code: shebang + result.code }; } function getEsImportProxy(id, defaultIsModuleExports) { const name = getName(id); const exportsName = `${name}Exports`; const requireModule = `require${capitalize(name)}`; let code = `import { getDefaultExportFromCjs } from "${HELPERS_ID}";\n` + `import { __require as ${requireModule} } from ${JSON.stringify(id)};\n` + `var ${exportsName} = ${requireModule}();\n` + `export { ${exportsName} as __moduleExports };`; if (defaultIsModuleExports === true) 
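// What require()-ing an ES or external module evaluates to, depending on the
// requireReturnsDefault option (see getUnknownRequireProxy/getStaticRequireProxy above;
// values are illustrative):
//   false or unset -> getAugmentedNamespace(ns): the namespace with a synthetic default
//   'auto'         -> the default export when the module has no other named exports,
//                     otherwise the namespace
//   'preferred'    -> the default export whenever the module has one
//   true           -> always re-export the module's default export
//   'namespace'    -> the namespace object itself
//
//   commonjs({ requireReturnsDefault: 'auto' })   // hypothetical call, for shape only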
{ code += `\nexport { ${exportsName} as default };`; } else { code += `export default /*@__PURE__*/getDefaultExportFromCjs(${exportsName});`; } return { code, syntheticNamedExports: '__moduleExports' }; } /* eslint-disable no-param-reassign, no-undefined */ function getCandidatesForExtension(resolved, extension) { return [resolved + extension, `${resolved}${sep}index${extension}`]; } function getCandidates(resolved, extensions) { return extensions.reduce( (paths, extension) => paths.concat(getCandidatesForExtension(resolved, extension)), [resolved] ); } function resolveExtensions(importee, importer, extensions) { // not our problem if (importee[0] !== '.' || !importer) return undefined; const resolved = resolve$3(dirname$1(importer), importee); const candidates = getCandidates(resolved, extensions); for (let i = 0; i < candidates.length; i += 1) { try { const stats = statSync$1(candidates[i]); if (stats.isFile()) return { id: candidates[i] }; } catch (err) { /* noop */ } } return undefined; } function getResolveId(extensions, isPossibleCjsId) { const currentlyResolving = new Map(); return { /** * This is a Maps of importers to Sets of require sources being resolved at * the moment by resolveRequireSourcesAndUpdateMeta */ currentlyResolving, async resolveId(importee, importer, resolveOptions) { const customOptions = resolveOptions.custom; // All logic below is specific to ES imports. // Also, if we do not skip this logic for requires that are resolved while // transforming a commonjs file, it can easily lead to deadlocks. if ( customOptions && customOptions['node-resolve'] && customOptions['node-resolve'].isRequire ) { return null; } const currentlyResolvingForParent = currentlyResolving.get(importer); if (currentlyResolvingForParent && currentlyResolvingForParent.has(importee)) { this.warn({ code: 'THIS_RESOLVE_WITHOUT_OPTIONS', message: 'It appears a plugin has implemented a "resolveId" hook that uses "this.resolve" without forwarding the third "options" parameter of "resolveId". 
This is problematic as it can lead to wrong module resolutions especially for the node-resolve plugin and in certain cases cause early exit errors for the commonjs plugin.\nIn rare cases, this warning can appear if the same file is both imported and required from the same mixed ES/CommonJS module, in which case it can be ignored.', url: 'https://rollupjs.org/guide/en/#resolveid' }); return null; } if (isWrappedId(importee, WRAPPED_SUFFIX)) { return unwrapId(importee, WRAPPED_SUFFIX); } if ( importee.endsWith(ENTRY_SUFFIX) || isWrappedId(importee, MODULE_SUFFIX) || isWrappedId(importee, EXPORTS_SUFFIX) || isWrappedId(importee, PROXY_SUFFIX) || isWrappedId(importee, ES_IMPORT_SUFFIX) || isWrappedId(importee, EXTERNAL_SUFFIX) || importee.startsWith(HELPERS_ID) || importee === DYNAMIC_MODULES_ID ) { return importee; } if (importer) { if ( importer === DYNAMIC_MODULES_ID || // Proxies are only importing resolved ids, no need to resolve again isWrappedId(importer, PROXY_SUFFIX) || isWrappedId(importer, ES_IMPORT_SUFFIX) || importer.endsWith(ENTRY_SUFFIX) ) { return importee; } if (isWrappedId(importer, EXTERNAL_SUFFIX)) { // We need to return null for unresolved imports so that the proper warning is shown if ( !(await this.resolve( importee, importer, Object.assign({ skipSelf: true }, resolveOptions) )) ) { return null; } // For other external imports, we need to make sure they are handled as external return { id: importee, external: true }; } } if (importee.startsWith('\0')) { return null; } // If this is an entry point or ESM import, we need to figure out if the importee is wrapped and // if that is the case, we need to add a proxy. const resolved = (await this.resolve( importee, importer, Object.assign({ skipSelf: true }, resolveOptions) )) || resolveExtensions(importee, importer, extensions); // Make sure that even if other plugins resolve again, we ignore our own proxies if ( !resolved || resolved.external || resolved.id.endsWith(ENTRY_SUFFIX) || isWrappedId(resolved.id, ES_IMPORT_SUFFIX) || !isPossibleCjsId(resolved.id) ) { return resolved; } const moduleInfo = await this.load(resolved); const { meta: { commonjs: commonjsMeta } } = moduleInfo; if (commonjsMeta) { const { isCommonJS } = commonjsMeta; if (isCommonJS) { if (resolveOptions.isEntry) { moduleInfo.moduleSideEffects = true; // We must not precede entry proxies with a `\0` as that will mess up relative external resolution return resolved.id + ENTRY_SUFFIX; } if (isCommonJS === IS_WRAPPED_COMMONJS) { return { id: wrapId(resolved.id, ES_IMPORT_SUFFIX), meta: { commonjs: { resolved } } }; } } } return resolved; } }; } function getRequireResolver(extensions, detectCyclesAndConditional, currentlyResolving) { const knownCjsModuleTypes = Object.create(null); const requiredIds = Object.create(null); const unconditionallyRequiredIds = Object.create(null); const dependencies = Object.create(null); const getDependencies = (id) => dependencies[id] || (dependencies[id] = new Set()); const isCyclic = (id) => { const dependenciesToCheck = new Set(getDependencies(id)); for (const dependency of dependenciesToCheck) { if (dependency === id) { return true; } for (const childDependency of getDependencies(dependency)) { dependenciesToCheck.add(childDependency); } } return false; }; // Once a module is listed here, its type (wrapped or not) is fixed and may // not change for the rest of the current build, to not break already // transformed modules. 
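// The module types tracked in knownCjsModuleTypes above take one of three values:
//   false                 -> an ES module (or not CommonJS at all)
//   true                  -> plain CommonJS, converted to hoisted ES imports/exports
//   'withRequireFunction' -> CommonJS wrapped in a lazy __require() (IS_WRAPPED_COMMONJS),
//                            used for cyclic requires and requires that are only reached
//                            conditionally, so evaluation order is preserved.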
const fullyAnalyzedModules = Object.create(null); const getTypeForFullyAnalyzedModule = (id) => { const knownType = knownCjsModuleTypes[id]; if (knownType !== true || !detectCyclesAndConditional || fullyAnalyzedModules[id]) { return knownType; } if (isCyclic(id)) { return (knownCjsModuleTypes[id] = IS_WRAPPED_COMMONJS); } return knownType; }; const setInitialParentType = (id, initialCommonJSType) => { // Fully analyzed modules may never change type if (fullyAnalyzedModules[id]) { return; } knownCjsModuleTypes[id] = initialCommonJSType; if ( detectCyclesAndConditional && knownCjsModuleTypes[id] === true && requiredIds[id] && !unconditionallyRequiredIds[id] ) { knownCjsModuleTypes[id] = IS_WRAPPED_COMMONJS; } }; const analyzeRequiredModule = async (parentId, resolved, isConditional, loadModule) => { const childId = resolved.id; requiredIds[childId] = true; if (!(isConditional || knownCjsModuleTypes[parentId] === IS_WRAPPED_COMMONJS)) { unconditionallyRequiredIds[childId] = true; } getDependencies(parentId).add(childId); if (!isCyclic(childId)) { // This makes sure the current transform handler waits for all direct // dependencies to be loaded and transformed and therefore for all // transitive CommonJS dependencies to be loaded as well so that all // cycles have been found and knownCjsModuleTypes is reliable. await loadModule(resolved); } }; const getTypeForImportedModule = async (resolved, loadModule) => { if (resolved.id in knownCjsModuleTypes) { // This handles cyclic ES dependencies return knownCjsModuleTypes[resolved.id]; } const { meta: { commonjs } } = await loadModule(resolved); return (commonjs && commonjs.isCommonJS) || false; }; return { getWrappedIds: () => Object.keys(knownCjsModuleTypes).filter( (id) => knownCjsModuleTypes[id] === IS_WRAPPED_COMMONJS ), isRequiredId: (id) => requiredIds[id], async shouldTransformCachedModule({ id: parentId, resolvedSources, meta: { commonjs: parentMeta } }) { // We explicitly track ES modules to handle circular imports if (!(parentMeta && parentMeta.isCommonJS)) knownCjsModuleTypes[parentId] = false; if (isWrappedId(parentId, ES_IMPORT_SUFFIX)) return false; const parentRequires = parentMeta && parentMeta.requires; if (parentRequires) { setInitialParentType(parentId, parentMeta.initialCommonJSType); await Promise.all( parentRequires.map(({ resolved, isConditional }) => analyzeRequiredModule(parentId, resolved, isConditional, this.load) ) ); if (getTypeForFullyAnalyzedModule(parentId) !== parentMeta.isCommonJS) { return true; } for (const { resolved: { id } } of parentRequires) { if (getTypeForFullyAnalyzedModule(id) !== parentMeta.isRequiredCommonJS[id]) { return true; } } // Now that we decided to go with the cached copy, neither the parent // module nor any of its children may change types anymore fullyAnalyzedModules[parentId] = true; for (const { resolved: { id } } of parentRequires) { fullyAnalyzedModules[id] = true; } } const parentRequireSet = new Set((parentRequires || []).map(({ resolved: { id } }) => id)); return ( await Promise.all( Object.keys(resolvedSources) .map((source) => resolvedSources[source]) .filter(({ id, external }) => !(external || parentRequireSet.has(id))) .map(async (resolved) => { if (isWrappedId(resolved.id, ES_IMPORT_SUFFIX)) { return ( (await getTypeForImportedModule( ( await this.load({ id: resolved.id }) ).meta.commonjs.resolved, this.load )) !== IS_WRAPPED_COMMONJS ); } return (await getTypeForImportedModule(resolved, this.load)) === IS_WRAPPED_COMMONJS; }) ) ).some((shouldTransform) => shouldTransform); }, /* 
eslint-disable no-param-reassign */ resolveRequireSourcesAndUpdateMeta: (rollupContext) => async (parentId, isParentCommonJS, parentMeta, sources) => { parentMeta.initialCommonJSType = isParentCommonJS; parentMeta.requires = []; parentMeta.isRequiredCommonJS = Object.create(null); setInitialParentType(parentId, isParentCommonJS); const currentlyResolvingForParent = currentlyResolving.get(parentId) || new Set(); currentlyResolving.set(parentId, currentlyResolvingForParent); const requireTargets = await Promise.all( sources.map(async ({ source, isConditional }) => { // Never analyze or proxy internal modules if (source.startsWith('\0')) { return { id: source, allowProxy: false }; } currentlyResolvingForParent.add(source); const resolved = (await rollupContext.resolve(source, parentId, { skipSelf: false, custom: { 'node-resolve': { isRequire: true } } })) || resolveExtensions(source, parentId, extensions); currentlyResolvingForParent.delete(source); if (!resolved) { return { id: wrapId(source, EXTERNAL_SUFFIX), allowProxy: false }; } const childId = resolved.id; if (resolved.external) { return { id: wrapId(childId, EXTERNAL_SUFFIX), allowProxy: false }; } parentMeta.requires.push({ resolved, isConditional }); await analyzeRequiredModule(parentId, resolved, isConditional, rollupContext.load); return { id: childId, allowProxy: true }; }) ); parentMeta.isCommonJS = getTypeForFullyAnalyzedModule(parentId); fullyAnalyzedModules[parentId] = true; return requireTargets.map(({ id: dependencyId, allowProxy }, index) => { // eslint-disable-next-line no-multi-assign const isCommonJS = (parentMeta.isRequiredCommonJS[dependencyId] = getTypeForFullyAnalyzedModule(dependencyId)); fullyAnalyzedModules[dependencyId] = true; return { source: sources[index].source, id: allowProxy ? isCommonJS === IS_WRAPPED_COMMONJS ? wrapId(dependencyId, WRAPPED_SUFFIX) : wrapId(dependencyId, PROXY_SUFFIX) : dependencyId, isCommonJS }; }); }, isCurrentlyResolving(source, parentId) { const currentlyResolvingForParent = currentlyResolving.get(parentId); return currentlyResolvingForParent && currentlyResolvingForParent.has(source); } }; } function validateVersion(actualVersion, peerDependencyVersion, name) { const versionRegexp = /\^(\d+\.\d+\.\d+)/g; let minMajor = Infinity; let minMinor = Infinity; let minPatch = Infinity; let foundVersion; // eslint-disable-next-line no-cond-assign while ((foundVersion = versionRegexp.exec(peerDependencyVersion))) { const [foundMajor, foundMinor, foundPatch] = foundVersion[1].split('.').map(Number); if (foundMajor < minMajor) { minMajor = foundMajor; minMinor = foundMinor; minPatch = foundPatch; } } if (!actualVersion) { throw new Error( `Insufficient ${name} version: "@rollup/plugin-commonjs" requires at least ${name}@${minMajor}.${minMinor}.${minPatch}.` ); } const [major, minor, patch] = actualVersion.split('.').map(Number); if ( major < minMajor || (major === minMajor && (minor < minMinor || (minor === minMinor && patch < minPatch))) ) { throw new Error( `Insufficient ${name} version: "@rollup/plugin-commonjs" requires at least ${name}@${minMajor}.${minMinor}.${minPatch} but found ${name}@${actualVersion}.` ); } } const operators = { '==': (x) => equals(x.left, x.right, false), '!=': (x) => not(operators['=='](x)), '===': (x) => equals(x.left, x.right, true), '!==': (x) => not(operators['==='](x)), '!': (x) => isFalsy(x.argument), '&&': (x) => isTruthy(x.left) && isTruthy(x.right), '||': (x) => isTruthy(x.left) || isTruthy(x.right) }; function not(value) { return value === null ? 
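// The operators table above lets the AST walk further down statically fold simple tests
// so dead branches can be skipped (illustrative):
//   if (true) require('./a.js'); else require('./b.js');   // './b.js' branch is skipped entirely
//   if (process.env.DEBUG) require('./a.js');              // not statically known: the require
//                                                          // is treated as conditional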
value : !value; } function equals(a, b, strict) { if (a.type !== b.type) return null; // eslint-disable-next-line eqeqeq if (a.type === 'Literal') return strict ? a.value === b.value : a.value == b.value; return null; } function isTruthy(node) { if (!node) return false; if (node.type === 'Literal') return !!node.value; if (node.type === 'ParenthesizedExpression') return isTruthy(node.expression); if (node.operator in operators) return operators[node.operator](node); return null; } function isFalsy(node) { return not(isTruthy(node)); } function getKeypath(node) { const parts = []; while (node.type === 'MemberExpression') { if (node.computed) return null; parts.unshift(node.property.name); // eslint-disable-next-line no-param-reassign node = node.object; } if (node.type !== 'Identifier') return null; const { name } = node; parts.unshift(name); return { name, keypath: parts.join('.') }; } const KEY_COMPILED_ESM = '__esModule'; function getDefineCompiledEsmType(node) { const definedPropertyWithExports = getDefinePropertyCallName(node, 'exports'); const definedProperty = definedPropertyWithExports || getDefinePropertyCallName(node, 'module.exports'); if (definedProperty && definedProperty.key === KEY_COMPILED_ESM) { return isTruthy(definedProperty.value) ? definedPropertyWithExports ? 'exports' : 'module' : false; } return false; } function getDefinePropertyCallName(node, targetName) { const { callee: { object, property } } = node; if (!object || object.type !== 'Identifier' || object.name !== 'Object') return; if (!property || property.type !== 'Identifier' || property.name !== 'defineProperty') return; if (node.arguments.length !== 3) return; const targetNames = targetName.split('.'); const [target, key, value] = node.arguments; if (targetNames.length === 1) { if (target.type !== 'Identifier' || target.name !== targetNames[0]) { return; } } if (targetNames.length === 2) { if ( target.type !== 'MemberExpression' || target.object.name !== targetNames[0] || target.property.name !== targetNames[1] ) { return; } } if (value.type !== 'ObjectExpression' || !value.properties) return; const valueProperty = value.properties.find((p) => p.key && p.key.name === 'value'); if (!valueProperty || !valueProperty.value) return; // eslint-disable-next-line consistent-return return { key: key.value, value: valueProperty.value }; } function isShorthandProperty(parent) { return parent && parent.type === 'Property' && parent.shorthand; } function wrapCode(magicString, uses, moduleName, exportsName, indentExclusionRanges) { const args = []; const passedArgs = []; if (uses.module) { args.push('module'); passedArgs.push(moduleName); } if (uses.exports) { args.push('exports'); passedArgs.push(uses.module ? 
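// getDefineCompiledEsmType() above recognises the Babel/TypeScript interop marker, e.g.:
//   Object.defineProperty(exports, '__esModule', { value: true });         // -> 'exports'
//   Object.defineProperty(module.exports, '__esModule', { value: true });  // -> 'module'
// A truthy top-level marker is recorded and can let the plugin re-expose exports.default
// as the ES default export instead of the exports object itself.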
`${moduleName}.exports` : exportsName); } magicString .trim() .indent('\t', { exclude: indentExclusionRanges }) .prepend(`(function (${args.join(', ')}) {\n`) // For some reason, this line is only indented correctly when using a // require-wrapper if we have this leading space .append(` \n} (${passedArgs.join(', ')}));`); } function rewriteExportsAndGetExportsBlock( magicString, moduleName, exportsName, exportedExportsName, wrapped, moduleExportsAssignments, firstTopLevelModuleExportsAssignment, exportsAssignmentsByName, topLevelAssignments, defineCompiledEsmExpressions, deconflictedExportNames, code, HELPERS_NAME, exportMode, defaultIsModuleExports, usesRequireWrapper, requireName ) { const exports = []; const exportDeclarations = []; if (usesRequireWrapper) { getExportsWhenUsingRequireWrapper( magicString, wrapped, exportMode, exports, moduleExportsAssignments, exportsAssignmentsByName, moduleName, exportsName, requireName, defineCompiledEsmExpressions ); } else if (exportMode === 'replace') { getExportsForReplacedModuleExports( magicString, exports, exportDeclarations, moduleExportsAssignments, firstTopLevelModuleExportsAssignment, exportsName, defaultIsModuleExports, HELPERS_NAME ); } else { if (exportMode === 'module') { exportDeclarations.push(`var ${exportedExportsName} = ${moduleName}.exports`); exports.push(`${exportedExportsName} as __moduleExports`); } else { exports.push(`${exportsName} as __moduleExports`); } if (wrapped) { exportDeclarations.push( getDefaultExportDeclaration(exportedExportsName, defaultIsModuleExports, HELPERS_NAME) ); } else { getExports( magicString, exports, exportDeclarations, moduleExportsAssignments, exportsAssignmentsByName, deconflictedExportNames, topLevelAssignments, moduleName, exportsName, exportedExportsName, defineCompiledEsmExpressions, HELPERS_NAME, defaultIsModuleExports, exportMode ); } } if (exports.length) { exportDeclarations.push(`export { ${exports.join(', ')} }`); } return `\n\n${exportDeclarations.join(';\n')};`; } function getExportsWhenUsingRequireWrapper( magicString, wrapped, exportMode, exports, moduleExportsAssignments, exportsAssignmentsByName, moduleName, exportsName, requireName, defineCompiledEsmExpressions ) { exports.push(`${requireName} as __require`); if (wrapped) return; if (exportMode === 'replace') { rewriteModuleExportsAssignments(magicString, moduleExportsAssignments, exportsName); } else { rewriteModuleExportsAssignments(magicString, moduleExportsAssignments, `${moduleName}.exports`); // Collect and rewrite named exports for (const [exportName, { nodes }] of exportsAssignmentsByName) { for (const { node, type } of nodes) { magicString.overwrite( node.start, node.left.end, `${ exportMode === 'module' && type === 'module' ? 
`${moduleName}.exports` : exportsName }.${exportName}` ); } } replaceDefineCompiledEsmExpressionsAndGetIfRestorable( defineCompiledEsmExpressions, magicString, exportMode, moduleName, exportsName ); } } function getExportsForReplacedModuleExports( magicString, exports, exportDeclarations, moduleExportsAssignments, firstTopLevelModuleExportsAssignment, exportsName, defaultIsModuleExports, HELPERS_NAME ) { for (const { left } of moduleExportsAssignments) { magicString.overwrite(left.start, left.end, exportsName); } magicString.prependRight(firstTopLevelModuleExportsAssignment.left.start, 'var '); exports.push(`${exportsName} as __moduleExports`); exportDeclarations.push( getDefaultExportDeclaration(exportsName, defaultIsModuleExports, HELPERS_NAME) ); } function getDefaultExportDeclaration(exportedExportsName, defaultIsModuleExports, HELPERS_NAME) { return `export default ${ defaultIsModuleExports === true ? exportedExportsName : defaultIsModuleExports === false ? `${exportedExportsName}.default` : `/*@__PURE__*/${HELPERS_NAME}.getDefaultExportFromCjs(${exportedExportsName})` }`; } function getExports( magicString, exports, exportDeclarations, moduleExportsAssignments, exportsAssignmentsByName, deconflictedExportNames, topLevelAssignments, moduleName, exportsName, exportedExportsName, defineCompiledEsmExpressions, HELPERS_NAME, defaultIsModuleExports, exportMode ) { let deconflictedDefaultExportName; // Collect and rewrite module.exports assignments for (const { left } of moduleExportsAssignments) { magicString.overwrite(left.start, left.end, `${moduleName}.exports`); } // Collect and rewrite named exports for (const [exportName, { nodes }] of exportsAssignmentsByName) { const deconflicted = deconflictedExportNames[exportName]; let needsDeclaration = true; for (const { node, type } of nodes) { let replacement = `${deconflicted} = ${ exportMode === 'module' && type === 'module' ? `${moduleName}.exports` : exportsName }.${exportName}`; if (needsDeclaration && topLevelAssignments.has(node)) { replacement = `var ${replacement}`; needsDeclaration = false; } magicString.overwrite(node.start, node.left.end, replacement); } if (needsDeclaration) { magicString.prepend(`var ${deconflicted};\n`); } if (exportName === 'default') { deconflictedDefaultExportName = deconflicted; } else { exports.push(exportName === deconflicted ? 
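// getDefaultExportDeclaration() above picks the ES default for a converted module
// (option values illustrative):
//   defaultIsModuleExports: true   -> export default <exports object>
//   defaultIsModuleExports: false  -> export default <exports object>.default
//   'auto' (the fallback)          -> export default getDefaultExportFromCjs(<exports object>),
//                                     i.e. exports.default when __esModule is set, else the object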
exportName : `${deconflicted} as ${exportName}`); } } const isRestorableCompiledEsm = replaceDefineCompiledEsmExpressionsAndGetIfRestorable( defineCompiledEsmExpressions, magicString, exportMode, moduleName, exportsName ); if ( defaultIsModuleExports === false || (defaultIsModuleExports === 'auto' && isRestorableCompiledEsm && moduleExportsAssignments.length === 0) ) { // If there is no deconflictedDefaultExportName, then we use the namespace as // fallback because there can be no "default" property on the namespace exports.push(`${deconflictedDefaultExportName || exportedExportsName} as default`); } else if ( defaultIsModuleExports === true || (!isRestorableCompiledEsm && moduleExportsAssignments.length === 0) ) { exports.push(`${exportedExportsName} as default`); } else { exportDeclarations.push( getDefaultExportDeclaration(exportedExportsName, defaultIsModuleExports, HELPERS_NAME) ); } } function rewriteModuleExportsAssignments(magicString, moduleExportsAssignments, exportsName) { for (const { left } of moduleExportsAssignments) { magicString.overwrite(left.start, left.end, exportsName); } } function replaceDefineCompiledEsmExpressionsAndGetIfRestorable( defineCompiledEsmExpressions, magicString, exportMode, moduleName, exportsName ) { let isRestorableCompiledEsm = false; for (const { node, type } of defineCompiledEsmExpressions) { isRestorableCompiledEsm = true; const moduleExportsExpression = node.type === 'CallExpression' ? node.arguments[0] : node.left.object; magicString.overwrite( moduleExportsExpression.start, moduleExportsExpression.end, exportMode === 'module' && type === 'module' ? `${moduleName}.exports` : exportsName ); } return isRestorableCompiledEsm; } function isRequireExpression(node, scope) { if (!node) return false; if (node.type !== 'CallExpression') return false; // Weird case of `require()` or `module.require()` without arguments if (node.arguments.length === 0) return false; return isRequire(node.callee, scope); } function isRequire(node, scope) { return ( (node.type === 'Identifier' && node.name === 'require' && !scope.contains('require')) || (node.type === 'MemberExpression' && isModuleRequire(node, scope)) ); } function isModuleRequire({ object, property }, scope) { return ( object.type === 'Identifier' && object.name === 'module' && property.type === 'Identifier' && property.name === 'require' && !scope.contains('module') ); } function hasDynamicArguments(node) { return ( node.arguments.length > 1 || (node.arguments[0].type !== 'Literal' && (node.arguments[0].type !== 'TemplateLiteral' || node.arguments[0].expressions.length > 0)) ); } const reservedMethod = { resolve: true, cache: true, main: true }; function isNodeRequirePropertyAccess(parent) { return parent && parent.property && reservedMethod[parent.property.name]; } function getRequireStringArg(node) { return node.arguments[0].type === 'Literal' ? 
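// Only require calls whose argument is statically known are rewritten to imports
// (see hasDynamicArguments/getRequireStringArg here); illustrative cases:
//   require('./dep.js')       // string literal            -> converted
//   require(`./dep.js`)       // template without ${...}   -> converted
//   require('./' + name)      // dynamic                   -> commonjsRequire helper / dynamicRequireTargets
//   require.cache, require.main                            // reserved property access, left untouched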
node.arguments[0].value : node.arguments[0].quasis[0].value.cooked; } function getRequireHandlers() { const requireExpressions = []; function addRequireExpression( sourceId, node, scope, usesReturnValue, isInsideTryBlock, isInsideConditional, toBeRemoved ) { requireExpressions.push({ sourceId, node, scope, usesReturnValue, isInsideTryBlock, isInsideConditional, toBeRemoved }); } async function rewriteRequireExpressionsAndGetImportBlock( magicString, topLevelDeclarations, reassignedNames, helpersName, dynamicRequireName, moduleName, exportsName, id, exportMode, resolveRequireSourcesAndUpdateMeta, needsRequireWrapper, isEsModule, isDynamicRequireModulesEnabled, getIgnoreTryCatchRequireStatementMode, commonjsMeta ) { const imports = []; imports.push(`import * as ${helpersName} from "${HELPERS_ID}"`); if (dynamicRequireName) { imports.push( `import { ${ isDynamicRequireModulesEnabled ? CREATE_COMMONJS_REQUIRE_EXPORT : COMMONJS_REQUIRE_EXPORT } as ${dynamicRequireName} } from "${DYNAMIC_MODULES_ID}"` ); } if (exportMode === 'module') { imports.push( `import { __module as ${moduleName} } from ${JSON.stringify(wrapId(id, MODULE_SUFFIX))}`, `var ${exportsName} = ${moduleName}.exports` ); } else if (exportMode === 'exports') { imports.push( `import { __exports as ${exportsName} } from ${JSON.stringify(wrapId(id, EXPORTS_SUFFIX))}` ); } const requiresBySource = collectSources(requireExpressions); const requireTargets = await resolveRequireSourcesAndUpdateMeta( id, needsRequireWrapper ? IS_WRAPPED_COMMONJS : !isEsModule, commonjsMeta, Object.keys(requiresBySource).map((source) => { return { source, isConditional: requiresBySource[source].every((require) => require.isInsideConditional) }; }) ); processRequireExpressions( imports, requireTargets, requiresBySource, getIgnoreTryCatchRequireStatementMode, magicString ); return imports.length ? `${imports.join(';\n')};\n\n` : ''; } return { addRequireExpression, rewriteRequireExpressionsAndGetImportBlock }; } function collectSources(requireExpressions) { const requiresBySource = Object.create(null); for (const requireExpression of requireExpressions) { const { sourceId } = requireExpression; if (!requiresBySource[sourceId]) { requiresBySource[sourceId] = []; } const requires = requiresBySource[sourceId]; requires.push(requireExpression); } return requiresBySource; } function processRequireExpressions( imports, requireTargets, requiresBySource, getIgnoreTryCatchRequireStatementMode, magicString ) { const generateRequireName = getGenerateRequireName(); for (const { source, id: resolvedId, isCommonJS } of requireTargets) { const requires = requiresBySource[source]; const name = generateRequireName(requires); let usesRequired = false; let needsImport = false; for (const { node, usesReturnValue, toBeRemoved, isInsideTryBlock } of requires) { const { canConvertRequire, shouldRemoveRequire } = isInsideTryBlock && isWrappedId(resolvedId, EXTERNAL_SUFFIX) ? 
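// Rough shape of the rewrite performed here for statically resolvable requires (a sketch;
// the real output imports the '?commonjs-proxy' ids and uses generated names like require$$0):
//
//   const dep = require('./dep.js');     // return value used
//   require('./side-effect.js');         // return value unused
//
// becomes, approximately:
//
//   import require$$0 from './dep.js';
//   import './side-effect.js';
//   const dep = require$$0;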
getIgnoreTryCatchRequireStatementMode(source) : { canConvertRequire: true, shouldRemoveRequire: false }; if (shouldRemoveRequire) { if (usesReturnValue) { magicString.overwrite(node.start, node.end, 'undefined'); } else { magicString.remove(toBeRemoved.start, toBeRemoved.end); } } else if (canConvertRequire) { needsImport = true; if (isCommonJS === IS_WRAPPED_COMMONJS) { magicString.overwrite(node.start, node.end, `${name}()`); } else if (usesReturnValue) { usesRequired = true; magicString.overwrite(node.start, node.end, name); } else { magicString.remove(toBeRemoved.start, toBeRemoved.end); } } } if (needsImport) { if (isCommonJS === IS_WRAPPED_COMMONJS) { imports.push(`import { __require as ${name} } from ${JSON.stringify(resolvedId)}`); } else { imports.push(`import ${usesRequired ? `${name} from ` : ''}${JSON.stringify(resolvedId)}`); } } } } function getGenerateRequireName() { let uid = 0; return (requires) => { let name; const hasNameConflict = ({ scope }) => scope.contains(name); do { name = `require$$${uid}`; uid += 1; } while (requires.some(hasNameConflict)); return name; }; } /* eslint-disable no-param-reassign, no-shadow, no-underscore-dangle, no-continue */ const exportsPattern = /^(?:module\.)?exports(?:\.([a-zA-Z_$][a-zA-Z_$0-9]*))?$/; const functionType = /^(?:FunctionDeclaration|FunctionExpression|ArrowFunctionExpression)$/; // There are three different types of CommonJS modules, described by their // "exportMode": // - exports: Only assignments to (module.)exports properties // - replace: A single assignment to module.exports itself // - module: Anything else // Special cases: // - usesRequireWrapper // - isWrapped async function transformCommonjs( parse, code, id, isEsModule, ignoreGlobal, ignoreRequire, ignoreDynamicRequires, getIgnoreTryCatchRequireStatementMode, sourceMap, isDynamicRequireModulesEnabled, dynamicRequireModules, commonDir, astCache, defaultIsModuleExports, needsRequireWrapper, resolveRequireSourcesAndUpdateMeta, isRequired, checkDynamicRequire, commonjsMeta ) { const ast = astCache || tryParse(parse, code, id); const magicString = new MagicString(code); const uses = { module: false, exports: false, global: false, require: false }; const virtualDynamicRequirePath = isDynamicRequireModulesEnabled && getVirtualPathForDynamicRequirePath(dirname$1(id), commonDir); let scope = attachScopes(ast, 'scope'); let lexicalDepth = 0; let programDepth = 0; let classBodyDepth = 0; let currentTryBlockEnd = null; let shouldWrap = false; const globals = new Set(); // A conditionalNode is a node for which execution is not guaranteed. If such a node is a require // or contains nested requires, those should be handled as function calls unless there is an // unconditional require elsewhere. let currentConditionalNodeEnd = null; const conditionalNodes = new Set(); const { addRequireExpression, rewriteRequireExpressionsAndGetImportBlock } = getRequireHandlers(); // See which names are assigned to. This is necessary to prevent // illegally replacing `var foo = require('foo')` with `import foo from 'foo'`, // where `foo` is later reassigned. (This happens in the wild. 
CommonJS, sigh) const reassignedNames = new Set(); const topLevelDeclarations = []; const skippedNodes = new Set(); const moduleAccessScopes = new Set([scope]); const exportsAccessScopes = new Set([scope]); const moduleExportsAssignments = []; let firstTopLevelModuleExportsAssignment = null; const exportsAssignmentsByName = new Map(); const topLevelAssignments = new Set(); const topLevelDefineCompiledEsmExpressions = []; const replacedGlobal = []; const replacedDynamicRequires = []; const importedVariables = new Set(); const indentExclusionRanges = []; walk$3(ast, { enter(node, parent) { if (skippedNodes.has(node)) { this.skip(); return; } if (currentTryBlockEnd !== null && node.start > currentTryBlockEnd) { currentTryBlockEnd = null; } if (currentConditionalNodeEnd !== null && node.start > currentConditionalNodeEnd) { currentConditionalNodeEnd = null; } if (currentConditionalNodeEnd === null && conditionalNodes.has(node)) { currentConditionalNodeEnd = node.end; } programDepth += 1; if (node.scope) ({ scope } = node); if (functionType.test(node.type)) lexicalDepth += 1; if (sourceMap) { magicString.addSourcemapLocation(node.start); magicString.addSourcemapLocation(node.end); } // eslint-disable-next-line default-case switch (node.type) { case 'AssignmentExpression': if (node.left.type === 'MemberExpression') { const flattened = getKeypath(node.left); if (!flattened || scope.contains(flattened.name)) return; const exportsPatternMatch = exportsPattern.exec(flattened.keypath); if (!exportsPatternMatch || flattened.keypath === 'exports') return; const [, exportName] = exportsPatternMatch; uses[flattened.name] = true; // we're dealing with `module.exports = ...` or `[module.]exports.foo = ...` – if (flattened.keypath === 'module.exports') { moduleExportsAssignments.push(node); if (programDepth > 3) { moduleAccessScopes.add(scope); } else if (!firstTopLevelModuleExportsAssignment) { firstTopLevelModuleExportsAssignment = node; } } else if (exportName === KEY_COMPILED_ESM) { if (programDepth > 3) { shouldWrap = true; } else { // The "type" is either "module" or "exports" to discern // assignments to module.exports vs exports if needed topLevelDefineCompiledEsmExpressions.push({ node, type: flattened.name }); } } else { const exportsAssignments = exportsAssignmentsByName.get(exportName) || { nodes: [], scopes: new Set() }; exportsAssignments.nodes.push({ node, type: flattened.name }); exportsAssignments.scopes.add(scope); exportsAccessScopes.add(scope); exportsAssignmentsByName.set(exportName, exportsAssignments); if (programDepth <= 3) { topLevelAssignments.add(node); } } skippedNodes.add(node.left); } else { for (const name of extractAssignedNames(node.left)) { reassignedNames.add(name); } } return; case 'CallExpression': { const defineCompiledEsmType = getDefineCompiledEsmType(node); if (defineCompiledEsmType) { if (programDepth === 3 && parent.type === 'ExpressionStatement') { // skip special handling for [module.]exports until we know we render this skippedNodes.add(node.arguments[0]); topLevelDefineCompiledEsmExpressions.push({ node, type: defineCompiledEsmType }); } else { shouldWrap = true; } return; } // Transform require.resolve if ( isDynamicRequireModulesEnabled && node.callee.object && isRequire(node.callee.object, scope) && node.callee.property.name === 'resolve' ) { checkDynamicRequire(node.start); uses.require = true; const requireNode = node.callee.object; replacedDynamicRequires.push(requireNode); skippedNodes.add(node.callee); return; } if (!isRequireExpression(node, scope)) { 
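// Requires collected by this walk are flagged as conditional when they sit inside a
// branch, a non-IIFE function or a try block; with cycle detection enabled that can turn
// the required module into a lazily evaluated __require() wrapper. Illustrative:
//   const fs = require('fs');                    // unconditional, becomes a top-level import
//   if (process.env.DEBUG) require('./log.js');  // conditional
//   try { require('optional-dep'); } catch {}    // conditional; unresolvable requires here
//                                                // are additionally subject to ignoreTryCatch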
const keypath = getKeypath(node.callee); if (keypath && importedVariables.has(keypath.name)) { // Heuristic to deoptimize requires after a required function has been called currentConditionalNodeEnd = Infinity; } return; } skippedNodes.add(node.callee); uses.require = true; if (hasDynamicArguments(node)) { if (isDynamicRequireModulesEnabled) { checkDynamicRequire(node.start); } if (!ignoreDynamicRequires) { replacedDynamicRequires.push(node.callee); } return; } const requireStringArg = getRequireStringArg(node); if (!ignoreRequire(requireStringArg)) { const usesReturnValue = parent.type !== 'ExpressionStatement'; const toBeRemoved = parent.type === 'ExpressionStatement' && (!currentConditionalNodeEnd || // We should completely remove requires directly in a try-catch // so that Rollup can remove up the try-catch (currentTryBlockEnd !== null && currentTryBlockEnd < currentConditionalNodeEnd)) ? parent : node; addRequireExpression( requireStringArg, node, scope, usesReturnValue, currentTryBlockEnd !== null, currentConditionalNodeEnd !== null, toBeRemoved ); if (parent.type === 'VariableDeclarator' && parent.id.type === 'Identifier') { for (const name of extractAssignedNames(parent.id)) { importedVariables.add(name); } } } return; } case 'ClassBody': classBodyDepth += 1; return; case 'ConditionalExpression': case 'IfStatement': // skip dead branches if (isFalsy(node.test)) { skippedNodes.add(node.consequent); } else if (isTruthy(node.test)) { if (node.alternate) { skippedNodes.add(node.alternate); } } else { conditionalNodes.add(node.consequent); if (node.alternate) { conditionalNodes.add(node.alternate); } } return; case 'ArrowFunctionExpression': case 'FunctionDeclaration': case 'FunctionExpression': // requires in functions should be conditional unless it is an IIFE if ( currentConditionalNodeEnd === null && !(parent.type === 'CallExpression' && parent.callee === node) ) { currentConditionalNodeEnd = node.end; } return; case 'Identifier': { const { name } = node; if (!isReference(node, parent) || scope.contains(name)) return; switch (name) { case 'require': uses.require = true; if (isNodeRequirePropertyAccess(parent)) { return; } if (!ignoreDynamicRequires) { if (isShorthandProperty(parent)) { // as key and value are the same object, isReference regards // both as references, so we need to skip now skippedNodes.add(parent.value); magicString.prependRight(node.start, 'require: '); } replacedDynamicRequires.push(node); } return; case 'module': case 'exports': shouldWrap = true; uses[name] = true; return; case 'global': uses.global = true; if (!ignoreGlobal) { replacedGlobal.push(node); } return; case 'define': magicString.overwrite(node.start, node.end, 'undefined', { storeName: true }); return; default: globals.add(name); return; } } case 'LogicalExpression': // skip dead branches if (node.operator === '&&') { if (isFalsy(node.left)) { skippedNodes.add(node.right); } else if (!isTruthy(node.left)) { conditionalNodes.add(node.right); } } else if (node.operator === '||') { if (isTruthy(node.left)) { skippedNodes.add(node.right); } else if (!isFalsy(node.left)) { conditionalNodes.add(node.right); } } return; case 'MemberExpression': if (!isDynamicRequireModulesEnabled && isModuleRequire(node, scope)) { uses.require = true; replacedDynamicRequires.push(node); skippedNodes.add(node.object); skippedNodes.add(node.property); } return; case 'ReturnStatement': // if top-level return, we need to wrap it if (lexicalDepth === 0) { shouldWrap = true; } return; case 'ThisExpression': // rewrite top-level 
`this` as `commonjsHelpers.commonjsGlobal` if (lexicalDepth === 0 && !classBodyDepth) { uses.global = true; if (!ignoreGlobal) { replacedGlobal.push(node); } } return; case 'TryStatement': if (currentTryBlockEnd === null) { currentTryBlockEnd = node.block.end; } if (currentConditionalNodeEnd === null) { currentConditionalNodeEnd = node.end; } return; case 'UnaryExpression': // rewrite `typeof module`, `typeof module.exports` and `typeof exports` (https://github.com/rollup/rollup-plugin-commonjs/issues/151) if (node.operator === 'typeof') { const flattened = getKeypath(node.argument); if (!flattened) return; if (scope.contains(flattened.name)) return; if ( !isEsModule && (flattened.keypath === 'module.exports' || flattened.keypath === 'module' || flattened.keypath === 'exports') ) { magicString.overwrite(node.start, node.end, `'object'`, { storeName: false }); } } return; case 'VariableDeclaration': if (!scope.parent) { topLevelDeclarations.push(node); } return; case 'TemplateElement': if (node.value.raw.includes('\n')) { indentExclusionRanges.push([node.start, node.end]); } } }, leave(node) { programDepth -= 1; if (node.scope) scope = scope.parent; if (functionType.test(node.type)) lexicalDepth -= 1; if (node.type === 'ClassBody') classBodyDepth -= 1; } }); const nameBase = getName(id); const exportsName = deconflict([...exportsAccessScopes], globals, nameBase); const moduleName = deconflict([...moduleAccessScopes], globals, `${nameBase}Module`); const requireName = deconflict([scope], globals, `require${capitalize(nameBase)}`); const isRequiredName = deconflict([scope], globals, `hasRequired${capitalize(nameBase)}`); const helpersName = deconflict([scope], globals, 'commonjsHelpers'); const dynamicRequireName = replacedDynamicRequires.length > 0 && deconflict( [scope], globals, isDynamicRequireModulesEnabled ? CREATE_COMMONJS_REQUIRE_EXPORT : COMMONJS_REQUIRE_EXPORT ); const deconflictedExportNames = Object.create(null); for (const [exportName, { scopes }] of exportsAssignmentsByName) { deconflictedExportNames[exportName] = deconflict([...scopes], globals, exportName); } for (const node of replacedGlobal) { magicString.overwrite(node.start, node.end, `${helpersName}.commonjsGlobal`, { storeName: true }); } for (const node of replacedDynamicRequires) { magicString.overwrite( node.start, node.end, isDynamicRequireModulesEnabled ? `${dynamicRequireName}(${JSON.stringify(virtualDynamicRequirePath)})` : dynamicRequireName, { contentOnly: true, storeName: true } ); } // We cannot wrap ES/mixed modules shouldWrap = !isEsModule && (shouldWrap || (uses.exports && moduleExportsAssignments.length > 0)); if ( !( shouldWrap || isRequired || needsRequireWrapper || uses.module || uses.exports || uses.require || topLevelDefineCompiledEsmExpressions.length > 0 ) && (ignoreGlobal || !uses.global) ) { return { meta: { commonjs: { isCommonJS: false } } }; } let leadingComment = ''; if (code.startsWith('/*')) { const commentEnd = code.indexOf('*/', 2) + 2; leadingComment = `${code.slice(0, commentEnd)}\n`; magicString.remove(0, commentEnd).trim(); } let shebang = ''; if (code.startsWith('#!')) { const shebangEndPosition = code.indexOf('\n') + 1; shebang = code.slice(0, shebangEndPosition); magicString.remove(0, shebangEndPosition).trim(); } const exportMode = isEsModule ? 'none' : shouldWrap ? uses.module ? 'module' : 'exports' : firstTopLevelModuleExportsAssignment ? exportsAssignmentsByName.size === 0 && topLevelDefineCompiledEsmExpressions.length === 0 ? 
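// The exportMode chosen by this ternary corresponds to three shapes of CommonJS source
// (see the comment above transformCommonjs; examples are illustrative):
//   'exports':  exports.foo = 1; exports.bar = 2;            // only exports.* assignments
//   'replace':  module.exports = function () {};             // one top-level module.exports = ...
//   'module':   module.exports = {}; exports.extra = true;   // anything mixing the two
// ES modules get 'none', and wrapped modules use 'module' or 'exports' depending on
// whether the module object itself is referenced.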
'replace' : 'module' : moduleExportsAssignments.length === 0 ? 'exports' : 'module'; const exportedExportsName = exportMode === 'module' ? deconflict([], globals, `${nameBase}Exports`) : exportsName; const importBlock = await rewriteRequireExpressionsAndGetImportBlock( magicString, topLevelDeclarations, reassignedNames, helpersName, dynamicRequireName, moduleName, exportsName, id, exportMode, resolveRequireSourcesAndUpdateMeta, needsRequireWrapper, isEsModule, isDynamicRequireModulesEnabled, getIgnoreTryCatchRequireStatementMode, commonjsMeta ); const usesRequireWrapper = commonjsMeta.isCommonJS === IS_WRAPPED_COMMONJS; const exportBlock = isEsModule ? '' : rewriteExportsAndGetExportsBlock( magicString, moduleName, exportsName, exportedExportsName, shouldWrap, moduleExportsAssignments, firstTopLevelModuleExportsAssignment, exportsAssignmentsByName, topLevelAssignments, topLevelDefineCompiledEsmExpressions, deconflictedExportNames, code, helpersName, exportMode, defaultIsModuleExports, usesRequireWrapper, requireName ); if (shouldWrap) { wrapCode(magicString, uses, moduleName, exportsName, indentExclusionRanges); } if (usesRequireWrapper) { magicString.trim().indent('\t', { exclude: indentExclusionRanges }); const exported = exportMode === 'module' ? `${moduleName}.exports` : exportsName; magicString.prepend( `var ${isRequiredName}; function ${requireName} () { \tif (${isRequiredName}) return ${exported}; \t${isRequiredName} = 1; ` ).append(` \treturn ${exported}; }`); if (exportMode === 'replace') { magicString.prepend(`var ${exportsName};\n`); } } magicString .trim() .prepend(shebang + leadingComment + importBlock) .append(exportBlock); return { code: magicString.toString(), map: sourceMap ? magicString.generateMap() : null, syntheticNamedExports: isEsModule || usesRequireWrapper ? false : '__moduleExports', meta: { commonjs: { ...commonjsMeta, shebang } } }; } const PLUGIN_NAME = 'commonjs'; function commonjs(options = {}) { const { ignoreGlobal, ignoreDynamicRequires, requireReturnsDefault: requireReturnsDefaultOption, defaultIsModuleExports: defaultIsModuleExportsOption, esmExternals } = options; const extensions = options.extensions || ['.js']; const filter = createFilter$1(options.include, options.exclude); const isPossibleCjsId = (id) => { const extName = extname(id); return extName === '.cjs' || (extensions.includes(extName) && filter(id)); }; const { strictRequiresFilter, detectCyclesAndConditional } = getStrictRequiresFilter(options); const getRequireReturnsDefault = typeof requireReturnsDefaultOption === 'function' ? requireReturnsDefaultOption : () => requireReturnsDefaultOption; let esmExternalIds; const isEsmExternal = typeof esmExternals === 'function' ? esmExternals : Array.isArray(esmExternals) ? ((esmExternalIds = new Set(esmExternals)), (id) => esmExternalIds.has(id)) : () => esmExternals; const getDefaultIsModuleExports = typeof defaultIsModuleExportsOption === 'function' ? defaultIsModuleExportsOption : () => typeof defaultIsModuleExportsOption === 'boolean' ? defaultIsModuleExportsOption : 'auto'; const dynamicRequireRoot = typeof options.dynamicRequireRoot === 'string' ? resolve$3(options.dynamicRequireRoot) : process.cwd(); const { commonDir, dynamicRequireModules } = getDynamicRequireModules( options.dynamicRequireTargets, dynamicRequireRoot ); const isDynamicRequireModulesEnabled = dynamicRequireModules.size > 0; const ignoreRequire = typeof options.ignore === 'function' ? options.ignore : Array.isArray(options.ignore) ? 
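/*
 * When `usesRequireWrapper` is set, the prepend/append above turns the module body into a lazily
 * evaluated init function guarded by a "hasRequired" flag. Roughly, for a module whose
 * deconflicted base name were `foo` (the names here are hypothetical; the real ones come from
 * `deconflict`):
 *
 *   var hasRequiredFoo;
 *   function requireFoo () {
 *     if (hasRequiredFoo) return fooExports;
 *     hasRequiredFoo = 1;
 *     // ...original module code, indented, populating fooExports...
 *     return fooExports;
 *   }
 */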
(id) => options.ignore.includes(id) : () => false; const getIgnoreTryCatchRequireStatementMode = (id) => { const mode = typeof options.ignoreTryCatch === 'function' ? options.ignoreTryCatch(id) : Array.isArray(options.ignoreTryCatch) ? options.ignoreTryCatch.includes(id) : typeof options.ignoreTryCatch !== 'undefined' ? options.ignoreTryCatch : true; return { canConvertRequire: mode !== 'remove' && mode !== true, shouldRemoveRequire: mode === 'remove' }; }; const { currentlyResolving, resolveId } = getResolveId(extensions, isPossibleCjsId); const sourceMap = options.sourceMap !== false; // Initialized in buildStart let requireResolver; function transformAndCheckExports(code, id) { const normalizedId = normalizePathSlashes(id); const { isEsModule, hasDefaultExport, hasNamedExports, ast } = analyzeTopLevelStatements( this.parse, code, id ); const commonjsMeta = this.getModuleInfo(id).meta.commonjs || {}; if (hasDefaultExport) { commonjsMeta.hasDefaultExport = true; } if (hasNamedExports) { commonjsMeta.hasNamedExports = true; } if ( !dynamicRequireModules.has(normalizedId) && (!(hasCjsKeywords(code, ignoreGlobal) || requireResolver.isRequiredId(id)) || (isEsModule && !options.transformMixedEsModules)) ) { commonjsMeta.isCommonJS = false; return { meta: { commonjs: commonjsMeta } }; } const needsRequireWrapper = !isEsModule && (dynamicRequireModules.has(normalizedId) || strictRequiresFilter(id)); const checkDynamicRequire = (position) => { const normalizedDynamicRequireRoot = normalizePathSlashes(dynamicRequireRoot); if (normalizedId.indexOf(normalizedDynamicRequireRoot) !== 0) { this.error( { code: 'DYNAMIC_REQUIRE_OUTSIDE_ROOT', normalizedId, normalizedDynamicRequireRoot, message: `"${normalizedId}" contains dynamic require statements but it is not within the current dynamicRequireRoot "${normalizedDynamicRequireRoot}". You should set dynamicRequireRoot to "${dirname$1( normalizedId )}" or one of its parent directories.` }, position ); } }; return transformCommonjs( this.parse, code, id, isEsModule, ignoreGlobal || isEsModule, ignoreRequire, ignoreDynamicRequires && !isDynamicRequireModulesEnabled, getIgnoreTryCatchRequireStatementMode, sourceMap, isDynamicRequireModulesEnabled, dynamicRequireModules, commonDir, ast, getDefaultIsModuleExports(id), needsRequireWrapper, requireResolver.resolveRequireSourcesAndUpdateMeta(this), requireResolver.isRequiredId(id), checkDynamicRequire, commonjsMeta ); } return { name: PLUGIN_NAME, version: version$3, options(rawOptions) { // We inject the resolver in the beginning so that "catch-all-resolver" like node-resolver // do not prevent our plugin from resolving entry points ot proxies. const plugins = Array.isArray(rawOptions.plugins) ? [...rawOptions.plugins] : rawOptions.plugins ? [rawOptions.plugins] : []; plugins.unshift({ name: 'commonjs--resolver', resolveId }); return { ...rawOptions, plugins }; }, buildStart({ plugins }) { validateVersion(this.meta.rollupVersion, peerDependencies.rollup, 'rollup'); const nodeResolve = plugins.find(({ name }) => name === 'node-resolve'); if (nodeResolve) { validateVersion(nodeResolve.version, '^13.0.6', '@rollup/plugin-node-resolve'); } if (options.namedExports != null) { this.warn( 'The namedExports option from "@rollup/plugin-commonjs" is deprecated. Named exports are now handled automatically.' 
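/*
 * The `commonjs(options)` factory above derives all of its behaviour from the options object.
 * A minimal usage sketch (values and file names are made up; the option names are the ones read
 * in this code):
 *
 *   import commonjs from '@rollup/plugin-commonjs';
 *
 *   export default {
 *     input: 'src/main.js',
 *     plugins: [
 *       commonjs({
 *         extensions: ['.js', '.cjs'],      // which files are considered possible CommonJS
 *         include: 'node_modules/**',       // passed to createFilter together with `exclude`
 *         transformMixedEsModules: true,    // also transform files mixing import and require
 *         ignoreTryCatch: ['optional-dep'], // leave these require()s inside try/catch untouched
 *         strictRequires: 'debug',          // report the list of wrapped files at buildEnd
 *       }),
 *     ],
 *   };
 */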
); } requireResolver = getRequireResolver( extensions, detectCyclesAndConditional, currentlyResolving ); }, buildEnd() { if (options.strictRequires === 'debug') { const wrappedIds = requireResolver.getWrappedIds(); if (wrappedIds.length) { this.warn({ code: 'WRAPPED_IDS', ids: wrappedIds, message: `The commonjs plugin automatically wrapped the following files:\n[\n${wrappedIds .map((id) => `\t${JSON.stringify(relative$1(process.cwd(), id))}`) .join(',\n')}\n]` }); } else { this.warn({ code: 'WRAPPED_IDS', ids: wrappedIds, message: 'The commonjs plugin did not wrap any files.' }); } } }, load(id) { if (id === HELPERS_ID) { return getHelpersModule(); } if (isWrappedId(id, MODULE_SUFFIX)) { const name = getName(unwrapId(id, MODULE_SUFFIX)); return { code: `var ${name} = {exports: {}}; export {${name} as __module}`, meta: { commonjs: { isCommonJS: false } } }; } if (isWrappedId(id, EXPORTS_SUFFIX)) { const name = getName(unwrapId(id, EXPORTS_SUFFIX)); return { code: `var ${name} = {}; export {${name} as __exports}`, meta: { commonjs: { isCommonJS: false } } }; } if (isWrappedId(id, EXTERNAL_SUFFIX)) { const actualId = unwrapId(id, EXTERNAL_SUFFIX); return getUnknownRequireProxy( actualId, isEsmExternal(actualId) ? getRequireReturnsDefault(actualId) : true ); } // entry suffix is just appended to not mess up relative external resolution if (id.endsWith(ENTRY_SUFFIX)) { const acutalId = id.slice(0, -ENTRY_SUFFIX.length); const { meta: { commonjs: commonjsMeta } } = this.getModuleInfo(acutalId); const shebang = commonjsMeta?.shebang ?? ''; return getEntryProxy( acutalId, getDefaultIsModuleExports(acutalId), this.getModuleInfo, shebang ); } if (isWrappedId(id, ES_IMPORT_SUFFIX)) { const actualId = unwrapId(id, ES_IMPORT_SUFFIX); return getEsImportProxy(actualId, getDefaultIsModuleExports(actualId)); } if (id === DYNAMIC_MODULES_ID) { return getDynamicModuleRegistry( isDynamicRequireModulesEnabled, dynamicRequireModules, commonDir, ignoreDynamicRequires ); } if (isWrappedId(id, PROXY_SUFFIX)) { const actualId = unwrapId(id, PROXY_SUFFIX); return getStaticRequireProxy(actualId, getRequireReturnsDefault(actualId), this.load); } return null; }, shouldTransformCachedModule(...args) { return requireResolver.shouldTransformCachedModule.call(this, ...args); }, transform(code, id) { if (!isPossibleCjsId(id)) return null; try { return transformAndCheckExports.call(this, code, id); } catch (err) { return this.error(err, err.pos); } } }; } // Matches the scheme of a URL, eg "http://" const schemeRegex = /^[\w+.-]+:\/\//; /** * Matches the parts of a URL: * 1. Scheme, including ":", guaranteed. * 2. User/password, including "@", optional. * 3. Host, guaranteed. * 4. Port, including ":", optional. * 5. Path, including "/", optional. * 6. Query, including "?", optional. * 7. Hash, including "#", optional. */ const urlRegex = /^([\w+.-]+:)\/\/([^@/#?]*@)?([^:/#?]*)(:\d+)?(\/[^#?]*)?(\?[^#]*)?(#.*)?/; /** * File URLs are weird. They dont' need the regular `//` in the scheme, they may or may not start * with a leading `/`, they can have a domain (but only if they don't start with a Windows drive). * * 1. Host, optional. * 2. Path, which may include "/", guaranteed. * 3. Query, including "?", optional. * 4. Hash, including "#", optional. 
*/ const fileRegex = /^file:(?:\/\/((?![a-z]:)[^/#?]*)?)?(\/?[^#?]*)(\?[^#]*)?(#.*)?/i; function isAbsoluteUrl(input) { return schemeRegex.test(input); } function isSchemeRelativeUrl(input) { return input.startsWith('//'); } function isAbsolutePath(input) { return input.startsWith('/'); } function isFileUrl(input) { return input.startsWith('file:'); } function isRelative(input) { return /^[.?#]/.test(input); } function parseAbsoluteUrl(input) { const match = urlRegex.exec(input); return makeUrl(match[1], match[2] || '', match[3], match[4] || '', match[5] || '/', match[6] || '', match[7] || ''); } function parseFileUrl(input) { const match = fileRegex.exec(input); const path = match[2]; return makeUrl('file:', '', match[1] || '', '', isAbsolutePath(path) ? path : '/' + path, match[3] || '', match[4] || ''); } function makeUrl(scheme, user, host, port, path, query, hash) { return { scheme, user, host, port, path, query, hash, type: 7 /* Absolute */, }; } function parseUrl$2(input) { if (isSchemeRelativeUrl(input)) { const url = parseAbsoluteUrl('http:' + input); url.scheme = ''; url.type = 6 /* SchemeRelative */; return url; } if (isAbsolutePath(input)) { const url = parseAbsoluteUrl('http://foo.com' + input); url.scheme = ''; url.host = ''; url.type = 5 /* AbsolutePath */; return url; } if (isFileUrl(input)) return parseFileUrl(input); if (isAbsoluteUrl(input)) return parseAbsoluteUrl(input); const url = parseAbsoluteUrl('http://foo.com/' + input); url.scheme = ''; url.host = ''; url.type = input ? input.startsWith('?') ? 3 /* Query */ : input.startsWith('#') ? 2 /* Hash */ : 4 /* RelativePath */ : 1 /* Empty */; return url; } function stripPathFilename(path) { // If a path ends with a parent directory "..", then it's a relative path with excess parent // paths. It's not a file, so we can't strip it. if (path.endsWith('/..')) return path; const index = path.lastIndexOf('/'); return path.slice(0, index + 1); } function mergePaths(url, base) { normalizePath$4(base, base.type); // If the path is just a "/", then it was an empty path to begin with (remember, we're a relative // path). if (url.path === '/') { url.path = base.path; } else { // Resolution happens relative to the base path's directory, not the file. url.path = stripPathFilename(base.path) + url.path; } } /** * The path can have empty directories "//", unneeded parents "foo/..", or current directory * "foo/.". We need to normalize to a standard representation. */ function normalizePath$4(url, type) { const rel = type <= 4 /* RelativePath */; const pieces = url.path.split('/'); // We need to preserve the first piece always, so that we output a leading slash. The item at // pieces[0] is an empty string. let pointer = 1; // Positive is the number of real directories we've output, used for popping a parent directory. // Eg, "foo/bar/.." will have a positive 2, and we can decrement to be left with just "foo". let positive = 0; // We need to keep a trailing slash if we encounter an empty directory (eg, splitting "foo/" will // generate `["foo", ""]` pieces). And, if we pop a parent directory. But once we encounter a // real directory, we won't need to append, unless the other conditions happen again. let addTrailingSlash = false; for (let i = 1; i < pieces.length; i++) { const piece = pieces[i]; // An empty directory, could be a trailing slash, or just a double "//" in the path. if (!piece) { addTrailingSlash = true; continue; } // If we encounter a real directory, then we don't need to append anymore. 
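/*
 * `parseUrl$2` classifies its input into the numeric URL types consumed by `resolve$2` below.
 * Roughly (the sample inputs are made up):
 *
 *   parseUrl$2('https://example.com/a.js').type  // 7  Absolute
 *   parseUrl$2('//example.com/a.js').type        // 6  SchemeRelative
 *   parseUrl$2('/a.js').type                     // 5  AbsolutePath
 *   parseUrl$2('./a.js').type                    // 4  RelativePath
 *   parseUrl$2('?x=1').type                      // 3  Query
 *   parseUrl$2('#frag').type                     // 2  Hash
 *   parseUrl$2('').type                          // 1  Empty
 *
 * During resolution, the higher of the input's and the base's type decides how much of the base
 * (path, host and port, scheme) is copied onto the input.
 */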
addTrailingSlash = false; // A current directory, which we can always drop. if (piece === '.') continue; // A parent directory, we need to see if there are any real directories we can pop. Else, we // have an excess of parents, and we'll need to keep the "..". if (piece === '..') { if (positive) { addTrailingSlash = true; positive--; pointer--; } else if (rel) { // If we're in a relativePath, then we need to keep the excess parents. Else, in an absolute // URL, protocol relative URL, or an absolute path, we don't need to keep excess. pieces[pointer++] = piece; } continue; } // We've encountered a real directory. Move it to the next insertion pointer, which accounts for // any popped or dropped directories. pieces[pointer++] = piece; positive++; } let path = ''; for (let i = 1; i < pointer; i++) { path += '/' + pieces[i]; } if (!path || (addTrailingSlash && !path.endsWith('/..'))) { path += '/'; } url.path = path; } /** * Attempts to resolve `input` URL/path relative to `base`. */ function resolve$2(input, base) { if (!input && !base) return ''; const url = parseUrl$2(input); let inputType = url.type; if (base && inputType !== 7 /* Absolute */) { const baseUrl = parseUrl$2(base); const baseType = baseUrl.type; switch (inputType) { case 1 /* Empty */: url.hash = baseUrl.hash; // fall through case 2 /* Hash */: url.query = baseUrl.query; // fall through case 3 /* Query */: case 4 /* RelativePath */: mergePaths(url, baseUrl); // fall through case 5 /* AbsolutePath */: // The host, user, and port are joined, you can't copy one without the others. url.user = baseUrl.user; url.host = baseUrl.host; url.port = baseUrl.port; // fall through case 6 /* SchemeRelative */: // The input doesn't have a schema at least, so we need to copy at least that over. url.scheme = baseUrl.scheme; } if (baseType > inputType) inputType = baseType; } normalizePath$4(url, inputType); const queryHash = url.query + url.hash; switch (inputType) { // This is impossible, because of the empty checks at the start of the function. // case UrlType.Empty: case 2 /* Hash */: case 3 /* Query */: return queryHash; case 4 /* RelativePath */: { // The first char is always a "/", and we need it to be relative. const path = url.path.slice(1); if (!path) return queryHash || '.'; if (isRelative(base || input) && !isRelative(path)) { // If base started with a leading ".", or there is no base and input started with a ".", // then we need to ensure that the relative path starts with a ".". We don't know if // relative starts with a "..", though, so check before prepending. return './' + path + queryHash; } return path + queryHash; } case 5 /* AbsolutePath */: return url.path + queryHash; default: return url.scheme + '//' + url.user + url.host + url.port + url.path + queryHash; } } function resolve$1(input, base) { // The base is always treated as a directory, if it's not empty. // https://github.com/mozilla/source-map/blob/8cb3ee57/lib/util.js#L327 // https://github.com/chromium/chromium/blob/da4adbb3/third_party/blink/renderer/devtools/front_end/sdk/SourceMap.js#L400-L401 if (base && !base.endsWith('/')) base += '/'; return resolve$2(input, base); } /** * Removes everything after the last "/", but leaves the slash. 
*/ function stripFilename(path) { if (!path) return ''; const index = path.lastIndexOf('/'); return path.slice(0, index + 1); } const COLUMN$1 = 0; const SOURCES_INDEX$1 = 1; const SOURCE_LINE$1 = 2; const SOURCE_COLUMN$1 = 3; const NAMES_INDEX$1 = 4; function maybeSort(mappings, owned) { const unsortedIndex = nextUnsortedSegmentLine(mappings, 0); if (unsortedIndex === mappings.length) return mappings; // If we own the array (meaning we parsed it from JSON), then we're free to directly mutate it. If // not, we do not want to modify the consumer's input array. if (!owned) mappings = mappings.slice(); for (let i = unsortedIndex; i < mappings.length; i = nextUnsortedSegmentLine(mappings, i + 1)) { mappings[i] = sortSegments(mappings[i], owned); } return mappings; } function nextUnsortedSegmentLine(mappings, start) { for (let i = start; i < mappings.length; i++) { if (!isSorted(mappings[i])) return i; } return mappings.length; } function isSorted(line) { for (let j = 1; j < line.length; j++) { if (line[j][COLUMN$1] < line[j - 1][COLUMN$1]) { return false; } } return true; } function sortSegments(line, owned) { if (!owned) line = line.slice(); return line.sort(sortComparator); } function sortComparator(a, b) { return a[COLUMN$1] - b[COLUMN$1]; } let found = false; /** * A binary search implementation that returns the index if a match is found. * If no match is found, then the left-index (the index associated with the item that comes just * before the desired index) is returned. To maintain proper sort order, a splice would happen at * the next index: * * ```js * const array = [1, 3]; * const needle = 2; * const index = binarySearch(array, needle, (item, needle) => item - needle); * * assert.equal(index, 0); * array.splice(index + 1, 0, needle); * assert.deepEqual(array, [1, 2, 3]); * ``` */ function binarySearch(haystack, needle, low, high) { while (low <= high) { const mid = low + ((high - low) >> 1); const cmp = haystack[mid][COLUMN$1] - needle; if (cmp === 0) { found = true; return mid; } if (cmp < 0) { low = mid + 1; } else { high = mid - 1; } } found = false; return low - 1; } function upperBound(haystack, needle, index) { for (let i = index + 1; i < haystack.length; index = i++) { if (haystack[i][COLUMN$1] !== needle) break; } return index; } function lowerBound(haystack, needle, index) { for (let i = index - 1; i >= 0; index = i--) { if (haystack[i][COLUMN$1] !== needle) break; } return index; } function memoizedState() { return { lastKey: -1, lastNeedle: -1, lastIndex: -1, }; } /** * This overly complicated beast is just to record the last tested line/column and the resulting * index, allowing us to skip a few tests if mappings are monotonically increasing. */ function memoizedBinarySearch(haystack, needle, state, key) { const { lastKey, lastNeedle, lastIndex } = state; let low = 0; let high = haystack.length - 1; if (key === lastKey) { if (needle === lastNeedle) { found = lastIndex !== -1 && haystack[lastIndex][COLUMN$1] === needle; return lastIndex; } if (needle >= lastNeedle) { // lastIndex may be -1 if the previous needle was not found. low = lastIndex === -1 ? 
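/*
 * The memo lets repeated lookups on the same line reuse the previous index as a lower bound when
 * the needle columns are non-decreasing, which is the common case when tracing mappings in
 * order. A minimal sketch (the segments are made up; each segment's first entry is its generated
 * column):
 *
 *   const segments = [[0], [5], [10]];
 *   const state = memoizedState();
 *   memoizedBinarySearch(segments, 5, state, 0); // -> 1, exact match, sets `found` to true
 *   memoizedBinarySearch(segments, 7, state, 0); // -> 1, greatest lower bound; the search starts
 *                                                //    from the previously found index, not 0
 */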
0 : lastIndex; } else { high = lastIndex; } } state.lastKey = key; state.lastNeedle = needle; return (state.lastIndex = binarySearch(haystack, needle, low, high)); } const LINE_GTR_ZERO = '`line` must be greater than 0 (lines start at line 1)'; const COL_GTR_EQ_ZERO = '`column` must be greater than or equal to 0 (columns start at column 0)'; const LEAST_UPPER_BOUND = -1; const GREATEST_LOWER_BOUND = 1; class TraceMap { constructor(map, mapUrl) { const isString = typeof map === 'string'; if (!isString && map._decodedMemo) return map; const parsed = (isString ? JSON.parse(map) : map); const { version, file, names, sourceRoot, sources, sourcesContent } = parsed; this.version = version; this.file = file; this.names = names || []; this.sourceRoot = sourceRoot; this.sources = sources; this.sourcesContent = sourcesContent; this.ignoreList = parsed.ignoreList || parsed.x_google_ignoreList || undefined; const from = resolve$1(sourceRoot || '', stripFilename(mapUrl)); this.resolvedSources = sources.map((s) => resolve$1(s || '', from)); const { mappings } = parsed; if (typeof mappings === 'string') { this._encoded = mappings; this._decoded = undefined; } else { this._encoded = undefined; this._decoded = maybeSort(mappings, isString); } this._decodedMemo = memoizedState(); this._bySources = undefined; this._bySourceMemos = undefined; } } /** * Typescript doesn't allow friend access to private fields, so this just casts the map into a type * with public access modifiers. */ function cast$2(map) { return map; } /** * Returns the decoded (array of lines of segments) form of the SourceMap's mappings field. */ function decodedMappings(map) { var _a; return ((_a = cast$2(map))._decoded || (_a._decoded = decode(cast$2(map)._encoded))); } /** * A low-level API to find the segment associated with a generated line/column (think, from a * stack trace). Line and column here are 0-based, unlike `originalPositionFor`. */ function traceSegment(map, line, column) { const decoded = decodedMappings(map); // It's common for parent source maps to have pointers to lines that have no // mapping (like a "//# sourceMappingURL=") at the end of the child file. if (line >= decoded.length) return null; const segments = decoded[line]; const index = traceSegmentInternal(segments, cast$2(map)._decodedMemo, line, column, GREATEST_LOWER_BOUND); return index === -1 ? null : segments[index]; } /** * A higher-level API to find the source/line/column associated with a generated line/column * (think, from a stack trace). Line is 1-based, but column is 0-based, due to legacy behavior in * `source-map` library. */ function originalPositionFor$1(map, needle) { let { line, column, bias } = needle; line--; if (line < 0) throw new Error(LINE_GTR_ZERO); if (column < 0) throw new Error(COL_GTR_EQ_ZERO); const decoded = decodedMappings(map); // It's common for parent source maps to have pointers to lines that have no // mapping (like a "//# sourceMappingURL=") at the end of the child file. if (line >= decoded.length) return OMapping(null, null, null, null); const segments = decoded[line]; const index = traceSegmentInternal(segments, cast$2(map)._decodedMemo, line, column, bias || GREATEST_LOWER_BOUND); if (index === -1) return OMapping(null, null, null, null); const segment = segments[index]; if (segment.length === 1) return OMapping(null, null, null, null); const { names, resolvedSources } = map; return OMapping(resolvedSources[segment[SOURCES_INDEX$1]], segment[SOURCE_LINE$1] + 1, segment[SOURCE_COLUMN$1], segment.length === 5 ? 
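/*
 * A minimal usage sketch of the tracing API above (the input map and file name are made up):
 *
 *   const map = new TraceMap({
 *     version: 3,
 *     names: [],
 *     sources: ['input.js'],
 *     sourcesContent: ['let x = 1;\n'],
 *     mappings: 'AAAA', // generated line 0, column 0 -> sources[0], line 0, column 0
 *   });
 *   // `line` is 1-based, `column` is 0-based (legacy `source-map` behaviour):
 *   originalPositionFor$1(map, { line: 1, column: 0 });
 *   // -> { source: 'input.js', line: 1, column: 0, name: null }
 *
 * `traceSegment` is the fully 0-based, lower-level variant and returns the raw segment instead.
 */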
names[segment[NAMES_INDEX$1]] : null); } function OMapping(source, line, column, name) { return { source, line, column, name }; } function traceSegmentInternal(segments, memo, line, column, bias) { let index = memoizedBinarySearch(segments, column, memo, line); if (found) { index = (bias === LEAST_UPPER_BOUND ? upperBound : lowerBound)(segments, column, index); } else if (bias === LEAST_UPPER_BOUND) index++; if (index === -1 || index === segments.length) return -1; return index; } /** * SetArray acts like a `Set` (allowing only one occurrence of a string `key`), but provides the * index of the `key` in the backing array. * * This is designed to allow synchronizing a second array with the contents of the backing array, * like how in a sourcemap `sourcesContent[i]` is the source content associated with `source[i]`, * and there are never duplicates. */ class SetArray { constructor() { this._indexes = { __proto__: null }; this.array = []; } } /** * Typescript doesn't allow friend access to private fields, so this just casts the set into a type * with public access modifiers. */ function cast$1(set) { return set; } /** * Gets the index associated with `key` in the backing array, if it is already present. */ function get(setarr, key) { return cast$1(setarr)._indexes[key]; } /** * Puts `key` into the backing array, if it is not already present. Returns * the index of the `key` in the backing array. */ function put(setarr, key) { // The key may or may not be present. If it is present, it's a number. const index = get(setarr, key); if (index !== undefined) return index; const { array, _indexes: indexes } = cast$1(setarr); const length = array.push(key); return (indexes[key] = length - 1); } /** * Removes the key, if it exists in the set. */ function remove(setarr, key) { const index = get(setarr, key); if (index === undefined) return; const { array, _indexes: indexes } = cast$1(setarr); for (let i = index + 1; i < array.length; i++) { const k = array[i]; array[i - 1] = k; indexes[k]--; } indexes[key] = undefined; array.pop(); } const COLUMN = 0; const SOURCES_INDEX = 1; const SOURCE_LINE = 2; const SOURCE_COLUMN = 3; const NAMES_INDEX = 4; const NO_NAME = -1; /** * Provides the state to generate a sourcemap. */ class GenMapping { constructor({ file, sourceRoot } = {}) { this._names = new SetArray(); this._sources = new SetArray(); this._sourcesContent = []; this._mappings = []; this.file = file; this.sourceRoot = sourceRoot; this._ignoreList = new SetArray(); } } /** * Typescript doesn't allow friend access to private fields, so this just casts the map into a type * with public access modifiers. */ function cast(map) { return map; } /** * Same as `addSegment`, but will only add the segment if it generates useful information in the * resulting map. This only works correctly if segments are added **in order**, meaning you should * not add a segment with a lower generated line/column than one that came before. */ const maybeAddSegment = (map, genLine, genColumn, source, sourceLine, sourceColumn, name, content) => { return addSegmentInternal(true, map, genLine, genColumn, source, sourceLine, sourceColumn, name, content); }; /** * Adds/removes the content of the source file to the source map. 
*/ function setSourceContent(map, source, content) { const { _sources: sources, _sourcesContent: sourcesContent } = cast(map); const index = put(sources, source); sourcesContent[index] = content; } function setIgnore(map, source, ignore = true) { const { _sources: sources, _sourcesContent: sourcesContent, _ignoreList: ignoreList } = cast(map); const index = put(sources, source); if (index === sourcesContent.length) sourcesContent[index] = null; if (ignore) put(ignoreList, index); else remove(ignoreList, index); } /** * Returns a sourcemap object (with decoded mappings) suitable for passing to a library that expects * a sourcemap, or to JSON.stringify. */ function toDecodedMap(map) { const { _mappings: mappings, _sources: sources, _sourcesContent: sourcesContent, _names: names, _ignoreList: ignoreList, } = cast(map); removeEmptyFinalLines(mappings); return { version: 3, file: map.file || undefined, names: names.array, sourceRoot: map.sourceRoot || undefined, sources: sources.array, sourcesContent, mappings, ignoreList: ignoreList.array, }; } /** * Returns a sourcemap object (with encoded mappings) suitable for passing to a library that expects * a sourcemap, or to JSON.stringify. */ function toEncodedMap(map) { const decoded = toDecodedMap(map); return Object.assign(Object.assign({}, decoded), { mappings: encode$1(decoded.mappings) }); } // This split declaration is only so that terser can elminiate the static initialization block. function addSegmentInternal(skipable, map, genLine, genColumn, source, sourceLine, sourceColumn, name, content) { const { _mappings: mappings, _sources: sources, _sourcesContent: sourcesContent, _names: names, } = cast(map); const line = getLine(mappings, genLine); const index = getColumnIndex(line, genColumn); if (!source) { if (skipable && skipSourceless(line, index)) return; return insert(line, index, [genColumn]); } const sourcesIndex = put(sources, source); const namesIndex = name ? put(names, name) : NO_NAME; if (sourcesIndex === sourcesContent.length) sourcesContent[sourcesIndex] = content !== null && content !== void 0 ? content : null; if (skipable && skipSource(line, index, sourcesIndex, sourceLine, sourceColumn, namesIndex)) { return; } return insert(line, index, name ? [genColumn, sourcesIndex, sourceLine, sourceColumn, namesIndex] : [genColumn, sourcesIndex, sourceLine, sourceColumn]); } function getLine(mappings, index) { for (let i = mappings.length; i <= index; i++) { mappings[i] = []; } return mappings[index]; } function getColumnIndex(line, genColumn) { let index = line.length; for (let i = index - 1; i >= 0; index = i--) { const current = line[i]; if (genColumn >= current[COLUMN]) break; } return index; } function insert(array, index, value) { for (let i = array.length; i > index; i--) { array[i] = array[i - 1]; } array[index] = value; } function removeEmptyFinalLines(mappings) { const { length } = mappings; let len = length; for (let i = len - 1; i >= 0; len = i, i--) { if (mappings[i].length > 0) break; } if (len < length) mappings.length = len; } function skipSourceless(line, index) { // The start of a line is already sourceless, so adding a sourceless segment to the beginning // doesn't generate any useful information. if (index === 0) return true; const prev = line[index - 1]; // If the previous segment is also sourceless, then adding another sourceless segment doesn't // genrate any new information. Else, this segment will end the source/named segment and point to // a sourceless position, which is useful. 
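/*
 * A minimal sketch of building a map with the generator above (the file and source names are
 * made up):
 *
 *   const gen = new GenMapping({ file: 'out.js' });
 *   // generated line 0, column 0 maps to src/input.js line 0, column 0, with no name:
 *   maybeAddSegment(gen, 0, 0, 'src/input.js', 0, 0, null, 'let x = 1;\n');
 *   setSourceContent(gen, 'src/input.js', 'let x = 1;\n');
 *   toEncodedMap(gen);
 *   // -> { version: 3, file: 'out.js', sources: ['src/input.js'], mappings: 'AAAA', ... }
 *
 * Because segments must be added in order, `maybeAddSegment` can cheaply skip segments that add
 * no information over the previous one (see `skipSourceless` and `skipSource` nearby).
 */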
return prev.length === 1; } function skipSource(line, index, sourcesIndex, sourceLine, sourceColumn, namesIndex) { // A source/named segment at the start of a line gives position at that genColumn if (index === 0) return false; const prev = line[index - 1]; // If the previous segment is sourceless, then we're transitioning to a source. if (prev.length === 1) return false; // If the previous segment maps to the exact same source position, then this segment doesn't // provide any new position information. return (sourcesIndex === prev[SOURCES_INDEX] && sourceLine === prev[SOURCE_LINE] && sourceColumn === prev[SOURCE_COLUMN] && namesIndex === (prev.length === 5 ? prev[NAMES_INDEX] : NO_NAME)); } const SOURCELESS_MAPPING = /* #__PURE__ */ SegmentObject('', -1, -1, '', null, false); const EMPTY_SOURCES = []; function SegmentObject(source, line, column, name, content, ignore) { return { source, line, column, name, content, ignore }; } function Source(map, sources, source, content, ignore) { return { map, sources, source, content, ignore, }; } /** * MapSource represents a single sourcemap, with the ability to trace mappings into its child nodes * (which may themselves be SourceMapTrees). */ function MapSource(map, sources) { return Source(map, sources, '', null, false); } /** * A "leaf" node in the sourcemap tree, representing an original, unmodified source file. Recursive * segment tracing ends at the `OriginalSource`. */ function OriginalSource(source, content, ignore) { return Source(null, EMPTY_SOURCES, source, content, ignore); } /** * traceMappings is only called on the root level SourceMapTree, and begins the process of * resolving each mapping in terms of the original source files. */ function traceMappings(tree) { // TODO: Eventually support sourceRoot, which has to be removed because the sources are already // fully resolved. We'll need to make sources relative to the sourceRoot before adding them. const gen = new GenMapping({ file: tree.map.file }); const { sources: rootSources, map } = tree; const rootNames = map.names; const rootMappings = decodedMappings(map); for (let i = 0; i < rootMappings.length; i++) { const segments = rootMappings[i]; for (let j = 0; j < segments.length; j++) { const segment = segments[j]; const genCol = segment[0]; let traced = SOURCELESS_MAPPING; // 1-length segments only move the current generated column, there's no source information // to gather from it. if (segment.length !== 1) { const source = rootSources[segment[1]]; traced = originalPositionFor(source, segment[2], segment[3], segment.length === 5 ? rootNames[segment[4]] : ''); // If the trace is invalid, then the trace ran into a sourcemap that doesn't contain a // respective segment into an original source. if (traced == null) continue; } const { column, line, name, content, source, ignore } = traced; maybeAddSegment(gen, i, genCol, source, line, column, name); if (source && content != null) setSourceContent(gen, source, content); if (ignore) setIgnore(gen, source, true); } } return gen; } /** * originalPositionFor is only called on children SourceMapTrees. It recurses down into its own * child SourceMapTrees, until we find the original source map. */ function originalPositionFor(source, line, column, name) { if (!source.map) { return SegmentObject(source.source, line, column, name, source.content, source.ignore); } const segment = traceSegment(source.map, line, column); // If we couldn't find a segment, then this doesn't exist in the sourcemap. 
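/*
 * For a two-step pipeline (transpile, then minify) the tree built from these helpers looks
 * roughly like this (the map variables and file name are hypothetical):
 *
 *   MapSource(minifyMap, [
 *     MapSource(transpileMap, [
 *       OriginalSource('src/app.ts', originalContent, false),
 *     ]),
 *   ])
 *
 * `traceMappings` walks the root map's segments and calls `originalPositionFor` recursively until
 * an `OriginalSource` leaf is reached, so every generated position resolves back to an original
 * file, line and column.
 */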
if (segment == null) return null; // 1-length segments only move the current generated column, there's no source information // to gather from it. if (segment.length === 1) return SOURCELESS_MAPPING; return originalPositionFor(source.sources[segment[1]], segment[2], segment[3], segment.length === 5 ? source.map.names[segment[4]] : name); } function asArray(value) { if (Array.isArray(value)) return value; return [value]; } /** * Recursively builds a tree structure out of sourcemap files, with each node * being either an `OriginalSource` "leaf" or a `SourceMapTree` composed of * `OriginalSource`s and `SourceMapTree`s. * * Every sourcemap is composed of a collection of source files and mappings * into locations of those source files. When we generate a `SourceMapTree` for * the sourcemap, we attempt to load each source file's own sourcemap. If it * does not have an associated sourcemap, it is considered an original, * unmodified source file. */ function buildSourceMapTree(input, loader) { const maps = asArray(input).map((m) => new TraceMap(m, '')); const map = maps.pop(); for (let i = 0; i < maps.length; i++) { if (maps[i].sources.length > 1) { throw new Error(`Transformation map ${i} must have exactly one source file.\n` + 'Did you specify these with the most recent transformation maps first?'); } } let tree = build$2(map, loader, '', 0); for (let i = maps.length - 1; i >= 0; i--) { tree = MapSource(maps[i], [tree]); } return tree; } function build$2(map, loader, importer, importerDepth) { const { resolvedSources, sourcesContent, ignoreList } = map; const depth = importerDepth + 1; const children = resolvedSources.map((sourceFile, i) => { // The loading context gives the loader more information about why this file is being loaded // (eg, from which importer). It also allows the loader to override the location of the loaded // sourcemap/original source, or to override the content in the sourcesContent field if it's // an unmodified source file. const ctx = { importer, depth, source: sourceFile || '', content: undefined, ignore: undefined, }; // Use the provided loader callback to retrieve the file's sourcemap. // TODO: We should eventually support async loading of sourcemap files. const sourceMap = loader(ctx.source, ctx); const { source, content, ignore } = ctx; // If there is a sourcemap, then we need to recurse into it to load its source files. if (sourceMap) return build$2(new TraceMap(sourceMap, source), loader, source, depth); // Else, it's an unmodified source file. // The contents of this unmodified source file can be overridden via the loader context, // allowing it to be explicitly null or a string. If it remains undefined, we fall back to // the importing sourcemap's `sourcesContent` field. const sourceContent = content !== undefined ? content : sourcesContent ? sourcesContent[i] : null; const ignored = ignore !== undefined ? ignore : ignoreList ? ignoreList.includes(i) : false; return OriginalSource(source, sourceContent, ignored); }); return MapSource(map, children); } /** * A SourceMap v3 compatible sourcemap, which only includes fields that were * provided to it. */ class SourceMap { constructor(map, options) { const out = options.decodedMappings ? toDecodedMap(map) : toEncodedMap(map); this.version = out.version; // SourceMap spec says this should be first. 
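/*
 * The `remapping` entry point defined just below drives this tree building: the loader callback
 * runs once per encountered source file and may return that file's own sourcemap (to recurse
 * into it) or a falsey value (to treat it as an original source). A minimal sketch (the map
 * variables are hypothetical):
 *
 *   const remapped = remapping(minifyMap, (file, ctx) => {
 *     if (file === 'transpiled.js') return transpileMap; // recurse into this child map
 *     return null;                                       // original, unmodified source
 *   });
 *   JSON.stringify(remapped); // a flattened SourceMap v3 object
 */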
this.file = out.file; this.mappings = out.mappings; this.names = out.names; this.ignoreList = out.ignoreList; this.sourceRoot = out.sourceRoot; this.sources = out.sources; if (!options.excludeContent) { this.sourcesContent = out.sourcesContent; } } toString() { return JSON.stringify(this); } } /** * Traces through all the mappings in the root sourcemap, through the sources * (and their sourcemaps), all the way back to the original source location. * * `loader` will be called every time we encounter a source file. If it returns * a sourcemap, we will recurse into that sourcemap to continue the trace. If * it returns a falsey value, that source file is treated as an original, * unmodified source file. * * Pass `excludeContent` to exclude any self-containing source file content * from the output sourcemap. * * Pass `decodedMappings` to receive a SourceMap with decoded (instead of * VLQ encoded) mappings. */ function remapping(input, loader, options) { const opts = typeof options === 'object' ? options : { excludeContent: !!options, decodedMappings: false }; const tree = buildSourceMapTree(input, loader); return new SourceMap(traceMappings(tree), opts); } var src$2 = {exports: {}}; var node$1 = {exports: {}}; /** * Helpers. */ var ms$1; var hasRequiredMs$1; function requireMs$1 () { if (hasRequiredMs$1) return ms$1; hasRequiredMs$1 = 1; var s = 1000; var m = s * 60; var h = m * 60; var d = h * 24; var w = d * 7; var y = d * 365.25; /** * Parse or format the given `val`. * * Options: * * - `long` verbose formatting [false] * * @param {String|Number} val * @param {Object} [options] * @throws {Error} throw an error if val is not a non-empty string or a number * @return {String|Number} * @api public */ ms$1 = function(val, options) { options = options || {}; var type = typeof val; if (type === 'string' && val.length > 0) { return parse(val); } else if (type === 'number' && isFinite(val)) { return options.long ? fmtLong(val) : fmtShort(val); } throw new Error( 'val is not a non-empty string or a valid number. val=' + JSON.stringify(val) ); }; /** * Parse the given `str` and return milliseconds. * * @param {String} str * @return {Number} * @api private */ function parse(str) { str = String(str); if (str.length > 100) { return; } var match = /^(-?(?:\d+)?\.?\d+) *(milliseconds?|msecs?|ms|seconds?|secs?|s|minutes?|mins?|m|hours?|hrs?|h|days?|d|weeks?|w|years?|yrs?|y)?$/i.exec( str ); if (!match) { return; } var n = parseFloat(match[1]); var type = (match[2] || 'ms').toLowerCase(); switch (type) { case 'years': case 'year': case 'yrs': case 'yr': case 'y': return n * y; case 'weeks': case 'week': case 'w': return n * w; case 'days': case 'day': case 'd': return n * d; case 'hours': case 'hour': case 'hrs': case 'hr': case 'h': return n * h; case 'minutes': case 'minute': case 'mins': case 'min': case 'm': return n * m; case 'seconds': case 'second': case 'secs': case 'sec': case 's': return n * s; case 'milliseconds': case 'millisecond': case 'msecs': case 'msec': case 'ms': return n; default: return undefined; } } /** * Short format for `ms`. * * @param {Number} ms * @return {String} * @api private */ function fmtShort(ms) { var msAbs = Math.abs(ms); if (msAbs >= d) { return Math.round(ms / d) + 'd'; } if (msAbs >= h) { return Math.round(ms / h) + 'h'; } if (msAbs >= m) { return Math.round(ms / m) + 'm'; } if (msAbs >= s) { return Math.round(ms / s) + 's'; } return ms + 'ms'; } /** * Long format for `ms`. 
* * @param {Number} ms * @return {String} * @api private */ function fmtLong(ms) { var msAbs = Math.abs(ms); if (msAbs >= d) { return plural(ms, msAbs, d, 'day'); } if (msAbs >= h) { return plural(ms, msAbs, h, 'hour'); } if (msAbs >= m) { return plural(ms, msAbs, m, 'minute'); } if (msAbs >= s) { return plural(ms, msAbs, s, 'second'); } return ms + ' ms'; } /** * Pluralization helper. */ function plural(ms, msAbs, n, name) { var isPlural = msAbs >= n * 1.5; return Math.round(ms / n) + ' ' + name + (isPlural ? 's' : ''); } return ms$1; } var common$b; var hasRequiredCommon; function requireCommon () { if (hasRequiredCommon) return common$b; hasRequiredCommon = 1; /** * This is the common logic for both the Node.js and web browser * implementations of `debug()`. */ function setup(env) { createDebug.debug = createDebug; createDebug.default = createDebug; createDebug.coerce = coerce; createDebug.disable = disable; createDebug.enable = enable; createDebug.enabled = enabled; createDebug.humanize = requireMs$1(); createDebug.destroy = destroy; Object.keys(env).forEach(key => { createDebug[key] = env[key]; }); /** * The currently active debug mode names, and names to skip. */ createDebug.names = []; createDebug.skips = []; /** * Map of special "%n" handling functions, for the debug "format" argument. * * Valid key names are a single, lower or upper-case letter, i.e. "n" and "N". */ createDebug.formatters = {}; /** * Selects a color for a debug namespace * @param {String} namespace The namespace string for the debug instance to be colored * @return {Number|String} An ANSI color code for the given namespace * @api private */ function selectColor(namespace) { let hash = 0; for (let i = 0; i < namespace.length; i++) { hash = ((hash << 5) - hash) + namespace.charCodeAt(i); hash |= 0; // Convert to 32bit integer } return createDebug.colors[Math.abs(hash) % createDebug.colors.length]; } createDebug.selectColor = selectColor; /** * Create a debugger with the given `namespace`. * * @param {String} namespace * @return {Function} * @api public */ function createDebug(namespace) { let prevTime; let enableOverride = null; let namespacesCache; let enabledCache; function debug(...args) { // Disabled? if (!debug.enabled) { return; } const self = debug; // Set `diff` timestamp const curr = Number(new Date()); const ms = curr - (prevTime || curr); self.diff = ms; self.prev = prevTime; self.curr = curr; prevTime = curr; args[0] = createDebug.coerce(args[0]); if (typeof args[0] !== 'string') { // Anything else let's inspect with %O args.unshift('%O'); } // Apply any `formatters` transformations let index = 0; args[0] = args[0].replace(/%([a-zA-Z%])/g, (match, format) => { // If we encounter an escaped % then don't increase the array index if (match === '%%') { return '%'; } index++; const formatter = createDebug.formatters[format]; if (typeof formatter === 'function') { const val = args[index]; match = formatter.call(self, val); // Now we need to remove `args[index]` since it's inlined in the `format` args.splice(index, 1); index--; } return match; }); // Apply env-specific formatting (colors, etc.) createDebug.formatArgs.call(self, args); const logFn = self.log || createDebug.log; logFn.apply(self, args); } debug.namespace = namespace; debug.useColors = createDebug.useColors(); debug.color = createDebug.selectColor(namespace); debug.extend = extend; debug.destroy = createDebug.destroy; // XXX Temporary. Will be removed in the next major release. 
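/*
 * A minimal sketch of the resulting debug API (the namespaces are made up; `createDebug` is the
 * function being assembled here):
 *
 *   const log = createDebug('app:db');
 *   createDebug.enable('app:*,-app:noisy'); // '*' is a wildcard, a leading '-' skips a namespace
 *   log('connected in %dms', 12);           // printed, because 'app:db' matches 'app:*'
 *   const poolLog = log.extend('pool');     // namespace becomes 'app:db:pool'
 *   createDebug.disable();                  // returns the previously enabled namespaces string
 */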
Object.defineProperty(debug, 'enabled', { enumerable: true, configurable: false, get: () => { if (enableOverride !== null) { return enableOverride; } if (namespacesCache !== createDebug.namespaces) { namespacesCache = createDebug.namespaces; enabledCache = createDebug.enabled(namespace); } return enabledCache; }, set: v => { enableOverride = v; } }); // Env-specific initialization logic for debug instances if (typeof createDebug.init === 'function') { createDebug.init(debug); } return debug; } function extend(namespace, delimiter) { const newDebug = createDebug(this.namespace + (typeof delimiter === 'undefined' ? ':' : delimiter) + namespace); newDebug.log = this.log; return newDebug; } /** * Enables a debug mode by namespaces. This can include modes * separated by a colon and wildcards. * * @param {String} namespaces * @api public */ function enable(namespaces) { createDebug.save(namespaces); createDebug.namespaces = namespaces; createDebug.names = []; createDebug.skips = []; let i; const split = (typeof namespaces === 'string' ? namespaces : '').split(/[\s,]+/); const len = split.length; for (i = 0; i < len; i++) { if (!split[i]) { // ignore empty strings continue; } namespaces = split[i].replace(/\*/g, '.*?'); if (namespaces[0] === '-') { createDebug.skips.push(new RegExp('^' + namespaces.slice(1) + '$')); } else { createDebug.names.push(new RegExp('^' + namespaces + '$')); } } } /** * Disable debug output. * * @return {String} namespaces * @api public */ function disable() { const namespaces = [ ...createDebug.names.map(toNamespace), ...createDebug.skips.map(toNamespace).map(namespace => '-' + namespace) ].join(','); createDebug.enable(''); return namespaces; } /** * Returns true if the given mode name is enabled, false otherwise. * * @param {String} name * @return {Boolean} * @api public */ function enabled(name) { if (name[name.length - 1] === '*') { return true; } let i; let len; for (i = 0, len = createDebug.skips.length; i < len; i++) { if (createDebug.skips[i].test(name)) { return false; } } for (i = 0, len = createDebug.names.length; i < len; i++) { if (createDebug.names[i].test(name)) { return true; } } return false; } /** * Convert regexp to namespace * * @param {RegExp} regxep * @return {String} namespace * @api private */ function toNamespace(regexp) { return regexp.toString() .substring(2, regexp.toString().length - 2) .replace(/\.\*\?$/, '*'); } /** * Coerce `val`. * * @param {Mixed} val * @return {Mixed} * @api private */ function coerce(val) { if (val instanceof Error) { return val.stack || val.message; } return val; } /** * XXX DO NOT USE. This is a temporary stub function. * XXX It WILL be removed in the next major release. */ function destroy() { console.warn('Instance method `debug.destroy()` is deprecated and no longer does anything. It will be removed in the next major version of `debug`.'); } createDebug.enable(createDebug.load()); return createDebug; } common$b = setup; return common$b; } /** * Module dependencies. */ var hasRequiredNode$1; function requireNode$1 () { if (hasRequiredNode$1) return node$1.exports; hasRequiredNode$1 = 1; (function (module, exports) { const tty = require$$0$3; const util = require$$0$6; /** * This is the Node.js implementation of `debug()`. */ exports.init = init; exports.log = log; exports.formatArgs = formatArgs; exports.save = save; exports.load = load; exports.useColors = useColors; exports.destroy = util.deprecate( () => {}, 'Instance method `debug.destroy()` is deprecated and no longer does anything. 
It will be removed in the next major version of `debug`.' ); /** * Colors. */ exports.colors = [6, 2, 3, 4, 5, 1]; try { // Optional dependency (as in, doesn't need to be installed, NOT like optionalDependencies in package.json) // eslint-disable-next-line import/no-extraneous-dependencies const supportsColor = require('supports-color'); if (supportsColor && (supportsColor.stderr || supportsColor).level >= 2) { exports.colors = [ 20, 21, 26, 27, 32, 33, 38, 39, 40, 41, 42, 43, 44, 45, 56, 57, 62, 63, 68, 69, 74, 75, 76, 77, 78, 79, 80, 81, 92, 93, 98, 99, 112, 113, 128, 129, 134, 135, 148, 149, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 178, 179, 184, 185, 196, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, 214, 215, 220, 221 ]; } } catch (error) { // Swallow - we only care if `supports-color` is available; it doesn't have to be. } /** * Build up the default `inspectOpts` object from the environment variables. * * $ DEBUG_COLORS=no DEBUG_DEPTH=10 DEBUG_SHOW_HIDDEN=enabled node script.js */ exports.inspectOpts = Object.keys(process.env).filter(key => { return /^debug_/i.test(key); }).reduce((obj, key) => { // Camel-case const prop = key .substring(6) .toLowerCase() .replace(/_([a-z])/g, (_, k) => { return k.toUpperCase(); }); // Coerce string value into JS value let val = process.env[key]; if (/^(yes|on|true|enabled)$/i.test(val)) { val = true; } else if (/^(no|off|false|disabled)$/i.test(val)) { val = false; } else if (val === 'null') { val = null; } else { val = Number(val); } obj[prop] = val; return obj; }, {}); /** * Is stdout a TTY? Colored output is enabled when `true`. */ function useColors() { return 'colors' in exports.inspectOpts ? Boolean(exports.inspectOpts.colors) : tty.isatty(process.stderr.fd); } /** * Adds ANSI color escape codes if enabled. * * @api public */ function formatArgs(args) { const {namespace: name, useColors} = this; if (useColors) { const c = this.color; const colorCode = '\u001B[3' + (c < 8 ? c : '8;5;' + c); const prefix = ` ${colorCode};1m${name} \u001B[0m`; args[0] = prefix + args[0].split('\n').join('\n' + prefix); args.push(colorCode + 'm+' + module.exports.humanize(this.diff) + '\u001B[0m'); } else { args[0] = getDate() + name + ' ' + args[0]; } } function getDate() { if (exports.inspectOpts.hideDate) { return ''; } return new Date().toISOString() + ' '; } /** * Invokes `util.format()` with the specified arguments and writes to stderr. */ function log(...args) { return process.stderr.write(util.format(...args) + '\n'); } /** * Save `namespaces`. * * @param {String} namespaces * @api private */ function save(namespaces) { if (namespaces) { process.env.DEBUG = namespaces; } else { // If you set a process.env field to null or undefined, it gets cast to the // string 'null' or 'undefined'. Just delete instead. delete process.env.DEBUG; } } /** * Load `namespaces`. * * @return {String} returns the previously persisted debug modes * @api private */ function load() { return process.env.DEBUG; } /** * Init logic for `debug` instances. * * Create a new `inspectOpts` object in case `useColors` is set * differently for a particular `debug` instance. */ function init(debug) { debug.inspectOpts = {}; const keys = Object.keys(exports.inspectOpts); for (let i = 0; i < keys.length; i++) { debug.inspectOpts[keys[i]] = exports.inspectOpts[keys[i]]; } } module.exports = requireCommon()(exports); const {formatters} = module.exports; /** * Map %o to `util.inspect()`, all on a single line. 
*/ formatters.o = function (v) { this.inspectOpts.colors = this.useColors; return util.inspect(v, this.inspectOpts) .split('\n') .map(str => str.trim()) .join(' '); }; /** * Map %O to `util.inspect()`, allowing multiple lines if needed. */ formatters.O = function (v) { this.inspectOpts.colors = this.useColors; return util.inspect(v, this.inspectOpts); }; } (node$1, node$1.exports)); return node$1.exports; } var browser$3 = {exports: {}}; /* eslint-env browser */ var hasRequiredBrowser$1; function requireBrowser$1 () { if (hasRequiredBrowser$1) return browser$3.exports; hasRequiredBrowser$1 = 1; (function (module, exports) { /** * This is the web browser implementation of `debug()`. */ exports.formatArgs = formatArgs; exports.save = save; exports.load = load; exports.useColors = useColors; exports.storage = localstorage(); exports.destroy = (() => { let warned = false; return () => { if (!warned) { warned = true; console.warn('Instance method `debug.destroy()` is deprecated and no longer does anything. It will be removed in the next major version of `debug`.'); } }; })(); /** * Colors. */ exports.colors = [ '#0000CC', '#0000FF', '#0033CC', '#0033FF', '#0066CC', '#0066FF', '#0099CC', '#0099FF', '#00CC00', '#00CC33', '#00CC66', '#00CC99', '#00CCCC', '#00CCFF', '#3300CC', '#3300FF', '#3333CC', '#3333FF', '#3366CC', '#3366FF', '#3399CC', '#3399FF', '#33CC00', '#33CC33', '#33CC66', '#33CC99', '#33CCCC', '#33CCFF', '#6600CC', '#6600FF', '#6633CC', '#6633FF', '#66CC00', '#66CC33', '#9900CC', '#9900FF', '#9933CC', '#9933FF', '#99CC00', '#99CC33', '#CC0000', '#CC0033', '#CC0066', '#CC0099', '#CC00CC', '#CC00FF', '#CC3300', '#CC3333', '#CC3366', '#CC3399', '#CC33CC', '#CC33FF', '#CC6600', '#CC6633', '#CC9900', '#CC9933', '#CCCC00', '#CCCC33', '#FF0000', '#FF0033', '#FF0066', '#FF0099', '#FF00CC', '#FF00FF', '#FF3300', '#FF3333', '#FF3366', '#FF3399', '#FF33CC', '#FF33FF', '#FF6600', '#FF6633', '#FF9900', '#FF9933', '#FFCC00', '#FFCC33' ]; /** * Currently only WebKit-based Web Inspectors, Firefox >= v31, * and the Firebug extension (any Firefox version) are known * to support "%c" CSS customizations. * * TODO: add a `localStorage` variable to explicitly enable/disable colors */ // eslint-disable-next-line complexity function useColors() { // NB: In an Electron preload script, document will be defined but not fully // initialized. Since we know we're in Chrome, we'll just detect this case // explicitly if (typeof window !== 'undefined' && window.process && (window.process.type === 'renderer' || window.process.__nwjs)) { return true; } // Internet Explorer and Edge do not support colors. if (typeof navigator !== 'undefined' && navigator.userAgent && navigator.userAgent.toLowerCase().match(/(edge|trident)\/(\d+)/)) { return false; } // Is webkit? http://stackoverflow.com/a/16459606/376773 // document is undefined in react-native: https://github.com/facebook/react-native/pull/1632 return (typeof document !== 'undefined' && document.documentElement && document.documentElement.style && document.documentElement.style.WebkitAppearance) || // Is firebug? http://stackoverflow.com/a/398120/376773 (typeof window !== 'undefined' && window.console && (window.console.firebug || (window.console.exception && window.console.table))) || // Is firefox >= v31? 
// https://developer.mozilla.org/en-US/docs/Tools/Web_Console#Styling_messages (typeof navigator !== 'undefined' && navigator.userAgent && navigator.userAgent.toLowerCase().match(/firefox\/(\d+)/) && parseInt(RegExp.$1, 10) >= 31) || // Double check webkit in userAgent just in case we are in a worker (typeof navigator !== 'undefined' && navigator.userAgent && navigator.userAgent.toLowerCase().match(/applewebkit\/(\d+)/)); } /** * Colorize log arguments if enabled. * * @api public */ function formatArgs(args) { args[0] = (this.useColors ? '%c' : '') + this.namespace + (this.useColors ? ' %c' : ' ') + args[0] + (this.useColors ? '%c ' : ' ') + '+' + module.exports.humanize(this.diff); if (!this.useColors) { return; } const c = 'color: ' + this.color; args.splice(1, 0, c, 'color: inherit'); // The final "%c" is somewhat tricky, because there could be other // arguments passed either before or after the %c, so we need to // figure out the correct index to insert the CSS into let index = 0; let lastC = 0; args[0].replace(/%[a-zA-Z%]/g, match => { if (match === '%%') { return; } index++; if (match === '%c') { // We only are interested in the *last* %c // (the user may have provided their own) lastC = index; } }); args.splice(lastC, 0, c); } /** * Invokes `console.debug()` when available. * No-op when `console.debug` is not a "function". * If `console.debug` is not available, falls back * to `console.log`. * * @api public */ exports.log = console.debug || console.log || (() => {}); /** * Save `namespaces`. * * @param {String} namespaces * @api private */ function save(namespaces) { try { if (namespaces) { exports.storage.setItem('debug', namespaces); } else { exports.storage.removeItem('debug'); } } catch (error) { // Swallow // XXX (@Qix-) should we be logging these? } } /** * Load `namespaces`. * * @return {String} returns the previously persisted debug modes * @api private */ function load() { let r; try { r = exports.storage.getItem('debug'); } catch (error) { // Swallow // XXX (@Qix-) should we be logging these? } // If debug isn't set in LS, and we're in Electron, try to load $DEBUG if (!r && typeof process !== 'undefined' && 'env' in process) { r = process.env.DEBUG; } return r; } /** * Localstorage attempts to return the localstorage. * * This is necessary because safari throws * when a user disables cookies/localstorage * and you attempt to access it. * * @return {LocalStorage} * @api private */ function localstorage() { try { // TVMLKit (Apple TV JS Runtime) does not have a window object, just localStorage in the global context // The Browser also has localStorage in the global context. return localStorage; } catch (error) { // Swallow // XXX (@Qix-) should we be logging these? } } module.exports = requireCommon()(exports); const {formatters} = module.exports; /** * Map %j to `JSON.stringify()`, since no Web Inspectors do that by default. */ formatters.j = function (v) { try { return JSON.stringify(v); } catch (error) { return '[UnexpectedJSONParseError]: ' + error.message; } }; } (browser$3, browser$3.exports)); return browser$3.exports; } /** * Detect Electron renderer / nwjs process, which is node, but we should * treat as a browser. 
*/ if (typeof process === 'undefined' || process.type === 'renderer' || process.browser === true || process.__nwjs) { src$2.exports = requireBrowser$1(); } else { src$2.exports = requireNode$1(); } var srcExports$1 = src$2.exports; var debug$i = /*@__PURE__*/getDefaultExportFromCjs(srcExports$1); let pnp; if (process.versions.pnp) { try { pnp = createRequire$1(import.meta.url)('pnpapi'); } catch { } } function invalidatePackageData(packageCache, pkgPath) { const pkgDir = path$o.dirname(pkgPath); packageCache.forEach((pkg, cacheKey) => { if (pkg.dir === pkgDir) { packageCache.delete(cacheKey); } }); } function resolvePackageData(pkgName, basedir, preserveSymlinks = false, packageCache) { if (pnp) { const cacheKey = getRpdCacheKey(pkgName, basedir, preserveSymlinks); if (packageCache?.has(cacheKey)) return packageCache.get(cacheKey); try { const pkg = pnp.resolveToUnqualified(pkgName, basedir, { considerBuiltins: false, }); if (!pkg) return null; const pkgData = loadPackageData(path$o.join(pkg, 'package.json')); packageCache?.set(cacheKey, pkgData); return pkgData; } catch { return null; } } const originalBasedir = basedir; while (basedir) { if (packageCache) { const cached = getRpdCache(packageCache, pkgName, basedir, originalBasedir, preserveSymlinks); if (cached) return cached; } const pkg = path$o.join(basedir, 'node_modules', pkgName, 'package.json'); try { if (fs$l.existsSync(pkg)) { const pkgPath = preserveSymlinks ? pkg : safeRealpathSync(pkg); const pkgData = loadPackageData(pkgPath); if (packageCache) { setRpdCache(packageCache, pkgData, pkgName, basedir, originalBasedir, preserveSymlinks); } return pkgData; } } catch { } const nextBasedir = path$o.dirname(basedir); if (nextBasedir === basedir) break; basedir = nextBasedir; } return null; } function findNearestPackageData(basedir, packageCache) { const originalBasedir = basedir; while (basedir) { if (packageCache) { const cached = getFnpdCache(packageCache, basedir, originalBasedir); if (cached) return cached; } const pkgPath = path$o.join(basedir, 'package.json'); if (tryStatSync(pkgPath)?.isFile()) { try { const pkgData = loadPackageData(pkgPath); if (packageCache) { setFnpdCache(packageCache, pkgData, basedir, originalBasedir); } return pkgData; } catch { } } const nextBasedir = path$o.dirname(basedir); if (nextBasedir === basedir) break; basedir = nextBasedir; } return null; } // Finds the nearest package.json with a `name` field function findNearestMainPackageData(basedir, packageCache) { const nearestPackage = findNearestPackageData(basedir, packageCache); return (nearestPackage && (nearestPackage.data.name ? nearestPackage : findNearestMainPackageData(path$o.dirname(nearestPackage.dir), packageCache))); } function loadPackageData(pkgPath) { const data = JSON.parse(fs$l.readFileSync(pkgPath, 'utf-8')); const pkgDir = path$o.dirname(pkgPath); const { sideEffects } = data; let hasSideEffects; if (typeof sideEffects === 'boolean') { hasSideEffects = () => sideEffects; } else if (Array.isArray(sideEffects)) { const finalPackageSideEffects = sideEffects.map((sideEffect) => { /* * The array accepts simple glob patterns to the relevant files... Patterns like *.css, which do not include a /, will be treated like **\/*.css. 
* https://webpack.js.org/guides/tree-shaking/ * https://github.com/vitejs/vite/pull/11807 */ if (sideEffect.includes('/')) { return sideEffect; } return `**/${sideEffect}`; }); hasSideEffects = createFilter(finalPackageSideEffects, null, { resolve: pkgDir, }); } else { hasSideEffects = () => null; } const pkg = { dir: pkgDir, data, hasSideEffects, webResolvedImports: {}, nodeResolvedImports: {}, setResolvedCache(key, entry, targetWeb) { if (targetWeb) { pkg.webResolvedImports[key] = entry; } else { pkg.nodeResolvedImports[key] = entry; } }, getResolvedCache(key, targetWeb) { if (targetWeb) { return pkg.webResolvedImports[key]; } else { return pkg.nodeResolvedImports[key]; } }, }; return pkg; } function watchPackageDataPlugin(packageCache) { // a list of files to watch before the plugin is ready const watchQueue = new Set(); const watchedDirs = new Set(); const watchFileStub = (id) => { watchQueue.add(id); }; let watchFile = watchFileStub; const setPackageData = packageCache.set.bind(packageCache); packageCache.set = (id, pkg) => { if (!isInNodeModules$1(pkg.dir) && !watchedDirs.has(pkg.dir)) { watchedDirs.add(pkg.dir); watchFile(path$o.join(pkg.dir, 'package.json')); } return setPackageData(id, pkg); }; return { name: 'vite:watch-package-data', buildStart() { watchFile = this.addWatchFile.bind(this); watchQueue.forEach(watchFile); watchQueue.clear(); }, buildEnd() { watchFile = watchFileStub; }, watchChange(id) { if (id.endsWith('/package.json')) { invalidatePackageData(packageCache, path$o.normalize(id)); } }, handleHotUpdate({ file }) { if (file.endsWith('/package.json')) { invalidatePackageData(packageCache, path$o.normalize(file)); } }, }; } /** * Get cached `resolvePackageData` value based on `basedir`. When one is found, * and we've already traversed some directories between `basedir` and `originalBasedir`, * we cache the value for those in-between directories as well. * * This makes it so the fs is only read once for a shared `basedir`. */ function getRpdCache(packageCache, pkgName, basedir, originalBasedir, preserveSymlinks) { const cacheKey = getRpdCacheKey(pkgName, basedir, preserveSymlinks); const pkgData = packageCache.get(cacheKey); if (pkgData) { traverseBetweenDirs(originalBasedir, basedir, (dir) => { packageCache.set(getRpdCacheKey(pkgName, dir, preserveSymlinks), pkgData); }); return pkgData; } } function setRpdCache(packageCache, pkgData, pkgName, basedir, originalBasedir, preserveSymlinks) { packageCache.set(getRpdCacheKey(pkgName, basedir, preserveSymlinks), pkgData); traverseBetweenDirs(originalBasedir, basedir, (dir) => { packageCache.set(getRpdCacheKey(pkgName, dir, preserveSymlinks), pkgData); }); } // package cache key for `resolvePackageData` function getRpdCacheKey(pkgName, basedir, preserveSymlinks) { return `rpd_${pkgName}_${basedir}_${preserveSymlinks}`; } /** * Get cached `findNearestPackageData` value based on `basedir`. When one is found, * and we've already traversed some directories between `basedir` and `originalBasedir`, * we cache the value for those in-between directories as well. * * This makes it so the fs is only read once for a shared `basedir`. 
*/ function getFnpdCache(packageCache, basedir, originalBasedir) { const cacheKey = getFnpdCacheKey(basedir); const pkgData = packageCache.get(cacheKey); if (pkgData) { traverseBetweenDirs(originalBasedir, basedir, (dir) => { packageCache.set(getFnpdCacheKey(dir), pkgData); }); return pkgData; } } function setFnpdCache(packageCache, pkgData, basedir, originalBasedir) { packageCache.set(getFnpdCacheKey(basedir), pkgData); traverseBetweenDirs(originalBasedir, basedir, (dir) => { packageCache.set(getFnpdCacheKey(dir), pkgData); }); } // package cache key for `findNearestPackageData` function getFnpdCacheKey(basedir) { return `fnpd_${basedir}`; } /** * Traverse between `longerDir` (inclusive) and `shorterDir` (exclusive) and call `cb` for each dir. * @param longerDir Longer dir path, e.g. `/User/foo/bar/baz` * @param shorterDir Shorter dir path, e.g. `/User/foo` */ function traverseBetweenDirs(longerDir, shorterDir, cb) { while (longerDir !== shorterDir) { cb(longerDir); longerDir = path$o.dirname(longerDir); } } const createFilter = createFilter$1; const replaceSlashOrColonRE = /[/:]/g; const replaceDotRE = /\./g; const replaceNestedIdRE = /(\s*>\s*)/g; const replaceHashRE = /#/g; const flattenId = (id) => { const flatId = limitFlattenIdLength(id .replace(replaceSlashOrColonRE, '_') .replace(replaceDotRE, '__') .replace(replaceNestedIdRE, '___') .replace(replaceHashRE, '____')); return flatId; }; const FLATTEN_ID_HASH_LENGTH = 8; const FLATTEN_ID_MAX_FILE_LENGTH = 170; const limitFlattenIdLength = (id, limit = FLATTEN_ID_MAX_FILE_LENGTH) => { if (id.length <= limit) { return id; } return id.slice(0, limit - (FLATTEN_ID_HASH_LENGTH + 1)) + '_' + getHash(id); }; const normalizeId = (id) => id.replace(replaceNestedIdRE, ' > '); // Supported by Node, Deno, Bun const NODE_BUILTIN_NAMESPACE = 'node:'; // Supported by Deno const NPM_BUILTIN_NAMESPACE = 'npm:'; // Supported by Bun const BUN_BUILTIN_NAMESPACE = 'bun:'; // Some runtimes like Bun injects namespaced modules here, which is not a node builtin const nodeBuiltins = builtinModules.filter((id) => !id.includes(':')); // TODO: Use `isBuiltin` from `node:module`, but Deno doesn't support it function isBuiltin(id) { if (process.versions.deno && id.startsWith(NPM_BUILTIN_NAMESPACE)) return true; if (process.versions.bun && id.startsWith(BUN_BUILTIN_NAMESPACE)) return true; return isNodeBuiltin(id); } function isNodeBuiltin(id) { if (id.startsWith(NODE_BUILTIN_NAMESPACE)) return true; return nodeBuiltins.includes(id); } function isInNodeModules$1(id) { return id.includes('node_modules'); } function moduleListContains(moduleList, id) { return moduleList?.some((m) => m === id || id.startsWith(withTrailingSlash(m))); } function isOptimizable(id, optimizeDeps) { const { extensions } = optimizeDeps; return (OPTIMIZABLE_ENTRY_RE.test(id) || (extensions?.some((ext) => id.endsWith(ext)) ?? 
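// with no optimizeDeps.extensions configured, the nullish fallback below is false,
// so only ids matching OPTIMIZABLE_ENTRY_RE count as optimizable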
false)); } const bareImportRE = /^(?![a-zA-Z]:)[\w@](?!.*:\/\/)/; const deepImportRE = /^([^@][^/]*)\/|^(@[^/]+\/[^/]+)\//; // TODO: use import() const _require$1 = createRequire$1(import.meta.url); function resolveDependencyVersion(dep, pkgRelativePath = '../../package.json') { const pkgPath = path$o.resolve(_require$1.resolve(dep), pkgRelativePath); return JSON.parse(fs$l.readFileSync(pkgPath, 'utf-8')).version; } const rollupVersion = resolveDependencyVersion('rollup'); // set in bin/vite.js const filter = process.env.VITE_DEBUG_FILTER; const DEBUG = process.env.DEBUG; function createDebugger(namespace, options = {}) { const log = debug$i(namespace); const { onlyWhenFocused } = options; let enabled = log.enabled; if (enabled && onlyWhenFocused) { const ns = typeof onlyWhenFocused === 'string' ? onlyWhenFocused : namespace; enabled = !!DEBUG?.includes(ns); } if (enabled) { return (...args) => { if (!filter || args.some((a) => a?.includes?.(filter))) { log(...args); } }; } } function testCaseInsensitiveFS() { if (!CLIENT_ENTRY.endsWith('client.mjs')) { throw new Error(`cannot test case insensitive FS, CLIENT_ENTRY const doesn't contain client.mjs`); } if (!fs$l.existsSync(CLIENT_ENTRY)) { throw new Error('cannot test case insensitive FS, CLIENT_ENTRY does not point to an existing file: ' + CLIENT_ENTRY); } return fs$l.existsSync(CLIENT_ENTRY.replace('client.mjs', 'cLiEnT.mjs')); } const urlCanParse = URL$3.canParse ?? // URL.canParse is supported from Node.js 18.17.0+, 20.0.0+ ((path, base) => { try { new URL$3(path, base); return true; } catch { return false; } }); const isCaseInsensitiveFS = testCaseInsensitiveFS(); const VOLUME_RE = /^[A-Z]:/i; function normalizePath$3(id) { return path$o.posix.normalize(isWindows$5 ? slash$1(id) : id); } function fsPathFromId(id) { const fsPath = normalizePath$3(id.startsWith(FS_PREFIX) ? id.slice(FS_PREFIX.length) : id); return fsPath[0] === '/' || VOLUME_RE.test(fsPath) ? 
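// already-absolute posix paths and Windows drive paths (e.g. C:/...) are returned as-is;
// anything else gets a leading slash so the result is always absolute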
fsPath : `/${fsPath}`; } function fsPathFromUrl(url) { return fsPathFromId(cleanUrl(url)); } /** * Check if dir is a parent of file * * Warning: parameters are not validated, only works with normalized absolute paths * * @param dir - normalized absolute path * @param file - normalized absolute path * @returns true if dir is a parent of file */ function isParentDirectory(dir, file) { dir = withTrailingSlash(dir); return (file.startsWith(dir) || (isCaseInsensitiveFS && file.toLowerCase().startsWith(dir.toLowerCase()))); } /** * Check if 2 file name are identical * * Warning: parameters are not validated, only works with normalized absolute paths * * @param file1 - normalized absolute path * @param file2 - normalized absolute path * @returns true if both files url are identical */ function isSameFileUri(file1, file2) { return (file1 === file2 || (isCaseInsensitiveFS && file1.toLowerCase() === file2.toLowerCase())); } const externalRE = /^(https?:)?\/\//; const isExternalUrl = (url) => externalRE.test(url); const dataUrlRE = /^\s*data:/i; const isDataUrl = (url) => dataUrlRE.test(url); const virtualModuleRE = /^virtual-module:.*/; const virtualModulePrefix = 'virtual-module:'; const knownJsSrcRE = /\.(?:[jt]sx?|m[jt]s|vue|marko|svelte|astro|imba|mdx)(?:$|\?)/; const isJSRequest = (url) => { url = cleanUrl(url); if (knownJsSrcRE.test(url)) { return true; } if (!path$o.extname(url) && url[url.length - 1] !== '/') { return true; } return false; }; const knownTsRE = /\.(?:ts|mts|cts|tsx)(?:$|\?)/; const isTsRequest = (url) => knownTsRE.test(url); const importQueryRE = /(\?|&)import=?(?:&|$)/; const directRequestRE$1 = /(\?|&)direct=?(?:&|$)/; const internalPrefixes = [ FS_PREFIX, VALID_ID_PREFIX, CLIENT_PUBLIC_PATH, ENV_PUBLIC_PATH, ]; const InternalPrefixRE = new RegExp(`^(?:${internalPrefixes.join('|')})`); const trailingSeparatorRE = /[?&]$/; const isImportRequest = (url) => importQueryRE.test(url); const isInternalRequest = (url) => InternalPrefixRE.test(url); function removeImportQuery(url) { return url.replace(importQueryRE, '$1').replace(trailingSeparatorRE, ''); } function removeDirectQuery(url) { return url.replace(directRequestRE$1, '$1').replace(trailingSeparatorRE, ''); } const urlRE = /(\?|&)url(?:&|$)/; const rawRE = /(\?|&)raw(?:&|$)/; function removeUrlQuery(url) { return url.replace(urlRE, '$1').replace(trailingSeparatorRE, ''); } const replacePercentageRE = /%/g; function injectQuery(url, queryToInject) { // encode percents for consistent behavior with pathToFileURL // see #2614 for details const resolvedUrl = new URL$3(url.replace(replacePercentageRE, '%25'), 'relative:///'); const { search, hash } = resolvedUrl; let pathname = cleanUrl(url); pathname = isWindows$5 ? slash$1(pathname) : pathname; return `${pathname}?${queryToInject}${search ? `&` + search.slice(1) : ''}${hash ?? 
''}`; } const timestampRE = /\bt=\d{13}&?\b/; function removeTimestampQuery(url) { return url.replace(timestampRE, '').replace(trailingSeparatorRE, ''); } async function asyncReplace(input, re, replacer) { let match; let remaining = input; let rewritten = ''; while ((match = re.exec(remaining))) { rewritten += remaining.slice(0, match.index); rewritten += await replacer(match); remaining = remaining.slice(match.index + match[0].length); } rewritten += remaining; return rewritten; } function timeFrom(start, subtract = 0) { const time = performance.now() - start - subtract; const timeString = (time.toFixed(2) + `ms`).padEnd(5, ' '); if (time < 10) { return colors$1.green(timeString); } else if (time < 50) { return colors$1.yellow(timeString); } else { return colors$1.red(timeString); } } /** * pretty url for logging. */ function prettifyUrl(url, root) { url = removeTimestampQuery(url); const isAbsoluteFile = url.startsWith(root); if (isAbsoluteFile || url.startsWith(FS_PREFIX)) { const file = path$o.posix.relative(root, isAbsoluteFile ? url : fsPathFromId(url)); return colors$1.dim(file); } else { return colors$1.dim(url); } } function isObject$1(value) { return Object.prototype.toString.call(value) === '[object Object]'; } function isDefined(value) { return value != null; } function tryStatSync(file) { try { // The "throwIfNoEntry" is a performance optimization for cases where the file does not exist return fs$l.statSync(file, { throwIfNoEntry: false }); } catch { // Ignore errors } } function lookupFile(dir, fileNames) { while (dir) { for (const fileName of fileNames) { const fullPath = path$o.join(dir, fileName); if (tryStatSync(fullPath)?.isFile()) return fullPath; } const parentDir = path$o.dirname(dir); if (parentDir === dir) return; dir = parentDir; } } function isFilePathESM(filePath, packageCache) { if (/\.m[jt]s$/.test(filePath)) { return true; } else if (/\.c[jt]s$/.test(filePath)) { return false; } else { // check package.json for type: "module" try { const pkg = findNearestPackageData(path$o.dirname(filePath), packageCache); return pkg?.data.type === 'module'; } catch { return false; } } } const splitRE = /\r?\n/; const range = 2; function pad$1(source, n = 2) { const lines = source.split(splitRE); return lines.map((l) => ` `.repeat(n) + l).join(`\n`); } function posToNumber(source, pos) { if (typeof pos === 'number') return pos; const lines = source.split(splitRE); const { line, column } = pos; let start = 0; for (let i = 0; i < line - 1 && i < lines.length; i++) { start += lines[i].length + 1; } return start + column; } function numberToPos(source, offset) { if (typeof offset !== 'number') return offset; if (offset > source.length) { throw new Error(`offset is longer than source length! offset ${offset} > length ${source.length}`); } const lines = source.split(splitRE); let counted = 0; let line = 0; let column = 0; for (; line < lines.length; line++) { const lineLength = lines[line].length + 1; if (counted + lineLength >= offset) { column = offset - counted + 1; break; } counted += lineLength; } return { line: line + 1, column }; } function generateCodeFrame(source, start = 0, end) { start = Math.max(posToNumber(source, start), 0); end = Math.min(end !== undefined ? 
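// when `end` is omitted it defaults to `start`, and the offset is clamped to the source
// length so the code frame never reads past the end of the file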
posToNumber(source, end) : start, source.length); const lines = source.split(splitRE); let count = 0; const res = []; for (let i = 0; i < lines.length; i++) { count += lines[i].length; if (count >= start) { for (let j = i - range; j <= i + range || end > count; j++) { if (j < 0 || j >= lines.length) continue; const line = j + 1; res.push(`${line}${' '.repeat(Math.max(3 - String(line).length, 0))}| ${lines[j]}`); const lineLength = lines[j].length; if (j === i) { // push underline const pad = Math.max(start - (count - lineLength), 0); const length = Math.max(1, end > count ? lineLength - pad : end - start); res.push(` | ` + ' '.repeat(pad) + '^'.repeat(length)); } else if (j > i) { if (end > count) { const length = Math.max(Math.min(end - count, lineLength), 1); res.push(` | ` + '^'.repeat(length)); } count += lineLength + 1; } } break; } count++; } return res.join('\n'); } function isFileReadable(filename) { if (!tryStatSync(filename)) { return false; } try { // Check if current process has read permission to the file fs$l.accessSync(filename, fs$l.constants.R_OK); return true; } catch { return false; } } const splitFirstDirRE = /(.+?)[\\/](.+)/; /** * Delete every file and subdirectory. **The given directory must exist.** * Pass an optional `skip` array to preserve files under the root directory. */ function emptyDir(dir, skip) { const skipInDir = []; let nested = null; if (skip?.length) { for (const file of skip) { if (path$o.dirname(file) !== '.') { const matched = file.match(splitFirstDirRE); if (matched) { nested ??= new Map(); const [, nestedDir, skipPath] = matched; let nestedSkip = nested.get(nestedDir); if (!nestedSkip) { nestedSkip = []; nested.set(nestedDir, nestedSkip); } if (!nestedSkip.includes(skipPath)) { nestedSkip.push(skipPath); } } } else { skipInDir.push(file); } } } for (const file of fs$l.readdirSync(dir)) { if (skipInDir.includes(file)) { continue; } if (nested?.has(file)) { emptyDir(path$o.resolve(dir, file), nested.get(file)); } else { fs$l.rmSync(path$o.resolve(dir, file), { recursive: true, force: true }); } } } function copyDir(srcDir, destDir) { fs$l.mkdirSync(destDir, { recursive: true }); for (const file of fs$l.readdirSync(srcDir)) { const srcFile = path$o.resolve(srcDir, file); if (srcFile === destDir) { continue; } const destFile = path$o.resolve(destDir, file); const stat = fs$l.statSync(srcFile); if (stat.isDirectory()) { copyDir(srcFile, destFile); } else { fs$l.copyFileSync(srcFile, destFile); } } } const ERR_SYMLINK_IN_RECURSIVE_READDIR = 'ERR_SYMLINK_IN_RECURSIVE_READDIR'; async function recursiveReaddir(dir) { if (!fs$l.existsSync(dir)) { return []; } let dirents; try { dirents = await fsp.readdir(dir, { withFileTypes: true }); } catch (e) { if (e.code === 'EACCES') { // Ignore permission errors return []; } throw e; } if (dirents.some((dirent) => dirent.isSymbolicLink())) { const err = new Error('Symbolic links are not supported in recursiveReaddir'); err.code = ERR_SYMLINK_IN_RECURSIVE_READDIR; throw err; } const files = await Promise.all(dirents.map((dirent) => { const res = path$o.resolve(dir, dirent.name); return dirent.isDirectory() ? recursiveReaddir(res) : normalizePath$3(res); })); return files.flat(1); } // `fs.realpathSync.native` resolves differently in Windows network drive, // causing file read errors. skip for now. // https://github.com/nodejs/node/issues/37737 let safeRealpathSync = isWindows$5 ? 
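// the Windows wrapper below lazily shells out to `net use` once to learn network-drive
// mappings; other platforms call the faster fs.realpathSync.native directly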
windowsSafeRealPathSync : fs$l.realpathSync.native; // Based on https://github.com/larrybahr/windows-network-drive // MIT License, Copyright (c) 2017 Larry Bahr const windowsNetworkMap = new Map(); function windowsMappedRealpathSync(path) { const realPath = fs$l.realpathSync.native(path); if (realPath.startsWith('\\\\')) { for (const [network, volume] of windowsNetworkMap) { if (realPath.startsWith(network)) return realPath.replace(network, volume); } } return realPath; } const parseNetUseRE = /^(\w+)? +(\w:) +([^ ]+)\s/; let firstSafeRealPathSyncRun = false; function windowsSafeRealPathSync(path) { if (!firstSafeRealPathSyncRun) { optimizeSafeRealPathSync(); firstSafeRealPathSyncRun = true; } return fs$l.realpathSync(path); } function optimizeSafeRealPathSync() { // Skip if using Node <18.10 due to MAX_PATH issue: https://github.com/vitejs/vite/issues/12931 const nodeVersion = process.versions.node.split('.').map(Number); if (nodeVersion[0] < 18 || (nodeVersion[0] === 18 && nodeVersion[1] < 10)) { safeRealpathSync = fs$l.realpathSync; return; } // Check the availability `fs.realpathSync.native` // in Windows virtual and RAM disks that bypass the Volume Mount Manager, in programs such as imDisk // get the error EISDIR: illegal operation on a directory try { fs$l.realpathSync.native(path$o.resolve('./')); } catch (error) { if (error.message.includes('EISDIR: illegal operation on a directory')) { safeRealpathSync = fs$l.realpathSync; return; } } exec('net use', (error, stdout) => { if (error) return; const lines = stdout.split('\n'); // OK Y: \\NETWORKA\Foo Microsoft Windows Network // OK Z: \\NETWORKA\Bar Microsoft Windows Network for (const line of lines) { const m = line.match(parseNetUseRE); if (m) windowsNetworkMap.set(m[3], m[2]); } if (windowsNetworkMap.size === 0) { safeRealpathSync = fs$l.realpathSync.native; } else { safeRealpathSync = windowsMappedRealpathSync; } }); } function ensureWatchedFile(watcher, file, root) { if (file && // only need to watch if out of root !file.startsWith(withTrailingSlash(root)) && // some rollup plugins use null bytes for private resolved Ids !file.includes('\0') && fs$l.existsSync(file)) { // resolve file to normalized system path watcher.add(path$o.resolve(file)); } } const escapedSpaceCharacters = /( |\\t|\\n|\\f|\\r)+/g; const imageSetUrlRE = /^(?:[\w\-]+\(.*?\)|'.*?'|".*?"|\S*)/; function joinSrcset(ret) { return ret .map(({ url, descriptor }) => url + (descriptor ? ` ${descriptor}` : '')) .join(', '); } function splitSrcSetDescriptor(srcs) { return splitSrcSet(srcs) .map((s) => { const src = s.replace(escapedSpaceCharacters, ' ').trim(); const url = imageSetUrlRE.exec(src)?.[0] ?? ''; return { url, descriptor: src.slice(url.length).trim(), }; }) .filter(({ url }) => !!url); } function processSrcSet(srcs, replacer) { return Promise.all(splitSrcSetDescriptor(srcs).map(async ({ url, descriptor }) => ({ url: await replacer({ url, descriptor }), descriptor, }))).then(joinSrcset); } function processSrcSetSync(srcs, replacer) { return joinSrcset(splitSrcSetDescriptor(srcs).map(({ url, descriptor }) => ({ url: replacer({ url, descriptor }), descriptor, }))); } const cleanSrcSetRE = /(?:url|image|gradient|cross-fade)\([^)]*\)|"([^"]|(?<=\\)")*"|'([^']|(?<=\\)')*'|data:\w+\/[\w.+\-]+;base64,[\w+/=]+/g; function splitSrcSet(srcs) { const parts = []; // There could be a ',' inside of url(data:...), linear-gradient(...), "data:..." or data:... 
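// illustrative example (not part of the original source):
//   splitSrcSet('a.png 1x, b.png 2x') -> ['a.png 1x', ' b.png 2x']
// commas inside url(...), gradients, quoted strings and data: URIs are blanked out
// first, so only top-level commas split the set (descriptors are trimmed later)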
const cleanedSrcs = srcs.replace(cleanSrcSetRE, blankReplacer); let startIndex = 0; let splitIndex; do { splitIndex = cleanedSrcs.indexOf(',', startIndex); parts.push(srcs.slice(startIndex, splitIndex !== -1 ? splitIndex : undefined)); startIndex = splitIndex + 1; } while (splitIndex !== -1); return parts; } const windowsDriveRE = /^[A-Z]:/; const replaceWindowsDriveRE = /^([A-Z]):\//; const linuxAbsolutePathRE = /^\/[^/]/; function escapeToLinuxLikePath(path) { if (windowsDriveRE.test(path)) { return path.replace(replaceWindowsDriveRE, '/windows/$1/'); } if (linuxAbsolutePathRE.test(path)) { return `/linux${path}`; } return path; } const revertWindowsDriveRE = /^\/windows\/([A-Z])\//; function unescapeToLinuxLikePath(path) { if (path.startsWith('/linux/')) { return path.slice('/linux'.length); } if (path.startsWith('/windows/')) { return path.replace(revertWindowsDriveRE, '$1:/'); } return path; } // based on https://github.com/sveltejs/svelte/blob/abf11bb02b2afbd3e4cac509a0f70e318c306364/src/compiler/utils/mapped_code.ts#L221 const nullSourceMap = { names: [], sources: [], mappings: '', version: 3, }; function combineSourcemaps(filename, sourcemapList) { if (sourcemapList.length === 0 || sourcemapList.every((m) => m.sources.length === 0)) { return { ...nullSourceMap }; } // hack for parse broken with normalized absolute paths on windows (C:/path/to/something). // escape them to linux like paths // also avoid mutation here to prevent breaking plugin's using cache to generate sourcemaps like vue (see #7442) sourcemapList = sourcemapList.map((sourcemap) => { const newSourcemaps = { ...sourcemap }; newSourcemaps.sources = sourcemap.sources.map((source) => source ? escapeToLinuxLikePath(source) : null); if (sourcemap.sourceRoot) { newSourcemaps.sourceRoot = escapeToLinuxLikePath(sourcemap.sourceRoot); } return newSourcemaps; }); const escapedFilename = escapeToLinuxLikePath(filename); // We don't declare type here so we can convert/fake/map as RawSourceMap let map; //: SourceMap let mapIndex = 1; const useArrayInterface = sourcemapList.slice(0, -1).find((m) => m.sources.length !== 1) === undefined; if (useArrayInterface) { map = remapping(sourcemapList, () => null); } else { map = remapping(sourcemapList[0], function loader(sourcefile) { if (sourcefile === escapedFilename && sourcemapList[mapIndex]) { return sourcemapList[mapIndex++]; } else { return null; } }); } if (!map.file) { delete map.file; } // unescape the previous hack map.sources = map.sources.map((source) => source ? unescapeToLinuxLikePath(source) : source); map.file = filename; return map; } function unique(arr) { return Array.from(new Set(arr)); } /** * Returns resolved localhost address when `dns.lookup` result differs from DNS * * `dns.lookup` result is same when defaultResultOrder is `verbatim`. * Even if defaultResultOrder is `ipv4first`, `dns.lookup` result maybe same. * For example, when IPv6 is not supported on that machine/network. */ async function getLocalhostAddressIfDiffersFromDNS() { const [nodeResult, dnsResult] = await Promise.all([ promises.lookup('localhost'), promises.lookup('localhost', { verbatim: true }), ]); const isSame = nodeResult.family === dnsResult.family && nodeResult.address === dnsResult.address; return isSame ? 
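// resolves to undefined when both lookups agree (nothing to rewrite), otherwise to the
// address Node's default `dns.lookup` order picked for localhost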
undefined : nodeResult.address; } function diffDnsOrderChange(oldUrls, newUrls) { return !(oldUrls === newUrls || (oldUrls && newUrls && arrayEqual(oldUrls.local, newUrls.local) && arrayEqual(oldUrls.network, newUrls.network))); } async function resolveHostname(optionsHost) { let host; if (optionsHost === undefined || optionsHost === false) { // Use a secure default host = 'localhost'; } else if (optionsHost === true) { // If passed --host in the CLI without arguments host = undefined; // undefined typically means 0.0.0.0 or :: (listen on all IPs) } else { host = optionsHost; } // Set host name to localhost when possible let name = host === undefined || wildcardHosts.has(host) ? 'localhost' : host; if (host === 'localhost') { // See #8647 for more details. const localhostAddr = await getLocalhostAddressIfDiffersFromDNS(); if (localhostAddr) { name = localhostAddr; } } return { host, name }; } async function resolveServerUrls(server, options, config) { const address = server.address(); const isAddressInfo = (x) => x?.address; if (!isAddressInfo(address)) { return { local: [], network: [] }; } const local = []; const network = []; const hostname = await resolveHostname(options.host); const protocol = options.https ? 'https' : 'http'; const port = address.port; const base = config.rawBase === './' || config.rawBase === '' ? '/' : config.rawBase; if (hostname.host !== undefined && !wildcardHosts.has(hostname.host)) { let hostnameName = hostname.name; // ipv6 host if (hostnameName.includes(':')) { hostnameName = `[${hostnameName}]`; } const address = `${protocol}://${hostnameName}:${port}${base}`; if (loopbackHosts.has(hostname.host)) { local.push(address); } else { network.push(address); } } else { Object.values(os$4.networkInterfaces()) .flatMap((nInterface) => nInterface ?? []) .filter((detail) => detail && detail.address && (detail.family === 'IPv4' || // @ts-expect-error Node 18.0 - 18.3 returns number detail.family === 4)) .forEach((detail) => { let host = detail.address.replace('127.0.0.1', hostname.name); // ipv6 host if (host.includes(':')) { host = `[${host}]`; } const url = `${protocol}://${host}:${port}${base}`; if (detail.address.includes('127.0.0.1')) { local.push(url); } else { network.push(url); } }); } return { local, network }; } function arraify(target) { return Array.isArray(target) ? target : [target]; } // Taken from https://stackoverflow.com/a/36328890 const multilineCommentsRE = /\/\*[^*]*\*+(?:[^/*][^*]*\*+)*\//g; const singlelineCommentsRE = /\/\/.*/g; const requestQuerySplitRE = /\?(?!.*[/|}])/; const requestQueryMaybeEscapedSplitRE = /\\?\?(?!.*[/|}])/; const blankReplacer = (match) => ' '.repeat(match.length); function getHash(text, length = 8) { const h = createHash$2('sha256').update(text).digest('hex').substring(0, length); if (length <= 64) return h; return h.padEnd(length, '_'); } const _dirname = path$o.dirname(fileURLToPath(import.meta.url)); const requireResolveFromRootWithFallback = (root, id) => { // check existence first, so if the package is not found, // it won't be cached by nodejs, since there isn't a way to invalidate them: // https://github.com/nodejs/node/issues/44663 const found = resolvePackageData(id, root) || resolvePackageData(id, _dirname); if (!found) { const error = new Error(`${JSON.stringify(id)} not found.`); error.code = 'MODULE_NOT_FOUND'; throw error; } // actually resolve // Search in the root directory first, and fallback to the default require paths. 
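// hypothetical usage (the id is an example, not from this file):
//   const resolved = requireResolveFromRootWithFallback(config.root, 'typescript')
// the existence check above prevents a failed lookup from being cached by Node's resolver,
// and the call below falls back to this file's directory when the project root does not
// contain the package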
return _require$1.resolve(id, { paths: [root, _dirname] }); }; function emptyCssComments(raw) { return raw.replace(multilineCommentsRE, blankReplacer); } function backwardCompatibleWorkerPlugins(plugins) { if (Array.isArray(plugins)) { return plugins; } if (typeof plugins === 'function') { return plugins(); } return []; } function mergeConfigRecursively(defaults, overrides, rootPath) { const merged = { ...defaults }; for (const key in overrides) { const value = overrides[key]; if (value == null) { continue; } const existing = merged[key]; if (existing == null) { merged[key] = value; continue; } // fields that require special handling if (key === 'alias' && (rootPath === 'resolve' || rootPath === '')) { merged[key] = mergeAlias(existing, value); continue; } else if (key === 'assetsInclude' && rootPath === '') { merged[key] = [].concat(existing, value); continue; } else if (key === 'noExternal' && rootPath === 'ssr' && (existing === true || value === true)) { merged[key] = true; continue; } else if (key === 'plugins' && rootPath === 'worker') { merged[key] = () => [ ...backwardCompatibleWorkerPlugins(existing), ...backwardCompatibleWorkerPlugins(value), ]; continue; } if (Array.isArray(existing) || Array.isArray(value)) { merged[key] = [...arraify(existing), ...arraify(value)]; continue; } if (isObject$1(existing) && isObject$1(value)) { merged[key] = mergeConfigRecursively(existing, value, rootPath ? `${rootPath}.${key}` : key); continue; } merged[key] = value; } return merged; } function mergeConfig(defaults, overrides, isRoot = true) { if (typeof defaults === 'function' || typeof overrides === 'function') { throw new Error(`Cannot merge config in form of callback`); } return mergeConfigRecursively(defaults, overrides, isRoot ? '' : '.'); } function mergeAlias(a, b) { if (!a) return b; if (!b) return a; if (isObject$1(a) && isObject$1(b)) { return { ...a, ...b }; } // the order is flipped because the alias is resolved from top-down, // where the later should have higher priority return [...normalizeAlias(b), ...normalizeAlias(a)]; } function normalizeAlias(o = []) { return Array.isArray(o) ? o.map(normalizeSingleAlias) : Object.keys(o).map((find) => normalizeSingleAlias({ find, replacement: o[find], })); } // https://github.com/vitejs/vite/issues/1363 // work around https://github.com/rollup/plugins/issues/759 function normalizeSingleAlias({ find, replacement, customResolver, }) { if (typeof find === 'string' && find[find.length - 1] === '/' && replacement[replacement.length - 1] === '/') { find = find.slice(0, find.length - 1); replacement = replacement.slice(0, replacement.length - 1); } const alias = { find, replacement, }; if (customResolver) { alias.customResolver = customResolver; } return alias; } /** * Transforms transpiled code result where line numbers aren't altered, * so we can skip sourcemap generation during dev */ function transformStableResult(s, id, config) { return { code: s.toString(), map: config.command === 'build' && config.build.sourcemap ? s.generateMap({ hires: 'boundary', source: id }) : null, }; } async function asyncFlatten(arr) { do { arr = (await Promise.all(arr)).flat(Infinity); } while (arr.some((v) => v?.then)); return arr; } // strip UTF-8 BOM function stripBomTag(content) { if (content.charCodeAt(0) === 0xfeff) { return content.slice(1); } return content; } const windowsDrivePathPrefixRE = /^[A-Za-z]:[/\\]/; /** * path.isAbsolute also returns true for drive relative paths on windows (e.g. 
/something) * this function returns false for them but true for absolute paths (e.g. C:/something) */ const isNonDriveRelativeAbsolutePath = (p) => { if (!isWindows$5) return p[0] === '/'; return windowsDrivePathPrefixRE.test(p); }; /** * Determine if a file is being requested with the correct case, to ensure * consistent behavior between dev and prod and across operating systems. */ function shouldServeFile(filePath, root) { // can skip case check on Linux if (!isCaseInsensitiveFS) return true; return hasCorrectCase(filePath, root); } /** * Note that we can't use realpath here, because we don't want to follow * symlinks. */ function hasCorrectCase(file, assets) { if (file === assets) return true; const parent = path$o.dirname(file); if (fs$l.readdirSync(parent).includes(path$o.basename(file))) { return hasCorrectCase(parent, assets); } return false; } function joinUrlSegments(a, b) { if (!a || !b) { return a || b || ''; } if (a[a.length - 1] === '/') { a = a.substring(0, a.length - 1); } if (b[0] !== '/') { b = '/' + b; } return a + b; } function removeLeadingSlash(str) { return str[0] === '/' ? str.slice(1) : str; } function stripBase(path, base) { if (path === base) { return '/'; } const devBase = withTrailingSlash(base); return path.startsWith(devBase) ? path.slice(devBase.length - 1) : path; } function arrayEqual(a, b) { if (a === b) return true; if (a.length !== b.length) return false; for (let i = 0; i < a.length; i++) { if (a[i] !== b[i]) return false; } return true; } function evalValue(rawValue) { const fn = new Function(` var console, exports, global, module, process, require return (\n${rawValue}\n) `); return fn(); } function getNpmPackageName(importPath) { const parts = importPath.split('/'); if (parts[0][0] === '@') { if (!parts[1]) return null; return `${parts[0]}/${parts[1]}`; } else { return parts[0]; } } const escapeRegexRE = /[-/\\^$*+?.()|[\]{}]/g; function escapeRegex(str) { return str.replace(escapeRegexRE, '\\$&'); } function getPackageManagerCommand(type = 'install') { const packageManager = process.env.npm_config_user_agent?.split(' ')[0].split('/')[0] || 'npm'; switch (type) { case 'install': return packageManager === 'npm' ? 'npm install' : `${packageManager} add`; case 'uninstall': return packageManager === 'npm' ? 'npm uninstall' : `${packageManager} remove`; case 'update': return packageManager === 'yarn' ? 
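// yarn's verb differs: `yarn upgrade` rather than the `<pm> update` used by other managers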
'yarn upgrade' : `${packageManager} update`; default: throw new TypeError(`Unknown command type: ${type}`); } } function isDevServer(server) { return 'pluginContainer' in server; } function promiseWithResolvers() { let resolve; let reject; const promise = new Promise((_resolve, _reject) => { resolve = _resolve; reject = _reject; }); return { promise, resolve, reject }; } function createSerialPromiseQueue() { let previousTask; return { async run(f) { const thisTask = f(); // wait for both the previous task and this task // so that this function resolves in the order this function is called const depTasks = Promise.all([previousTask, thisTask]); previousTask = depTasks; const [, result] = await depTasks; // this task was the last one, clear `previousTask` to free up memory if (previousTask === depTasks) { previousTask = undefined; } return result; }, }; } function sortObjectKeys(obj) { const sorted = {}; for (const key of Object.keys(obj).sort()) { sorted[key] = obj[key]; } return sorted; } function displayTime(time) { // display: {X}ms if (time < 1000) { return `${time}ms`; } time = time / 1000; // display: {X}s if (time < 60) { return `${time.toFixed(2)}s`; } const mins = parseInt((time / 60).toString()); const seconds = time % 60; // display: {X}m {Y}s return `${mins}m${seconds < 1 ? '' : ` ${seconds.toFixed(0)}s`}`; } /* eslint no-console: 0 */ const LogLevels = { silent: 0, error: 1, warn: 2, info: 3, }; let lastType; let lastMsg; let sameCount = 0; function clearScreen() { const repeatCount = process.stdout.rows - 2; const blank = repeatCount > 0 ? '\n'.repeat(repeatCount) : ''; console.log(blank); readline.cursorTo(process.stdout, 0, 0); readline.clearScreenDown(process.stdout); } // Only initialize the timeFormatter when the timestamp option is used, and // reuse it across all loggers let timeFormatter; function getTimeFormatter() { timeFormatter ??= new Intl.DateTimeFormat(undefined, { hour: 'numeric', minute: 'numeric', second: 'numeric', }); return timeFormatter; } function createLogger(level = 'info', options = {}) { if (options.customLogger) { return options.customLogger; } const loggedErrors = new WeakSet(); const { prefix = '[vite]', allowClearScreen = true } = options; const thresh = LogLevels[level]; const canClearScreen = allowClearScreen && process.stdout.isTTY && !process.env.CI; const clear = canClearScreen ? clearScreen : () => { }; function format(type, msg, options = {}) { if (options.timestamp) { const tag = type === 'info' ? colors$1.cyan(colors$1.bold(prefix)) : type === 'warn' ? colors$1.yellow(colors$1.bold(prefix)) : colors$1.red(colors$1.bold(prefix)); return `${colors$1.dim(getTimeFormatter().format(new Date()))} ${tag} ${msg}`; } else { return msg; } } function output(type, msg, options = {}) { if (thresh >= LogLevels[type]) { const method = type === 'info' ? 
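// `info` messages go through console.log; `warn` and `error` use their matching console methods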
'log' : type; if (options.error) { loggedErrors.add(options.error); } if (canClearScreen) { if (type === lastType && msg === lastMsg) { sameCount++; clear(); console[method](format(type, msg, options), colors$1.yellow(`(x${sameCount + 1})`)); } else { sameCount = 0; lastMsg = msg; lastType = type; if (options.clear) { clear(); } console[method](format(type, msg, options)); } } else { console[method](format(type, msg, options)); } } } const warnedMessages = new Set(); const logger = { hasWarned: false, info(msg, opts) { output('info', msg, opts); }, warn(msg, opts) { logger.hasWarned = true; output('warn', msg, opts); }, warnOnce(msg, opts) { if (warnedMessages.has(msg)) return; logger.hasWarned = true; output('warn', msg, opts); warnedMessages.add(msg); }, error(msg, opts) { logger.hasWarned = true; output('error', msg, opts); }, clearScreen(type) { if (thresh >= LogLevels[type]) { clear(); } }, hasErrorLogged(error) { return loggedErrors.has(error); }, }; return logger; } function printServerUrls(urls, optionsHost, info) { const colorUrl = (url) => colors$1.cyan(url.replace(/:(\d+)\//, (_, port) => `:${colors$1.bold(port)}/`)); for (const url of urls.local) { info(` ${colors$1.green('➜')} ${colors$1.bold('Local')}: ${colorUrl(url)}`); } for (const url of urls.network) { info(` ${colors$1.green('➜')} ${colors$1.bold('Network')}: ${colorUrl(url)}`); } if (urls.network.length === 0 && optionsHost === undefined) { info(colors$1.dim(` ${colors$1.green('➜')} ${colors$1.bold('Network')}: use `) + colors$1.bold('--host') + colors$1.dim(' to expose')); } } const groups = [ { name: 'Assets', color: colors$1.green }, { name: 'CSS', color: colors$1.magenta }, { name: 'JS', color: colors$1.cyan }, ]; const COMPRESSIBLE_ASSETS_RE = /\.(?:html|json|svg|txt|xml|xhtml)$/; function buildReporterPlugin(config) { const compress = promisify$4(gzip); const chunkLimit = config.build.chunkSizeWarningLimit; const numberFormatter = new Intl.NumberFormat('en', { maximumFractionDigits: 2, minimumFractionDigits: 2, }); const displaySize = (bytes) => { return `${numberFormatter.format(bytes / 1000)} kB`; }; const tty = process.stdout.isTTY && !process.env.CI; const shouldLogInfo = LogLevels[config.logLevel || 'info'] >= LogLevels.info; let hasTransformed = false; let hasRenderedChunk = false; let hasCompressChunk = false; let transformedCount = 0; let chunkCount = 0; let compressedCount = 0; async function getCompressedSize(code) { if (config.build.ssr || !config.build.reportCompressedSize) { return null; } if (shouldLogInfo && !hasCompressChunk) { if (!tty) { config.logger.info('computing gzip size...'); } else { writeLine('computing gzip size (0)...'); } hasCompressChunk = true; } const compressed = await compress(typeof code === 'string' ? 
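// gzip accepts strings directly; binary chunk sources are wrapped in a Buffer first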
code : Buffer.from(code)); compressedCount++; if (shouldLogInfo && tty) { writeLine(`computing gzip size (${compressedCount})...`); } return compressed.length; } const logTransform = throttle((id) => { writeLine(`transforming (${transformedCount}) ${colors$1.dim(path$o.relative(config.root, id))}`); }); return { name: 'vite:reporter', transform(_, id) { transformedCount++; if (shouldLogInfo) { if (!tty) { if (!hasTransformed) { config.logger.info(`transforming...`); } } else { if (id.includes(`?`)) return; logTransform(id); } hasTransformed = true; } return null; }, buildStart() { transformedCount = 0; }, buildEnd() { if (shouldLogInfo) { if (tty) { clearLine$1(); } config.logger.info(`${colors$1.green(`✓`)} ${transformedCount} modules transformed.`); } }, renderStart() { chunkCount = 0; compressedCount = 0; }, renderChunk(code, chunk, options) { if (!options.inlineDynamicImports) { for (const id of chunk.moduleIds) { const module = this.getModuleInfo(id); if (!module) continue; // When a dynamic importer shares a chunk with the imported module, // warn that the dynamic imported module will not be moved to another chunk (#12850). if (module.importers.length && module.dynamicImporters.length) { // Filter out the intersection of dynamic importers and sibling modules in // the same chunk. The intersecting dynamic importers' dynamic import is not // expected to work. Note we're only detecting the direct ineffective // dynamic import here. const detectedIneffectiveDynamicImport = module.dynamicImporters.some((id) => !isInNodeModules$1(id) && chunk.moduleIds.includes(id)); if (detectedIneffectiveDynamicImport) { this.warn(`\n(!) ${module.id} is dynamically imported by ${module.dynamicImporters.join(', ')} but also statically imported by ${module.importers.join(', ')}, dynamic import will not move module into another chunk.\n`); } } } } chunkCount++; if (shouldLogInfo) { if (!tty) { if (!hasRenderedChunk) { config.logger.info('rendering chunks...'); } } else { writeLine(`rendering chunks (${chunkCount})...`); } hasRenderedChunk = true; } return null; }, generateBundle() { if (shouldLogInfo && tty) clearLine$1(); }, async writeBundle({ dir: outDir }, output) { let hasLargeChunks = false; if (shouldLogInfo) { const entries = (await Promise.all(Object.values(output).map(async (chunk) => { if (chunk.type === 'chunk') { return { name: chunk.fileName, group: 'JS', size: chunk.code.length, compressedSize: await getCompressedSize(chunk.code), mapSize: chunk.map ? chunk.map.toString().length : null, }; } else { if (chunk.fileName.endsWith('.map')) return null; const isCSS = chunk.fileName.endsWith('.css'); const isCompressible = isCSS || COMPRESSIBLE_ASSETS_RE.test(chunk.fileName); return { name: chunk.fileName, group: isCSS ? 'CSS' : 'Assets', size: chunk.source.length, mapSize: null, compressedSize: isCompressible ? 
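// gzip size is only computed for CSS and other compressible assets (html/json/svg/txt/xml/xhtml);
// everything else reports null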
await getCompressedSize(chunk.source) : null, }; } }))).filter(isDefined); if (tty) clearLine$1(); let longest = 0; let biggestSize = 0; let biggestMap = 0; let biggestCompressSize = 0; for (const entry of entries) { if (entry.name.length > longest) longest = entry.name.length; if (entry.size > biggestSize) biggestSize = entry.size; if (entry.mapSize && entry.mapSize > biggestMap) { biggestMap = entry.mapSize; } if (entry.compressedSize && entry.compressedSize > biggestCompressSize) { biggestCompressSize = entry.compressedSize; } } const sizePad = displaySize(biggestSize).length; const mapPad = displaySize(biggestMap).length; const compressPad = displaySize(biggestCompressSize).length; const relativeOutDir = normalizePath$3(path$o.relative(config.root, path$o.resolve(config.root, outDir ?? config.build.outDir))); const assetsDir = path$o.join(config.build.assetsDir, '/'); for (const group of groups) { const filtered = entries.filter((e) => e.group === group.name); if (!filtered.length) continue; for (const entry of filtered.sort((a, z) => a.size - z.size)) { const isLarge = group.name === 'JS' && entry.size / 1000 > chunkLimit; if (isLarge) hasLargeChunks = true; const sizeColor = isLarge ? colors$1.yellow : colors$1.dim; let log = colors$1.dim(withTrailingSlash(relativeOutDir)); log += !config.build.lib && entry.name.startsWith(withTrailingSlash(assetsDir)) ? colors$1.dim(assetsDir) + group.color(entry.name .slice(assetsDir.length) .padEnd(longest + 2 - assetsDir.length)) : group.color(entry.name.padEnd(longest + 2)); log += colors$1.bold(sizeColor(displaySize(entry.size).padStart(sizePad))); if (entry.compressedSize) { log += colors$1.dim(` │ gzip: ${displaySize(entry.compressedSize).padStart(compressPad)}`); } if (entry.mapSize) { log += colors$1.dim(` │ map: ${displaySize(entry.mapSize).padStart(mapPad)}`); } config.logger.info(log); } } } else { hasLargeChunks = Object.values(output).some((chunk) => { return chunk.type === 'chunk' && chunk.code.length / 1000 > chunkLimit; }); } if (hasLargeChunks && config.build.minify && !config.build.lib && !config.build.ssr) { config.logger.warn(colors$1.yellow(`\n(!) Some chunks are larger than ${chunkLimit} kB after minification. 
Consider:\n` + `- Using dynamic import() to code-split the application\n` + `- Use build.rollupOptions.output.manualChunks to improve chunking: https://rollupjs.org/configuration-options/#output-manualchunks\n` + `- Adjust chunk size limit for this warning via build.chunkSizeWarningLimit.`)); } }, }; } function writeLine(output) { clearLine$1(); if (output.length < process.stdout.columns) { process.stdout.write(output); } else { process.stdout.write(output.substring(0, process.stdout.columns - 1)); } } function clearLine$1() { process.stdout.clearLine(0); process.stdout.cursorTo(0); } function throttle(fn) { let timerHandle = null; return (...args) => { if (timerHandle) return; fn(...args); timerHandle = setTimeout(() => { timerHandle = null; }, 100); }; } const POSIX_SEP_RE = new RegExp('\\' + path$o.posix.sep, 'g'); const NATIVE_SEP_RE = new RegExp('\\' + path$o.sep, 'g'); /** @type {Map}*/ const PATTERN_REGEX_CACHE = new Map(); const GLOB_ALL_PATTERN = `**/*`; const TS_EXTENSIONS = ['.ts', '.tsx', '.mts', '.cts']; const JS_EXTENSIONS = ['.js', '.jsx', '.mjs', '.cjs']; const TSJS_EXTENSIONS = TS_EXTENSIONS.concat(JS_EXTENSIONS); const TS_EXTENSIONS_RE_GROUP = `\\.(?:${TS_EXTENSIONS.map((ext) => ext.substring(1)).join('|')})`; const TSJS_EXTENSIONS_RE_GROUP = `\\.(?:${TSJS_EXTENSIONS.map((ext) => ext.substring(1)).join( '|' )})`; const IS_POSIX = path$o.posix.sep === path$o.sep; /** * @template T * @returns {{resolve:(result:T)=>void, reject:(error:any)=>void, promise: Promise}} */ function makePromise() { let resolve, reject; const promise = new Promise((res, rej) => { resolve = res; reject = rej; }); return { promise, resolve, reject }; } /** * @param {string} filename * @param {import('./cache.js').TSConfckCache} [cache] * @returns {Promise} */ async function resolveTSConfigJson(filename, cache) { if (path$o.extname(filename) !== '.json') { return; // ignore files that are not json } const tsconfig = path$o.resolve(filename); if (cache && (cache.hasParseResult(tsconfig) || cache.hasParseResult(filename))) { return tsconfig; } return promises$1.stat(tsconfig).then((stat) => { if (stat.isFile() || stat.isFIFO()) { return tsconfig; } else { throw new Error(`${filename} exists but is not a regular file.`); } }); } /** * * @param {string} dir an absolute directory path * @returns {boolean} if dir path includes a node_modules segment */ const isInNodeModules = IS_POSIX ? (dir) => dir.includes('/node_modules/') : (dir) => dir.match(/[/\\]node_modules[/\\]/); /** * convert posix separator to native separator * * eg. * windows: C:/foo/bar -> c:\foo\bar * linux: /foo/bar -> /foo/bar * * @param {string} filename with posix separators * @returns {string} filename with native separators */ const posix2native = IS_POSIX ? (filename) => filename : (filename) => filename.replace(POSIX_SEP_RE, path$o.sep); /** * convert native separator to posix separator * * eg. * windows: C:\foo\bar -> c:/foo/bar * linux: /foo/bar -> /foo/bar * * @param {string} filename - filename with native separators * @returns {string} filename with posix separators */ const native2posix = IS_POSIX ? (filename) => filename : (filename) => filename.replace(NATIVE_SEP_RE, path$o.posix.sep); /** * converts params to native separator, resolves path and converts native back to posix * * needed on windows to handle posix paths in tsconfig * * @param dir {string|null} directory to resolve from * @param filename {string} filename or pattern to resolve * @returns string */ const resolve2posix = IS_POSIX ? (dir, filename) => (dir ? 
path$o.resolve(dir, filename) : path$o.resolve(filename)) : (dir, filename) => native2posix( dir ? path$o.resolve(posix2native(dir), posix2native(filename)) : path$o.resolve(posix2native(filename)) ); /** * * @param {import('./public.d.ts').TSConfckParseResult} result * @param {import('./public.d.ts').TSConfckParseOptions} [options] * @returns {string[]} */ function resolveReferencedTSConfigFiles(result, options) { const dir = path$o.dirname(result.tsconfigFile); return result.tsconfig.references.map((ref) => { const refPath = ref.path.endsWith('.json') ? ref.path : path$o.join(ref.path, options?.configName ?? 'tsconfig.json'); return resolve2posix(dir, refPath); }); } /** * @param {string} filename * @param {import('./public.d.ts').TSConfckParseResult} result * @returns {import('./public.d.ts').TSConfckParseResult} */ function resolveSolutionTSConfig(filename, result) { const allowJs = result.tsconfig.compilerOptions?.allowJs; const extensions = allowJs ? TSJS_EXTENSIONS : TS_EXTENSIONS; if ( result.referenced && extensions.some((ext) => filename.endsWith(ext)) && !isIncluded(filename, result) ) { const solutionTSConfig = result.referenced.find((referenced) => isIncluded(filename, referenced) ); if (solutionTSConfig) { return solutionTSConfig; } } return result; } /** * * @param {string} filename * @param {import('./public.d.ts').TSConfckParseResult} result * @returns {boolean} */ function isIncluded(filename, result) { const dir = native2posix(path$o.dirname(result.tsconfigFile)); const files = (result.tsconfig.files || []).map((file) => resolve2posix(dir, file)); const absoluteFilename = resolve2posix(null, filename); if (files.includes(filename)) { return true; } const allowJs = result.tsconfig.compilerOptions?.allowJs; const isIncluded = isGlobMatch( absoluteFilename, dir, result.tsconfig.include || (result.tsconfig.files ? [] : [GLOB_ALL_PATTERN]), allowJs ); if (isIncluded) { const isExcluded = isGlobMatch(absoluteFilename, dir, result.tsconfig.exclude || [], allowJs); return !isExcluded; } return false; } /** * test filenames agains glob patterns in tsconfig * * @param filename {string} posix style abolute path to filename to test * @param dir {string} posix style absolute path to directory of tsconfig containing patterns * @param patterns {string[]} glob patterns to match against * @param allowJs {boolean} allowJs setting in tsconfig to include js extensions in checks * @returns {boolean} true when at least one pattern matches filename */ function isGlobMatch(filename, dir, patterns, allowJs) { const extensions = allowJs ? 
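// with allowJs enabled the tsconfig also covers .js/.jsx/.mjs/.cjs files, so those
// extensions participate in the glob checks below as well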
TSJS_EXTENSIONS : TS_EXTENSIONS; return patterns.some((pattern) => { // filename must end with part of pattern that comes after last wildcard let lastWildcardIndex = pattern.length; let hasWildcard = false; for (let i = pattern.length - 1; i > -1; i--) { if (pattern[i] === '*' || pattern[i] === '?') { lastWildcardIndex = i; hasWildcard = true; break; } } // if pattern does not end with wildcard, filename must end with pattern after last wildcard if ( lastWildcardIndex < pattern.length - 1 && !filename.endsWith(pattern.slice(lastWildcardIndex + 1)) ) { return false; } // if pattern ends with *, filename must end with a default extension if (pattern.endsWith('*') && !extensions.some((ext) => filename.endsWith(ext))) { return false; } // for **/* , filename must start with the dir if (pattern === GLOB_ALL_PATTERN) { return filename.startsWith(`${dir}/`); } const resolvedPattern = resolve2posix(dir, pattern); // filename must start with part of pattern that comes before first wildcard let firstWildcardIndex = -1; for (let i = 0; i < resolvedPattern.length; i++) { if (resolvedPattern[i] === '*' || resolvedPattern[i] === '?') { firstWildcardIndex = i; hasWildcard = true; break; } } if ( firstWildcardIndex > 1 && !filename.startsWith(resolvedPattern.slice(0, firstWildcardIndex - 1)) ) { return false; } // if no wildcard in pattern, filename must be equal to resolved pattern if (!hasWildcard) { return filename === resolvedPattern; } // complex pattern, use regex to check it if (PATTERN_REGEX_CACHE.has(resolvedPattern)) { return PATTERN_REGEX_CACHE.get(resolvedPattern).test(filename); } const regex = pattern2regex(resolvedPattern, allowJs); PATTERN_REGEX_CACHE.set(resolvedPattern, regex); return regex.test(filename); }); } /** * @param {string} resolvedPattern * @param {boolean} allowJs * @returns {RegExp} */ function pattern2regex(resolvedPattern, allowJs) { let regexStr = '^'; for (let i = 0; i < resolvedPattern.length; i++) { const char = resolvedPattern[i]; if (char === '?') { regexStr += '[^\\/]'; continue; } if (char === '*') { if (resolvedPattern[i + 1] === '*' && resolvedPattern[i + 2] === '/') { i += 2; regexStr += '(?:[^\\/]*\\/)*'; // zero or more path segments continue; } regexStr += '[^\\/]*'; continue; } if ('/.+^${}()|[]\\'.includes(char)) { regexStr += `\\`; } regexStr += char; } // add known file endings if pattern ends on * if (resolvedPattern.endsWith('*')) { regexStr += allowJs ? TSJS_EXTENSIONS_RE_GROUP : TS_EXTENSIONS_RE_GROUP; } regexStr += '$'; return new RegExp(regexStr); } /** * find the closest tsconfig.json file * * @param {string} filename - path to file to find tsconfig for (absolute or relative to cwd) * @param {import('./public.d.ts').TSConfckFindOptions} [options] - options * @returns {Promise} absolute path to closest tsconfig.json or null if not found */ async function find(filename, options) { let dir = path$o.dirname(path$o.resolve(filename)); if (options?.ignoreNodeModules && isInNodeModules(dir)) { return null; } const cache = options?.cache; const configName = options?.configName ?? 
'tsconfig.json'; if (cache?.hasConfigPath(dir, configName)) { return cache.getConfigPath(dir, configName); } const { /** @type {Promise} */ promise, resolve, reject } = makePromise(); if (options?.root && !path$o.isAbsolute(options.root)) { options.root = path$o.resolve(options.root); } findUp(dir, { promise, resolve, reject }, options); return promise; } /** * * @param {string} dir * @param {{promise:Promise,resolve:(result:string|null)=>void,reject:(err:any)=>void}} madePromise * @param {import('./public.d.ts').TSConfckFindOptions} [options] - options */ function findUp(dir, { resolve, reject, promise }, options) { const { cache, root, configName } = options ?? {}; if (cache) { if (cache.hasConfigPath(dir, configName)) { let cached; try { cached = cache.getConfigPath(dir, configName); } catch (e) { reject(e); return; } if (cached?.then) { cached.then(resolve).catch(reject); } else { resolve(cached); } } else { cache.setConfigPath(dir, promise, configName); } } const tsconfig = path$o.join(dir, options?.configName ?? 'tsconfig.json'); fs$l.stat(tsconfig, (err, stats) => { if (stats && (stats.isFile() || stats.isFIFO())) { resolve(tsconfig); } else if (err?.code !== 'ENOENT') { reject(err); } else { let parent; if (root === dir || (parent = path$o.dirname(dir)) === dir) { resolve(null); } else { findUp(parent, { promise, resolve, reject }, options); } } }); } /* this file contains code from strip-bom and strip-json-comments by Sindre Sorhus https://github.com/sindresorhus/strip-json-comments/blob/v4.0.0/index.js https://github.com/sindresorhus/strip-bom/blob/v5.0.0/index.js licensed under MIT, see ../LICENSE */ /** * convert content of tsconfig.json to regular json * * @param {string} tsconfigJson - content of tsconfig.json * @returns {string} content as regular json, comments and dangling commas have been replaced with whitespace */ function toJson(tsconfigJson) { const stripped = stripDanglingComma(stripJsonComments(stripBom(tsconfigJson))); if (stripped.trim() === '') { // only whitespace left after stripping, return empty object so that JSON.parse still works return '{}'; } else { return stripped; } } /** * replace dangling commas from pseudo-json string with single space * implementation heavily inspired by strip-json-comments * * @param {string} pseudoJson * @returns {string} */ function stripDanglingComma(pseudoJson) { let insideString = false; let offset = 0; let result = ''; let danglingCommaPos = null; for (let i = 0; i < pseudoJson.length; i++) { const currentCharacter = pseudoJson[i]; if (currentCharacter === '"') { const escaped = isEscaped(pseudoJson, i); if (!escaped) { insideString = !insideString; } } if (insideString) { danglingCommaPos = null; continue; } if (currentCharacter === ',') { danglingCommaPos = i; continue; } if (danglingCommaPos) { if (currentCharacter === '}' || currentCharacter === ']') { result += pseudoJson.slice(offset, danglingCommaPos) + ' '; offset = danglingCommaPos + 1; danglingCommaPos = null; } else if (!currentCharacter.match(/\s/)) { danglingCommaPos = null; } } } return result + pseudoJson.substring(offset); } // start strip-json-comments /** * * @param {string} jsonString * @param {number} quotePosition * @returns {boolean} */ function isEscaped(jsonString, quotePosition) { let index = quotePosition - 1; let backslashCount = 0; while (jsonString[index] === '\\') { index -= 1; backslashCount += 1; } return Boolean(backslashCount % 2); } /** * * @param {string} string * @param {number?} start * @param {number?} end */ function strip(string, 
start, end) { return string.slice(start, end).replace(/\S/g, ' '); } const singleComment = Symbol('singleComment'); const multiComment = Symbol('multiComment'); /** * @param {string} jsonString * @returns {string} */ function stripJsonComments(jsonString) { let isInsideString = false; /** @type {false | symbol} */ let isInsideComment = false; let offset = 0; let result = ''; for (let index = 0; index < jsonString.length; index++) { const currentCharacter = jsonString[index]; const nextCharacter = jsonString[index + 1]; if (!isInsideComment && currentCharacter === '"') { const escaped = isEscaped(jsonString, index); if (!escaped) { isInsideString = !isInsideString; } } if (isInsideString) { continue; } if (!isInsideComment && currentCharacter + nextCharacter === '//') { result += jsonString.slice(offset, index); offset = index; isInsideComment = singleComment; index++; } else if (isInsideComment === singleComment && currentCharacter + nextCharacter === '\r\n') { index++; isInsideComment = false; result += strip(jsonString, offset, index); offset = index; } else if (isInsideComment === singleComment && currentCharacter === '\n') { isInsideComment = false; result += strip(jsonString, offset, index); offset = index; } else if (!isInsideComment && currentCharacter + nextCharacter === '/*') { result += jsonString.slice(offset, index); offset = index; isInsideComment = multiComment; index++; } else if (isInsideComment === multiComment && currentCharacter + nextCharacter === '*/') { index++; isInsideComment = false; result += strip(jsonString, offset, index + 1); offset = index + 1; } } return result + (isInsideComment ? strip(jsonString.slice(offset)) : jsonString.slice(offset)); } // end strip-json-comments // start strip-bom /** * @param {string} string * @returns {string} */ function stripBom(string) { // Catches EFBBBF (UTF-8 BOM) because the buffer-to-string // conversion translates it to FEFF (UTF-16 BOM). 
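// For example, stripBom('\uFEFF{}') returns '{}', so JSON.parse no longer chokes on files saved with a UTF-8 BOM.
// Together with stripJsonComments and stripDanglingComma, toJson() above turns tsconfig-style pseudo-JSON into
// strict JSON while keeping character offsets stable (an illustrative sketch, the input value is hypothetical):
//   JSON.parse(toJson('\uFEFF{ "compilerOptions": { "strict": true, }, /* note */ }'))
//   // -> { compilerOptions: { strict: true } }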
if (string.charCodeAt(0) === 0xfeff) { return string.slice(1); } return string; } // end strip-bom const not_found_result = { tsconfigFile: null, tsconfig: {} }; /** * parse the closest tsconfig.json file * * @param {string} filename - path to a tsconfig .json or a source file or directory (absolute or relative to cwd) * @param {import('./public.d.ts').TSConfckParseOptions} [options] - options * @returns {Promise} * @throws {TSConfckParseError} */ async function parse$f(filename, options) { /** @type {import('./cache.js').TSConfckCache} */ const cache = options?.cache; if (cache?.hasParseResult(filename)) { return getParsedDeep(filename, cache, options); } const { resolve, reject, /** @type {Promise}*/ promise } = makePromise(); cache?.setParseResult(filename, promise, true); try { let tsconfigFile = (await resolveTSConfigJson(filename, cache)) || (await find(filename, options)); if (!tsconfigFile) { resolve(not_found_result); return promise; } let result; if (filename !== tsconfigFile && cache?.hasParseResult(tsconfigFile)) { result = await getParsedDeep(tsconfigFile, cache, options); } else { result = await parseFile$1(tsconfigFile, cache, filename === tsconfigFile); await Promise.all([parseExtends(result, cache), parseReferences(result, options)]); } resolve(resolveSolutionTSConfig(filename, result)); } catch (e) { reject(e); } return promise; } /** * ensure extends and references are parsed * * @param {string} filename - cached file * @param {import('./cache.js').TSConfckCache} cache - cache * @param {import('./public.d.ts').TSConfckParseOptions} options - options */ async function getParsedDeep(filename, cache, options) { const result = await cache.getParseResult(filename); if ( (result.tsconfig.extends && !result.extended) || (result.tsconfig.references && !result.referenced) ) { const promise = Promise.all([ parseExtends(result, cache), parseReferences(result, options) ]).then(() => result); cache.setParseResult(filename, promise, true); return promise; } return result; } /** * * @param {string} tsconfigFile - path to tsconfig file * @param {import('./cache.js').TSConfckCache} [cache] - cache * @param {boolean} [skipCache] - skip cache * @returns {Promise} */ async function parseFile$1(tsconfigFile, cache, skipCache) { if ( !skipCache && cache?.hasParseResult(tsconfigFile) && !cache.getParseResult(tsconfigFile)._isRootFile_ ) { return cache.getParseResult(tsconfigFile); } const promise = promises$1 .readFile(tsconfigFile, 'utf-8') .then(toJson) .then((json) => { const parsed = JSON.parse(json); applyDefaults(parsed, tsconfigFile); return { tsconfigFile, tsconfig: normalizeTSConfig(parsed, path$o.dirname(tsconfigFile)) }; }) .catch((e) => { throw new TSConfckParseError( `parsing ${tsconfigFile} failed: ${e}`, 'PARSE_FILE', tsconfigFile, e ); }); if ( !skipCache && (!cache?.hasParseResult(tsconfigFile) || !cache.getParseResult(tsconfigFile)._isRootFile_) ) { cache?.setParseResult(tsconfigFile, promise); } return promise; } /** * normalize to match the output of ts.parseJsonConfigFileContent * * @param {any} tsconfig - typescript tsconfig output * @param {string} dir - directory */ function normalizeTSConfig(tsconfig, dir) { // set baseUrl to absolute path if (tsconfig.compilerOptions?.baseUrl && !path$o.isAbsolute(tsconfig.compilerOptions.baseUrl)) { tsconfig.compilerOptions.baseUrl = resolve2posix(dir, tsconfig.compilerOptions.baseUrl); } return tsconfig; } /** * * @param {import('./public.d.ts').TSConfckParseResult} result * @param {import('./public.d.ts').TSConfckParseOptions} 
[options] * @returns {Promise} */ async function parseReferences(result, options) { if (!result.tsconfig.references) { return; } const referencedFiles = resolveReferencedTSConfigFiles(result, options); const referenced = await Promise.all( referencedFiles.map((file) => parseFile$1(file, options?.cache)) ); await Promise.all(referenced.map((ref) => parseExtends(ref, options?.cache))); referenced.forEach((ref) => { ref.solution = result; }); result.referenced = referenced; } /** * @param {import('./public.d.ts').TSConfckParseResult} result * @param {import('./cache.js').TSConfckCache}[cache] * @returns {Promise} */ async function parseExtends(result, cache) { if (!result.tsconfig.extends) { return; } // use result as first element in extended // but dereference tsconfig so that mergeExtended can modify the original without affecting extended[0] /** @type {import('./public.d.ts').TSConfckParseResult[]} */ const extended = [ { tsconfigFile: result.tsconfigFile, tsconfig: JSON.parse(JSON.stringify(result.tsconfig)) } ]; // flatten extends graph into extended let pos = 0; /** @type {string[]} */ const extendsPath = []; let currentBranchDepth = 0; while (pos < extended.length) { const extending = extended[pos]; extendsPath.push(extending.tsconfigFile); if (extending.tsconfig.extends) { // keep following this branch currentBranchDepth += 1; /** @type {string[]} */ let resolvedExtends; if (!Array.isArray(extending.tsconfig.extends)) { resolvedExtends = [resolveExtends(extending.tsconfig.extends, extending.tsconfigFile)]; } else { // reverse because typescript 5.0 treats ['a','b','c'] as c extends b extends a resolvedExtends = extending.tsconfig.extends .reverse() .map((ex) => resolveExtends(ex, extending.tsconfigFile)); } const circularExtends = resolvedExtends.find((tsconfigFile) => extendsPath.includes(tsconfigFile) ); if (circularExtends) { const circle = extendsPath.concat([circularExtends]).join(' -> '); throw new TSConfckParseError( `Circular dependency in "extends": ${circle}`, 'EXTENDS_CIRCULAR', result.tsconfigFile ); } // add new extends to the list directly after current extended.splice( pos + 1, 0, ...(await Promise.all(resolvedExtends.map((file) => parseFile$1(file, cache)))) ); } else { // reached a leaf, backtrack to the last branching point and continue extendsPath.splice(-currentBranchDepth); currentBranchDepth = 0; } pos = pos + 1; } result.extended = extended; // skip first as it is the original config for (const ext of result.extended.slice(1)) { extendTSConfig(result, ext); } } /** * * @param {string} extended * @param {string} from * @returns {string} */ function resolveExtends(extended, from) { if (extended === '..') { // see #149 extended = '../tsconfig.json'; } const req = createRequire$2(from); let error; try { return req.resolve(extended); } catch (e) { error = e; } if (extended[0] !== '.' 
&& !path$o.isAbsolute(extended)) { try { return req.resolve(`${extended}/tsconfig.json`); } catch (e) { error = e; } } throw new TSConfckParseError( `failed to resolve "extends":"${extended}" in ${from}`, 'EXTENDS_RESOLVE', from, error ); } // references, extends and custom keys are not carried over const EXTENDABLE_KEYS = [ 'compilerOptions', 'files', 'include', 'exclude', 'watchOptions', 'compileOnSave', 'typeAcquisition', 'buildOptions' ]; /** * * @param {import('./public.d.ts').TSConfckParseResult} extending * @param {import('./public.d.ts').TSConfckParseResult} extended * @returns void */ function extendTSConfig(extending, extended) { const extendingConfig = extending.tsconfig; const extendedConfig = extended.tsconfig; const relativePath = native2posix( path$o.relative(path$o.dirname(extending.tsconfigFile), path$o.dirname(extended.tsconfigFile)) ); for (const key of Object.keys(extendedConfig).filter((key) => EXTENDABLE_KEYS.includes(key))) { if (key === 'compilerOptions') { if (!extendingConfig.compilerOptions) { extendingConfig.compilerOptions = {}; } for (const option of Object.keys(extendedConfig.compilerOptions)) { if (Object.prototype.hasOwnProperty.call(extendingConfig.compilerOptions, option)) { continue; // already set } extendingConfig.compilerOptions[option] = rebaseRelative( option, extendedConfig.compilerOptions[option], relativePath ); } } else if (extendingConfig[key] === undefined) { if (key === 'watchOptions') { extendingConfig.watchOptions = {}; for (const option of Object.keys(extendedConfig.watchOptions)) { extendingConfig.watchOptions[option] = rebaseRelative( option, extendedConfig.watchOptions[option], relativePath ); } } else { extendingConfig[key] = rebaseRelative(key, extendedConfig[key], relativePath); } } } } const REBASE_KEYS = [ // root 'files', 'include', 'exclude', // compilerOptions 'baseUrl', 'rootDir', 'rootDirs', 'typeRoots', 'outDir', 'outFile', 'declarationDir', // watchOptions 'excludeDirectories', 'excludeFiles' ]; /** @typedef {string | string[]} PathValue */ /** * * @param {string} key * @param {PathValue} value * @param {string} prependPath * @returns {PathValue} */ function rebaseRelative(key, value, prependPath) { if (!REBASE_KEYS.includes(key)) { return value; } if (Array.isArray(value)) { return value.map((x) => rebasePath(x, prependPath)); } else { return rebasePath(value, prependPath); } } /** * * @param {string} value * @param {string} prependPath * @returns {string} */ function rebasePath(value, prependPath) { if (path$o.isAbsolute(value)) { return value; } else { // relative paths use posix syntax in tsconfig return path$o.posix.normalize(path$o.posix.join(prependPath, value)); } } class TSConfckParseError extends Error { /** * error code * @type {string} */ code; /** * error cause * @type { Error | undefined} */ cause; /** * absolute path of tsconfig file where the error happened * @type {string} */ tsconfigFile; /** * * @param {string} message - error message * @param {string} code - error code * @param {string} tsconfigFile - path to tsconfig file * @param {Error?} cause - cause of this error */ constructor(message, code, tsconfigFile, cause) { super(message); // Set the prototype explicitly. 
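// Restoring the prototype keeps `err instanceof TSConfckParseError` reliable even when this class is
// transpiled to an ES5 target, where subclassing built-ins like Error would otherwise lose the prototype
// chain. A consumer-side sketch (the file path is hypothetical):
//   try { await parse$f('src/main.ts', { cache: new TSConfckCache() }); }
//   catch (e) { if (e instanceof TSConfckParseError) console.error(e.code, e.tsconfigFile, e.cause); }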
Object.setPrototypeOf(this, TSConfckParseError.prototype); this.name = TSConfckParseError.name; this.code = code; this.cause = cause; this.tsconfigFile = tsconfigFile; } } /** * * @param {any} tsconfig * @param {string} tsconfigFile */ function applyDefaults(tsconfig, tsconfigFile) { if (isJSConfig(tsconfigFile)) { tsconfig.compilerOptions = { ...DEFAULT_JSCONFIG_COMPILER_OPTIONS, ...tsconfig.compilerOptions }; } } const DEFAULT_JSCONFIG_COMPILER_OPTIONS = { allowJs: true, maxNodeModuleJsDepth: 2, allowSyntheticDefaultImports: true, skipLibCheck: true, noEmit: true }; /** * @param {string} configFileName */ function isJSConfig(configFileName) { return path$o.basename(configFileName) === 'jsconfig.json'; } /** @template T */ class TSConfckCache { /** * clear cache, use this if you have a long running process and tsconfig files have been added,changed or deleted */ clear() { this.#configPaths.clear(); this.#parsed.clear(); } /** * has cached closest config for files in dir * @param {string} dir * @param {string} [configName=tsconfig.json] * @returns {boolean} */ hasConfigPath(dir, configName = 'tsconfig.json') { return this.#configPaths.has(`${dir}/${configName}`); } /** * get cached closest tsconfig for files in dir * @param {string} dir * @param {string} [configName=tsconfig.json] * @returns {Promise|string|null} * @throws {unknown} if cached value is an error */ getConfigPath(dir, configName = 'tsconfig.json') { const key = `${dir}/${configName}`; const value = this.#configPaths.get(key); if (value == null || value.length || value.then) { return value; } else { throw value; } } /** * has parsed tsconfig for file * @param {string} file * @returns {boolean} */ hasParseResult(file) { return this.#parsed.has(file); } /** * get parsed tsconfig for file * @param {string} file * @returns {Promise|T} * @throws {unknown} if cached value is an error */ getParseResult(file) { const value = this.#parsed.get(file); if (value.then || value.tsconfig) { return value; } else { throw value; // cached error, rethrow } } /** * @internal * @private * @param file * @param {boolean} isRootFile a flag to check if current file which involking the parse() api, used to distinguish the normal cache which only parsed by parseFile() * @param {Promise} result */ setParseResult(file, result, isRootFile = false) { // _isRootFile_ is a temporary property for Promise result, used to prevent deadlock with cache Object.defineProperty(result, '_isRootFile_', { value: isRootFile, writable: false, enumerable: false, configurable: false }); this.#parsed.set(file, result); result .then((parsed) => { if (this.#parsed.get(file) === result) { this.#parsed.set(file, parsed); } }) .catch((e) => { if (this.#parsed.get(file) === result) { this.#parsed.set(file, e); } }); } /** * @internal * @private * @param {string} dir * @param {Promise} configPath * @param {string} [configName=tsconfig.json] */ setConfigPath(dir, configPath, configName = 'tsconfig.json') { const key = `${dir}/${configName}`; this.#configPaths.set(key, configPath); configPath .then((path) => { if (this.#configPaths.get(key) === configPath) { this.#configPaths.set(key, path); } }) .catch((e) => { if (this.#configPaths.get(key) === configPath) { this.#configPaths.set(key, e); } }); } /** * map directories to their closest tsconfig.json * @internal * @private * @type{Map|string|null)>} */ #configPaths = new Map(); /** * map files to their parsed tsconfig result * @internal * @private * @type {Map|T)> } */ #parsed = new Map(); } const debug$h = 
createDebugger('vite:esbuild'); // IIFE content looks like `var MyLib = function() {`. // Spaces are removed and parameters are mangled when minified const IIFE_BEGIN_RE = /(const|var)\s+\S+\s*=\s*function\([^()]*\)\s*\{\s*"use strict";/; const validExtensionRE = /\.\w+$/; const jsxExtensionsRE = /\.(?:j|t)sx\b/; let server; async function transformWithEsbuild(code, filename, options, inMap) { let loader = options?.loader; if (!loader) { // if the id ends with a valid ext, use it (e.g. vue blocks) // otherwise, cleanup the query before checking the ext const ext = path$o .extname(validExtensionRE.test(filename) ? filename : cleanUrl(filename)) .slice(1); if (ext === 'cjs' || ext === 'mjs') { loader = 'js'; } else if (ext === 'cts' || ext === 'mts') { loader = 'ts'; } else { loader = ext; } } let tsconfigRaw = options?.tsconfigRaw; // if options provide tsconfigRaw in string, it takes highest precedence if (typeof tsconfigRaw !== 'string') { // these fields would affect the compilation result // https://esbuild.github.io/content-types/#tsconfig-json const meaningfulFields = [ 'alwaysStrict', 'experimentalDecorators', 'importsNotUsedAsValues', 'jsx', 'jsxFactory', 'jsxFragmentFactory', 'jsxImportSource', 'preserveValueImports', 'target', 'useDefineForClassFields', 'verbatimModuleSyntax', ]; const compilerOptionsForFile = {}; if (loader === 'ts' || loader === 'tsx') { const loadedTsconfig = await loadTsconfigJsonForFile(filename); const loadedCompilerOptions = loadedTsconfig.compilerOptions ?? {}; for (const field of meaningfulFields) { if (field in loadedCompilerOptions) { // @ts-expect-error TypeScript can't tell they are of the same type compilerOptionsForFile[field] = loadedCompilerOptions[field]; } } } const compilerOptions = { ...compilerOptionsForFile, ...tsconfigRaw?.compilerOptions, }; // esbuild uses `useDefineForClassFields: true` when `tsconfig.compilerOptions.target` isn't declared // but we want `useDefineForClassFields: false` when `tsconfig.compilerOptions.target` isn't declared // to align with the TypeScript's behavior if (compilerOptions.useDefineForClassFields === undefined && compilerOptions.target === undefined) { compilerOptions.useDefineForClassFields = false; } // esbuild uses tsconfig fields when both the normal options and tsconfig was set // but we want to prioritize the normal options if (options) { options.jsx && (compilerOptions.jsx = undefined); options.jsxFactory && (compilerOptions.jsxFactory = undefined); options.jsxFragment && (compilerOptions.jsxFragmentFactory = undefined); options.jsxImportSource && (compilerOptions.jsxImportSource = undefined); } tsconfigRaw = { ...tsconfigRaw, compilerOptions, }; } const resolvedOptions = { sourcemap: true, // ensure source file name contains full query sourcefile: filename, ...options, loader, tsconfigRaw, }; // Some projects in the ecosystem are calling this function with an ESBuildOptions // object and esbuild throws an error for extra fields // @ts-expect-error include exists in ESBuildOptions delete resolvedOptions.include; // @ts-expect-error exclude exists in ESBuildOptions delete resolvedOptions.exclude; // @ts-expect-error jsxInject exists in ESBuildOptions delete resolvedOptions.jsxInject; try { const result = await transform$1(code, resolvedOptions); let map; if (inMap && resolvedOptions.sourcemap) { const nextMap = JSON.parse(result.map); nextMap.sourcesContent = []; map = combineSourcemaps(filename, [ nextMap, inMap, ]); } else { map = resolvedOptions.sourcemap && resolvedOptions.sourcemap !== 'inline' ? 
JSON.parse(result.map) : { mappings: '' }; } return { ...result, map, }; } catch (e) { debug$h?.(`esbuild error with options used: `, resolvedOptions); // patch error information if (e.errors) { e.frame = ''; e.errors.forEach((m) => { if (m.text === 'Experimental decorators are not currently enabled' || m.text === 'Parameter decorators only work when experimental decorators are enabled') { m.text += '. Vite 5 now uses esbuild 0.18 and you need to enable them by adding "experimentalDecorators": true in your "tsconfig.json" file.'; } e.frame += `\n` + prettifyMessage(m, code); }); e.loc = e.errors[0].location; } throw e; } } function esbuildPlugin(config) { const options = config.esbuild; const { jsxInject, include, exclude, ...esbuildTransformOptions } = options; const filter = createFilter(include || /\.(m?ts|[jt]sx)$/, exclude || /\.js$/); // Remove optimization options for dev as we only need to transpile them, // and for build as the final optimization is in `buildEsbuildPlugin` const transformOptions = { target: 'esnext', charset: 'utf8', ...esbuildTransformOptions, minify: false, minifyIdentifiers: false, minifySyntax: false, minifyWhitespace: false, treeShaking: false, // keepNames is not needed when minify is disabled. // Also transforming multiple times with keepNames enabled breaks // tree-shaking. (#9164) keepNames: false, }; return { name: 'vite:esbuild', configureServer(_server) { server = _server; server.watcher .on('add', reloadOnTsconfigChange) .on('change', reloadOnTsconfigChange) .on('unlink', reloadOnTsconfigChange); }, buildEnd() { // recycle serve to avoid preventing Node self-exit (#6815) server = null; }, async transform(code, id) { if (filter(id) || filter(cleanUrl(id))) { const result = await transformWithEsbuild(code, id, transformOptions); if (result.warnings.length) { result.warnings.forEach((m) => { this.warn(prettifyMessage(m, code)); }); } if (jsxInject && jsxExtensionsRE.test(id)) { result.code = jsxInject + ';' + result.code; } return { code: result.code, map: result.map, }; } }, }; } const rollupToEsbuildFormatMap = { es: 'esm', cjs: 'cjs', // passing `var Lib = (() => {})()` to esbuild with format = "iife" // will turn it to `(() => { var Lib = (() => {})() })()`, // so we remove the format config to tell esbuild not doing this // // although esbuild doesn't change format, there is still possibility // that `{ treeShaking: true }` removes a top-level no-side-effect variable // like: `var Lib = 1`, which becomes `` after esbuild transforming, // but thankfully rollup does not do this optimization now iife: undefined, }; const buildEsbuildPlugin = (config) => { return { name: 'vite:esbuild-transpile', async renderChunk(code, chunk, opts) { // @ts-expect-error injected by @vitejs/plugin-legacy if (opts.__vite_skip_esbuild__) { return null; } const options = resolveEsbuildTranspileOptions(config, opts.format); if (!options) { return null; } const res = await transformWithEsbuild(code, chunk.fileName, options); if (config.build.lib) { // #7188, esbuild adds helpers out of the UMD and IIFE wrappers, and the // names are minified potentially causing collision with other globals. // We inject the helpers inside the wrappers. // e.g. 
turn: // (function(){ /*actual content/* })() // into: // (function(){ <esbuild helpers> /*actual content/* })() // Not using regex because it's too hard to rule out performance issues like #8738 #8099 #10900 #14065 // Instead, using plain string index manipulation (indexOf, slice) which is simple and performant // We don't need to create a MagicString here because both the helpers and // the headers don't modify the sourcemap const esbuildCode = res.code; const contentIndex = opts.format === 'iife' ? Math.max(esbuildCode.search(IIFE_BEGIN_RE), 0) : opts.format === 'umd' ? esbuildCode.indexOf(`(function(`) // same for minified or not : 0; if (contentIndex > 0) { const esbuildHelpers = esbuildCode.slice(0, contentIndex); res.code = esbuildCode .slice(contentIndex) .replace(`"use strict";`, `"use strict";` + esbuildHelpers); } } return res; }, }; }; function resolveEsbuildTranspileOptions(config, format) { const target = config.build.target; const minify = config.build.minify === 'esbuild'; if ((!target || target === 'esnext') && !minify) { return null; } // Do not minify whitespace for ES lib output since that would remove // pure annotations and break tree-shaking // https://github.com/vuejs/core/issues/2860#issuecomment-926882793 const isEsLibBuild = config.build.lib && format === 'es'; const esbuildOptions = config.esbuild || {}; const options = { charset: 'utf8', ...esbuildOptions, loader: 'js', target: target || undefined, format: rollupToEsbuildFormatMap[format], // the final build should always support dynamic import and import.meta. // if they need to be polyfilled, plugin-legacy should be used. // plugin-legacy detects these two features when checking for modern code. supported: { 'dynamic-import': true, 'import-meta': true, ...esbuildOptions.supported, }, }; // If no minify, disable all minify options if (!minify) { return { ...options, minify: false, minifyIdentifiers: false, minifySyntax: false, minifyWhitespace: false, treeShaking: false, }; } // If the user enables fine-grained minify options, minify with those options instead if (options.minifyIdentifiers != null || options.minifySyntax != null || options.minifyWhitespace != null) { if (isEsLibBuild) { // Disable minify whitespace as it breaks tree-shaking return { ...options, minify: false, minifyIdentifiers: options.minifyIdentifiers ?? true, minifySyntax: options.minifySyntax ?? true, minifyWhitespace: false, treeShaking: true, }; } else { return { ...options, minify: false, minifyIdentifiers: options.minifyIdentifiers ?? true, minifySyntax: options.minifySyntax ?? true, minifyWhitespace: options.minifyWhitespace ??
true, treeShaking: true, }; } } // Else apply default minify options if (isEsLibBuild) { // Minify all except whitespace as it breaks tree-shaking return { ...options, minify: false, minifyIdentifiers: true, minifySyntax: true, minifyWhitespace: false, treeShaking: true, }; } else { return { ...options, minify: true, treeShaking: true, }; } } function prettifyMessage(m, code) { let res = colors$1.yellow(m.text); if (m.location) { res += `\n` + generateCodeFrame(code, m.location); } return res + `\n`; } let tsconfckCache; async function loadTsconfigJsonForFile(filename) { try { if (!tsconfckCache) { tsconfckCache = new TSConfckCache(); } const result = await parse$f(filename, { cache: tsconfckCache, ignoreNodeModules: true, }); // tsconfig could be out of root, make sure it is watched on dev if (server && result.tsconfigFile) { ensureWatchedFile(server.watcher, result.tsconfigFile, server.config.root); } return result.tsconfig; } catch (e) { if (e instanceof TSConfckParseError) { // tsconfig could be out of root, make sure it is watched on dev if (server && e.tsconfigFile) { ensureWatchedFile(server.watcher, e.tsconfigFile, server.config.root); } } throw e; } } async function reloadOnTsconfigChange(changedFile) { // server could be closed externally after a file change is detected if (!server) return; // any tsconfig.json that's added in the workspace could be closer to a code file than a previously cached one // any json file in the tsconfig cache could have been used to compile ts if (path$o.basename(changedFile) === 'tsconfig.json' || (changedFile.endsWith('.json') && tsconfckCache?.hasParseResult(changedFile))) { server.config.logger.info(`changed tsconfig file detected: ${changedFile} - Clearing cache and forcing full-reload to ensure TypeScript is compiled with updated config values.`, { clear: server.config.clearScreen, timestamp: true }); // clear module graph to remove code compiled with outdated config server.moduleGraph.invalidateAll(); // reset tsconfck so that recompile works with up2date configs tsconfckCache?.clear(); // server may not be available if vite config is updated at the same time if (server) { // force full reload server.hot.send({ type: 'full-reload', path: '*', }); } } } // src/realWorker.ts var Worker = class { /** @internal */ _code; /** @internal */ _parentFunctions; /** @internal */ _max; /** @internal */ _pool; /** @internal */ _idlePool; /** @internal */ _queue; constructor(fn, options = {}) { this._code = genWorkerCode(fn, options.parentFunctions ?? {}); this._parentFunctions = options.parentFunctions ?? {}; const defaultMax = Math.max( 1, // os.availableParallelism is available from Node.js 18.14.0 (os$4.availableParallelism?.() ?? 
os$4.cpus().length) - 1 ); this._max = options.max || defaultMax; this._pool = []; this._idlePool = []; this._queue = []; } async run(...args) { const worker = await this._getAvailableWorker(); return new Promise((resolve, reject) => { worker.currentResolve = resolve; worker.currentReject = reject; worker.postMessage({ type: "run", args }); }); } stop() { this._pool.forEach((w) => w.unref()); this._queue.forEach( ([, reject]) => reject( new Error("Main worker pool stopped before a worker was available.") ) ); this._pool = []; this._idlePool = []; this._queue = []; } /** @internal */ async _getAvailableWorker() { if (this._idlePool.length) { return this._idlePool.shift(); } if (this._pool.length < this._max) { const worker = new Worker$1(this._code, { eval: true }); worker.on("message", async (args) => { if (args.type === "run") { if ("result" in args) { worker.currentResolve && worker.currentResolve(args.result); worker.currentResolve = null; } else { if (args.error instanceof ReferenceError) { args.error.message += ". Maybe you forgot to pass the function to parentFunction?"; } worker.currentReject && worker.currentReject(args.error); worker.currentReject = null; } this._assignDoneWorker(worker); } else if (args.type === "parentFunction") { try { const result = await this._parentFunctions[args.name](...args.args); worker.postMessage({ type: "parentFunction", id: args.id, result }); } catch (e) { worker.postMessage({ type: "parentFunction", id: args.id, error: e }); } } }); worker.on("error", (err) => { worker.currentReject && worker.currentReject(err); worker.currentReject = null; }); worker.on("exit", (code) => { const i = this._pool.indexOf(worker); if (i > -1) this._pool.splice(i, 1); if (code !== 0 && worker.currentReject) { worker.currentReject( new Error(`Worker stopped with non-0 exit code ${code}`) ); worker.currentReject = null; } }); this._pool.push(worker); return worker; } let resolve; let reject; const onWorkerAvailablePromise = new Promise((r, rj) => { resolve = r; reject = rj; }); this._queue.push([resolve, reject]); return onWorkerAvailablePromise; } /** @internal */ _assignDoneWorker(worker) { if (this._queue.length) { const [resolve] = this._queue.shift(); resolve(worker); return; } this._idlePool.push(worker); } }; function genWorkerCode(fn, parentFunctions) { const createParentFunctionCaller = (parentPort) => { let id = 0; const resolvers = /* @__PURE__ */ new Map(); const call = (key) => async (...args) => { id++; let resolve, reject; const promise = new Promise((res, rej) => { resolve = res; reject = rej; }); resolvers.set(id, { resolve, reject }); parentPort.postMessage({ type: "parentFunction", id, name: key, args }); return await promise; }; const receive = (id2, args) => { if (resolvers.has(id2)) { const { resolve, reject } = resolvers.get(id2); resolvers.delete(id2); if ("result" in args) { resolve(args.result); } else { reject(args.error); } } }; return { call, receive }; }; return ` const { parentPort } = require('worker_threads') const parentFunctionCaller = (${createParentFunctionCaller.toString()})(parentPort) const doWork = (() => { ${Object.keys(parentFunctions).map( (key) => `const ${key} = parentFunctionCaller.call(${JSON.stringify(key)});` ).join("\n")} return (${fn.toString()})() })() parentPort.on('message', async (args) => { if (args.type === 'run') { try { const res = await doWork(...args.args) parentPort.postMessage({ type: 'run', result: res }) } catch (e) { parentPort.postMessage({ type: 'run', error: e }) } } else if (args.type === 
'parentFunction') { parentFunctionCaller.receive(args.id, args) } }) `; } var FakeWorker = class { /** @internal */ _fn; constructor(fn, options = {}) { const argsAndCode = genFakeWorkerArgsAndCode( fn, options.parentFunctions ?? {} ); const require2 = createRequire$1(import.meta.url); this._fn = new Function(...argsAndCode)(require2, options.parentFunctions); } async run(...args) { try { return await this._fn(...args); } catch (err) { if (err instanceof ReferenceError) { err.message += ". Maybe you forgot to pass the function to parentFunction?"; } throw err; } } stop() { } }; function genFakeWorkerArgsAndCode(fn, parentFunctions) { return [ "require", "parentFunctions", ` ${Object.keys(parentFunctions).map((key) => `const ${key} = parentFunctions[${JSON.stringify(key)}];`).join("\n")} return (${fn.toString()})() ` ]; } // src/workerWithFallback.ts var WorkerWithFallback = class { /** @internal */ _disableReal; /** @internal */ _realWorker; /** @internal */ _fakeWorker; /** @internal */ _shouldUseFake; constructor(fn, options) { this._disableReal = options.max !== void 0 && options.max <= 0; this._realWorker = new Worker(fn, options); this._fakeWorker = new FakeWorker(fn, options); this._shouldUseFake = options.shouldUseFake; } async run(...args) { const useFake = this._disableReal || this._shouldUseFake(...args); return this[useFake ? "_fakeWorker" : "_realWorker"].run(...args); } stop() { this._realWorker.stop(); this._fakeWorker.stop(); } }; let terserPath; const loadTerserPath = (root) => { if (terserPath) return terserPath; try { terserPath = requireResolveFromRootWithFallback(root, 'terser'); } catch (e) { if (e.code === 'MODULE_NOT_FOUND') { throw new Error('terser not found. Since Vite v3, terser has become an optional dependency. You need to install it.'); } else { const message = new Error(`terser failed to load:\n${e.message}`); message.stack = e.stack + '\n' + message.stack; throw message; } } return terserPath; }; function terserPlugin(config) { const { maxWorkers, ...terserOptions } = config.build.terserOptions; const makeWorker = () => new Worker(() => async (terserPath, code, options) => { // test fails when using `import`. maybe related: https://github.com/nodejs/node/issues/43205 // eslint-disable-next-line no-restricted-globals -- this function runs inside cjs const terser = require(terserPath); return terser.minify(code, options); }, { max: maxWorkers, }); let worker; return { name: 'vite:terser', async renderChunk(code, _chunk, outputOptions) { // This plugin is included for any non-false value of config.build.minify, // so that normal chunks can use the preferred minifier, and legacy chunks // can use terser. if (config.build.minify !== 'terser' && // @ts-expect-error injected by @vitejs/plugin-legacy !outputOptions.__vite_force_terser__) { return null; } // Do not minify ES lib output since that would remove pure annotations // and break tree-shaking. if (config.build.lib && outputOptions.format === 'es') { return null; } // Lazy load worker. 
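// The worker pool is only spawned on the first chunk that actually reaches this point, so builds that never
// minify with terser pay no thread start-up cost; later chunks reuse the same instance via `||=`, and
// closeBundle() below stops it so the threads do not keep the process alive. Rough shape of a single run
// (argument names and option values are illustrative):
//   const { code, map } = await worker.run(terserPath, chunkCode, { sourceMap: true, module: true });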
worker ||= makeWorker(); const terserPath = loadTerserPath(config.root); const res = await worker.run(terserPath, code, { safari10: true, ...terserOptions, sourceMap: !!outputOptions.sourcemap, module: outputOptions.format.startsWith('es'), toplevel: outputOptions.format === 'cjs', }); return { code: res.code, map: res.map, }; }, closeBundle() { worker?.stop(); }, }; } const mimes$1 = { "3g2": "video/3gpp2", "3gp": "video/3gpp", "3gpp": "video/3gpp", "3mf": "model/3mf", "aac": "audio/aac", "ac": "application/pkix-attr-cert", "adp": "audio/adpcm", "adts": "audio/aac", "ai": "application/postscript", "aml": "application/automationml-aml+xml", "amlx": "application/automationml-amlx+zip", "amr": "audio/amr", "apng": "image/apng", "appcache": "text/cache-manifest", "appinstaller": "application/appinstaller", "appx": "application/appx", "appxbundle": "application/appxbundle", "asc": "application/pgp-keys", "atom": "application/atom+xml", "atomcat": "application/atomcat+xml", "atomdeleted": "application/atomdeleted+xml", "atomsvc": "application/atomsvc+xml", "au": "audio/basic", "avci": "image/avci", "avcs": "image/avcs", "avif": "image/avif", "aw": "application/applixware", "bdoc": "application/bdoc", "bin": "application/octet-stream", "bmp": "image/bmp", "bpk": "application/octet-stream", "btf": "image/prs.btif", "btif": "image/prs.btif", "buffer": "application/octet-stream", "ccxml": "application/ccxml+xml", "cdfx": "application/cdfx+xml", "cdmia": "application/cdmi-capability", "cdmic": "application/cdmi-container", "cdmid": "application/cdmi-domain", "cdmio": "application/cdmi-object", "cdmiq": "application/cdmi-queue", "cer": "application/pkix-cert", "cgm": "image/cgm", "cjs": "application/node", "class": "application/java-vm", "coffee": "text/coffeescript", "conf": "text/plain", "cpl": "application/cpl+xml", "cpt": "application/mac-compactpro", "crl": "application/pkix-crl", "css": "text/css", "csv": "text/csv", "cu": "application/cu-seeme", "cwl": "application/cwl", "cww": "application/prs.cww", "davmount": "application/davmount+xml", "dbk": "application/docbook+xml", "deb": "application/octet-stream", "def": "text/plain", "deploy": "application/octet-stream", "dib": "image/bmp", "disposition-notification": "message/disposition-notification", "dist": "application/octet-stream", "distz": "application/octet-stream", "dll": "application/octet-stream", "dmg": "application/octet-stream", "dms": "application/octet-stream", "doc": "application/msword", "dot": "application/msword", "dpx": "image/dpx", "drle": "image/dicom-rle", "dsc": "text/prs.lines.tag", "dssc": "application/dssc+der", "dtd": "application/xml-dtd", "dump": "application/octet-stream", "dwd": "application/atsc-dwd+xml", "ear": "application/java-archive", "ecma": "application/ecmascript", "elc": "application/octet-stream", "emf": "image/emf", "eml": "message/rfc822", "emma": "application/emma+xml", "emotionml": "application/emotionml+xml", "eps": "application/postscript", "epub": "application/epub+zip", "exe": "application/octet-stream", "exi": "application/exi", "exp": "application/express", "exr": "image/aces", "ez": "application/andrew-inset", "fdf": "application/fdf", "fdt": "application/fdt+xml", "fits": "image/fits", "g3": "image/g3fax", "gbr": "application/rpki-ghostbusters", "geojson": "application/geo+json", "gif": "image/gif", "glb": "model/gltf-binary", "gltf": "model/gltf+json", "gml": "application/gml+xml", "gpx": "application/gpx+xml", "gram": "application/srgs", "grxml": "application/srgs+xml", "gxf": 
"application/gxf", "gz": "application/gzip", "h261": "video/h261", "h263": "video/h263", "h264": "video/h264", "heic": "image/heic", "heics": "image/heic-sequence", "heif": "image/heif", "heifs": "image/heif-sequence", "hej2": "image/hej2k", "held": "application/atsc-held+xml", "hjson": "application/hjson", "hlp": "application/winhlp", "hqx": "application/mac-binhex40", "hsj2": "image/hsj2", "htm": "text/html", "html": "text/html", "ics": "text/calendar", "ief": "image/ief", "ifb": "text/calendar", "iges": "model/iges", "igs": "model/iges", "img": "application/octet-stream", "in": "text/plain", "ini": "text/plain", "ink": "application/inkml+xml", "inkml": "application/inkml+xml", "ipfix": "application/ipfix", "iso": "application/octet-stream", "its": "application/its+xml", "jade": "text/jade", "jar": "application/java-archive", "jhc": "image/jphc", "jls": "image/jls", "jp2": "image/jp2", "jpe": "image/jpeg", "jpeg": "image/jpeg", "jpf": "image/jpx", "jpg": "image/jpeg", "jpg2": "image/jp2", "jpgm": "image/jpm", "jpgv": "video/jpeg", "jph": "image/jph", "jpm": "image/jpm", "jpx": "image/jpx", "js": "text/javascript", "json": "application/json", "json5": "application/json5", "jsonld": "application/ld+json", "jsonml": "application/jsonml+json", "jsx": "text/jsx", "jt": "model/jt", "jxr": "image/jxr", "jxra": "image/jxra", "jxrs": "image/jxrs", "jxs": "image/jxs", "jxsc": "image/jxsc", "jxsi": "image/jxsi", "jxss": "image/jxss", "kar": "audio/midi", "ktx": "image/ktx", "ktx2": "image/ktx2", "less": "text/less", "lgr": "application/lgr+xml", "list": "text/plain", "litcoffee": "text/coffeescript", "log": "text/plain", "lostxml": "application/lost+xml", "lrf": "application/octet-stream", "m1v": "video/mpeg", "m21": "application/mp21", "m2a": "audio/mpeg", "m2v": "video/mpeg", "m3a": "audio/mpeg", "m4a": "audio/mp4", "m4p": "application/mp4", "m4s": "video/iso.segment", "ma": "application/mathematica", "mads": "application/mads+xml", "maei": "application/mmt-aei+xml", "man": "text/troff", "manifest": "text/cache-manifest", "map": "application/json", "mar": "application/octet-stream", "markdown": "text/markdown", "mathml": "application/mathml+xml", "mb": "application/mathematica", "mbox": "application/mbox", "md": "text/markdown", "mdx": "text/mdx", "me": "text/troff", "mesh": "model/mesh", "meta4": "application/metalink4+xml", "metalink": "application/metalink+xml", "mets": "application/mets+xml", "mft": "application/rpki-manifest", "mid": "audio/midi", "midi": "audio/midi", "mime": "message/rfc822", "mj2": "video/mj2", "mjp2": "video/mj2", "mjs": "text/javascript", "mml": "text/mathml", "mods": "application/mods+xml", "mov": "video/quicktime", "mp2": "audio/mpeg", "mp21": "application/mp21", "mp2a": "audio/mpeg", "mp3": "audio/mpeg", "mp4": "video/mp4", "mp4a": "audio/mp4", "mp4s": "application/mp4", "mp4v": "video/mp4", "mpd": "application/dash+xml", "mpe": "video/mpeg", "mpeg": "video/mpeg", "mpf": "application/media-policy-dataset+xml", "mpg": "video/mpeg", "mpg4": "video/mp4", "mpga": "audio/mpeg", "mpp": "application/dash-patch+xml", "mrc": "application/marc", "mrcx": "application/marcxml+xml", "ms": "text/troff", "mscml": "application/mediaservercontrol+xml", "msh": "model/mesh", "msi": "application/octet-stream", "msix": "application/msix", "msixbundle": "application/msixbundle", "msm": "application/octet-stream", "msp": "application/octet-stream", "mtl": "model/mtl", "musd": "application/mmt-usd+xml", "mxf": "application/mxf", "mxmf": "audio/mobile-xmf", "mxml": "application/xv+xml", 
"n3": "text/n3", "nb": "application/mathematica", "nq": "application/n-quads", "nt": "application/n-triples", "obj": "model/obj", "oda": "application/oda", "oga": "audio/ogg", "ogg": "audio/ogg", "ogv": "video/ogg", "ogx": "application/ogg", "omdoc": "application/omdoc+xml", "onepkg": "application/onenote", "onetmp": "application/onenote", "onetoc": "application/onenote", "onetoc2": "application/onenote", "opf": "application/oebps-package+xml", "opus": "audio/ogg", "otf": "font/otf", "owl": "application/rdf+xml", "oxps": "application/oxps", "p10": "application/pkcs10", "p7c": "application/pkcs7-mime", "p7m": "application/pkcs7-mime", "p7s": "application/pkcs7-signature", "p8": "application/pkcs8", "pdf": "application/pdf", "pfr": "application/font-tdpfr", "pgp": "application/pgp-encrypted", "pkg": "application/octet-stream", "pki": "application/pkixcmp", "pkipath": "application/pkix-pkipath", "pls": "application/pls+xml", "png": "image/png", "prc": "model/prc", "prf": "application/pics-rules", "provx": "application/provenance+xml", "ps": "application/postscript", "pskcxml": "application/pskc+xml", "pti": "image/prs.pti", "qt": "video/quicktime", "raml": "application/raml+yaml", "rapd": "application/route-apd+xml", "rdf": "application/rdf+xml", "relo": "application/p2p-overlay+xml", "rif": "application/reginfo+xml", "rl": "application/resource-lists+xml", "rld": "application/resource-lists-diff+xml", "rmi": "audio/midi", "rnc": "application/relax-ng-compact-syntax", "rng": "application/xml", "roa": "application/rpki-roa", "roff": "text/troff", "rq": "application/sparql-query", "rs": "application/rls-services+xml", "rsat": "application/atsc-rsat+xml", "rsd": "application/rsd+xml", "rsheet": "application/urc-ressheet+xml", "rss": "application/rss+xml", "rtf": "text/rtf", "rtx": "text/richtext", "rusd": "application/route-usd+xml", "s3m": "audio/s3m", "sbml": "application/sbml+xml", "scq": "application/scvp-cv-request", "scs": "application/scvp-cv-response", "sdp": "application/sdp", "senmlx": "application/senml+xml", "sensmlx": "application/sensml+xml", "ser": "application/java-serialized-object", "setpay": "application/set-payment-initiation", "setreg": "application/set-registration-initiation", "sgi": "image/sgi", "sgm": "text/sgml", "sgml": "text/sgml", "shex": "text/shex", "shf": "application/shf+xml", "shtml": "text/html", "sieve": "application/sieve", "sig": "application/pgp-signature", "sil": "audio/silk", "silo": "model/mesh", "siv": "application/sieve", "slim": "text/slim", "slm": "text/slim", "sls": "application/route-s-tsid+xml", "smi": "application/smil+xml", "smil": "application/smil+xml", "snd": "audio/basic", "so": "application/octet-stream", "spdx": "text/spdx", "spp": "application/scvp-vp-response", "spq": "application/scvp-vp-request", "spx": "audio/ogg", "sql": "application/sql", "sru": "application/sru+xml", "srx": "application/sparql-results+xml", "ssdl": "application/ssdl+xml", "ssml": "application/ssml+xml", "stk": "application/hyperstudio", "stl": "model/stl", "stpx": "model/step+xml", "stpxz": "model/step-xml+zip", "stpz": "model/step+zip", "styl": "text/stylus", "stylus": "text/stylus", "svg": "image/svg+xml", "svgz": "image/svg+xml", "swidtag": "application/swid+xml", "t": "text/troff", "t38": "image/t38", "td": "application/urc-targetdesc+xml", "tei": "application/tei+xml", "teicorpus": "application/tei+xml", "text": "text/plain", "tfi": "application/thraud+xml", "tfx": "image/tiff-fx", "tif": "image/tiff", "tiff": "image/tiff", "toml": "application/toml", "tr": 
"text/troff", "trig": "application/trig", "ts": "video/mp2t", "tsd": "application/timestamped-data", "tsv": "text/tab-separated-values", "ttc": "font/collection", "ttf": "font/ttf", "ttl": "text/turtle", "ttml": "application/ttml+xml", "txt": "text/plain", "u3d": "model/u3d", "u8dsn": "message/global-delivery-status", "u8hdr": "message/global-headers", "u8mdn": "message/global-disposition-notification", "u8msg": "message/global", "ubj": "application/ubjson", "uri": "text/uri-list", "uris": "text/uri-list", "urls": "text/uri-list", "vcard": "text/vcard", "vrml": "model/vrml", "vtt": "text/vtt", "vxml": "application/voicexml+xml", "war": "application/java-archive", "wasm": "application/wasm", "wav": "audio/wav", "weba": "audio/webm", "webm": "video/webm", "webmanifest": "application/manifest+json", "webp": "image/webp", "wgsl": "text/wgsl", "wgt": "application/widget", "wif": "application/watcherinfo+xml", "wmf": "image/wmf", "woff": "font/woff", "woff2": "font/woff2", "wrl": "model/vrml", "wsdl": "application/wsdl+xml", "wspolicy": "application/wspolicy+xml", "x3d": "model/x3d+xml", "x3db": "model/x3d+fastinfoset", "x3dbz": "model/x3d+binary", "x3dv": "model/x3d-vrml", "x3dvz": "model/x3d+vrml", "x3dz": "model/x3d+xml", "xaml": "application/xaml+xml", "xav": "application/xcap-att+xml", "xca": "application/xcap-caps+xml", "xcs": "application/calendar+xml", "xdf": "application/xcap-diff+xml", "xdssc": "application/dssc+xml", "xel": "application/xcap-el+xml", "xenc": "application/xenc+xml", "xer": "application/patch-ops-error+xml", "xfdf": "application/xfdf", "xht": "application/xhtml+xml", "xhtml": "application/xhtml+xml", "xhvml": "application/xv+xml", "xlf": "application/xliff+xml", "xm": "audio/xm", "xml": "text/xml", "xns": "application/xcap-ns+xml", "xop": "application/xop+xml", "xpl": "application/xproc+xml", "xsd": "application/xml", "xsf": "application/prs.xsf+xml", "xsl": "application/xml", "xslt": "application/xml", "xspf": "application/xspf+xml", "xvm": "application/xv+xml", "xvml": "application/xv+xml", "yaml": "text/yaml", "yang": "application/yang", "yin": "application/yin+xml", "yml": "text/yaml", "zip": "application/zip" }; function lookup(extn) { let tmp = ('' + extn).trim().toLowerCase(); let idx = tmp.lastIndexOf('.'); return mimes$1[!~idx ? tmp : tmp.substring(++idx)]; } const publicFilesMap = new WeakMap(); async function initPublicFiles(config) { let fileNames; try { fileNames = await recursiveReaddir(config.publicDir); } catch (e) { if (e.code === ERR_SYMLINK_IN_RECURSIVE_READDIR) { return; } throw e; } const publicFiles = new Set(fileNames.map((fileName) => fileName.slice(config.publicDir.length))); publicFilesMap.set(config, publicFiles); return publicFiles; } function getPublicFiles(config) { return publicFilesMap.get(config); } function checkPublicFile(url, config) { // note if the file is in /public, the resolver would have returned it // as-is so it's not going to be a fully resolved path. const { publicDir } = config; if (!publicDir || url[0] !== '/') { return; } const fileName = cleanUrl(url); // short-circuit if we have an in-memory publicFiles cache const publicFiles = getPublicFiles(config); if (publicFiles) { return publicFiles.has(fileName) ? normalizePath$3(path$o.join(publicDir, fileName)) : undefined; } const publicFile = normalizePath$3(path$o.join(publicDir, fileName)); if (!publicFile.startsWith(withTrailingSlash(publicDir))) { // can happen if URL starts with '../' return; } return fs$l.existsSync(publicFile) ? 
publicFile : undefined; } // referenceId is base64url but replaces - with $ const assetUrlRE = /__VITE_ASSET__([\w$]+)__(?:\$_(.*?)__)?/g; const jsSourceMapRE = /\.[cm]?js\.map$/; const assetCache = new WeakMap(); const generatedAssets = new WeakMap(); // add own dictionary entry by directly assigning mrmime function registerCustomMime() { // https://github.com/lukeed/mrmime/issues/3 mimes$1['ico'] = 'image/x-icon'; // https://developer.mozilla.org/en-US/docs/Web/Media/Formats/Containers#flac mimes$1['flac'] = 'audio/flac'; // https://developer.mozilla.org/en-US/docs/Web/HTTP/Basics_of_HTTP/MIME_types/Common_types mimes$1['eot'] = 'application/vnd.ms-fontobject'; } function renderAssetUrlInJS(ctx, config, chunk, opts, code) { const toRelativeRuntime = createToImportMetaURLBasedRelativeRuntime(opts.format, config.isWorker); let match; let s; // Urls added with JS using e.g. // imgElement.src = "__VITE_ASSET__5aA0Ddc0__" are using quotes // Urls added in CSS that is imported in JS end up like // var inlined = ".inlined{color:green;background:url(__VITE_ASSET__5aA0Ddc0__)}\n"; // In both cases, the wrapping should already be fine assetUrlRE.lastIndex = 0; while ((match = assetUrlRE.exec(code))) { s ||= new MagicString(code); const [full, referenceId, postfix = ''] = match; const file = ctx.getFileName(referenceId); chunk.viteMetadata.importedAssets.add(cleanUrl(file)); const filename = file + postfix; const replacement = toOutputFilePathInJS(filename, 'asset', chunk.fileName, 'js', config, toRelativeRuntime); const replacementString = typeof replacement === 'string' ? JSON.stringify(replacement).slice(1, -1) : `"+${replacement.runtime}+"`; s.update(match.index, match.index + full.length, replacementString); } // Replace __VITE_PUBLIC_ASSET__5aA0Ddc0__ with absolute paths const publicAssetUrlMap = publicAssetUrlCache.get(config); publicAssetUrlRE.lastIndex = 0; while ((match = publicAssetUrlRE.exec(code))) { s ||= new MagicString(code); const [full, hash] = match; const publicUrl = publicAssetUrlMap.get(hash).slice(1); const replacement = toOutputFilePathInJS(publicUrl, 'public', chunk.fileName, 'js', config, toRelativeRuntime); const replacementString = typeof replacement === 'string' ? JSON.stringify(replacement).slice(1, -1) : `"+${replacement.runtime}+"`; s.update(match.index, match.index + full.length, replacementString); } return s; } // During build, if we don't use a virtual file for public assets, rollup will // watch for these ids resulting in watching the root of the file system in Windows, const viteBuildPublicIdPrefix = '\0vite:asset:public'; /** * Also supports loading plain strings with import text from './foo.txt?raw' */ function assetPlugin(config) { registerCustomMime(); let moduleGraph; return { name: 'vite:asset', buildStart() { assetCache.set(config, new Map()); generatedAssets.set(config, new Map()); }, configureServer(server) { moduleGraph = server.moduleGraph; }, resolveId(id) { if (!config.assetsInclude(cleanUrl(id)) && !urlRE.test(id)) { return; } // imports to absolute urls pointing to files in /public // will fail to resolve in the main resolver. handle them here. const publicFile = checkPublicFile(id, config); if (publicFile) { return config.command === 'build' ? 
`${viteBuildPublicIdPrefix}${id}` : id; } }, async load(id) { if (id.startsWith(viteBuildPublicIdPrefix)) { id = id.slice(viteBuildPublicIdPrefix.length); } if (id[0] === '\0') { // Rollup convention, this id should be handled by the // plugin that marked it with \0 return; } // raw requests, read from disk if (rawRE.test(id)) { const file = checkPublicFile(id, config) || cleanUrl(id); this.addWatchFile(file); // raw query, read file and return as string return `export default ${JSON.stringify(await fsp.readFile(file, 'utf-8'))}`; } if (!urlRE.test(id) && !config.assetsInclude(cleanUrl(id))) { return; } id = removeUrlQuery(id); let url = await fileToUrl$1(id, config, this); // Inherit HMR timestamp if this asset was invalidated if (moduleGraph) { const mod = moduleGraph.getModuleById(id); if (mod && mod.lastHMRTimestamp > 0) { url = injectQuery(url, `t=${mod.lastHMRTimestamp}`); } } return `export default ${JSON.stringify(url)}`; }, renderChunk(code, chunk, opts) { const s = renderAssetUrlInJS(this, config, chunk, opts, code); if (s) { return { code: s.toString(), map: config.build.sourcemap ? s.generateMap({ hires: 'boundary' }) : null, }; } else { return null; } }, generateBundle(_, bundle) { // do not emit assets for SSR build if (config.command === 'build' && config.build.ssr && !config.build.ssrEmitAssets) { for (const file in bundle) { if (bundle[file].type === 'asset' && !file.endsWith('ssr-manifest.json') && !jsSourceMapRE.test(file)) { delete bundle[file]; } } } }, }; } async function fileToUrl$1(id, config, ctx) { if (config.command === 'serve') { return fileToDevUrl(id, config); } else { return fileToBuiltUrl(id, config, ctx); } } function fileToDevUrl(id, config) { let rtn; if (checkPublicFile(id, config)) { // in public dir during dev, keep the url as-is rtn = id; } else if (id.startsWith(withTrailingSlash(config.root))) { // in project root, infer short public path rtn = '/' + path$o.posix.relative(config.root, id); } else { // outside of project root, use absolute fs path // (this is special handled by the serve static middleware rtn = path$o.posix.join(FS_PREFIX, id); } const base = joinUrlSegments(config.server?.origin ?? 
'', config.base); return joinUrlSegments(base, removeLeadingSlash(rtn)); } function getPublicAssetFilename(hash, config) { return publicAssetUrlCache.get(config)?.get(hash); } const publicAssetUrlCache = new WeakMap(); const publicAssetUrlRE = /__VITE_PUBLIC_ASSET__([a-z\d]{8})__/g; function publicFileToBuiltUrl(url, config) { if (config.command !== 'build') { // We don't need relative base or renderBuiltUrl support during dev return joinUrlSegments(config.base, url); } const hash = getHash(url); let cache = publicAssetUrlCache.get(config); if (!cache) { cache = new Map(); publicAssetUrlCache.set(config, cache); } if (!cache.get(hash)) { cache.set(hash, url); } return `__VITE_PUBLIC_ASSET__${hash}__`; } const GIT_LFS_PREFIX = Buffer$1.from('version https://git-lfs.github.com'); function isGitLfsPlaceholder(content) { if (content.length < GIT_LFS_PREFIX.length) return false; // Check whether the content begins with the characteristic string of Git LFS placeholders return GIT_LFS_PREFIX.compare(content, 0, GIT_LFS_PREFIX.length) === 0; } /** * Register an asset to be emitted as part of the bundle (if necessary) * and returns the resolved public URL */ async function fileToBuiltUrl(id, config, pluginContext, skipPublicCheck = false, forceInline) { if (!skipPublicCheck && checkPublicFile(id, config)) { return publicFileToBuiltUrl(id, config); } const cache = assetCache.get(config); const cached = cache.get(id); if (cached) { return cached; } const file = cleanUrl(id); const content = await fsp.readFile(file); let url; if (shouldInline(config, file, id, content, forceInline)) { if (config.build.lib && isGitLfsPlaceholder(content)) { config.logger.warn(colors$1.yellow(`Inlined file ${id} was not downloaded via Git LFS`)); } if (file.endsWith('.svg')) { url = svgToDataURL(content); } else { const mimeType = lookup(file) ?? 'application/octet-stream'; // base64 inlined as a string url = `data:${mimeType};base64,${content.toString('base64')}`; } } else { // emit as asset const { search, hash } = parse$i(id); const postfix = (search || '') + (hash || ''); const referenceId = pluginContext.emitFile({ // Ignore directory structure for asset file names name: path$o.basename(file), type: 'asset', source: content, }); const originalName = normalizePath$3(path$o.relative(config.root, file)); generatedAssets.get(config).set(referenceId, { originalName }); url = `__VITE_ASSET__${referenceId}__${postfix ? `$_${postfix}__` : ``}`; // TODO_BASE } cache.set(id, url); return url; } async function urlToBuiltUrl(url, importer, config, pluginContext, forceInline) { if (checkPublicFile(url, config)) { return publicFileToBuiltUrl(url, config); } const file = url[0] === '/' ? 
path$o.join(config.root, url) : path$o.join(path$o.dirname(importer), url); return fileToBuiltUrl(file, config, pluginContext, // skip public check since we just did it above true, forceInline); } const shouldInline = (config, file, id, content, forceInline) => { if (config.build.lib) return true; if (forceInline !== undefined) return forceInline; let limit; if (typeof config.build.assetsInlineLimit === 'function') { const userShouldInline = config.build.assetsInlineLimit(file, content); if (userShouldInline != null) return userShouldInline; limit = DEFAULT_ASSETS_INLINE_LIMIT; } else { limit = Number(config.build.assetsInlineLimit); } if (file.endsWith('.html')) return false; // Don't inline SVG with fragments, as they are meant to be reused if (file.endsWith('.svg') && id.includes('#')) return false; return content.length < limit && !isGitLfsPlaceholder(content); }; const nestedQuotesRE = /"[^"']*'[^"]*"|'[^'"]*"[^']*'/; // Inspired by https://github.com/iconify/iconify/blob/main/packages/utils/src/svg/url.ts function svgToDataURL(content) { const stringContent = content.toString(); // If the SVG contains some text or HTML, any transformation is unsafe, and given that double quotes would then // need to be escaped, the gain to use a data URI would be ridiculous if not negative if (stringContent.includes('<text') || stringContent.includes('<foreignObject') || nestedQuotesRE.test(stringContent)) { return `data:image/svg+xml;base64,${content.toString('base64')}`; } else { return ('data:image/svg+xml,' + stringContent .trim() .replaceAll(/>\s+</g, '><') .replaceAll('"', "'") .replaceAll('%', '%25') .replaceAll('#', '%23') .replaceAll('<', '%3c') .replaceAll('>', '%3e') // Spaces are not valid in srcset it has some use cases // it can make the uncompressed URI slightly higher than base64, but will compress way better // https://github.com/vitejs/vite/pull/14643#issuecomment-1766288673 .replaceAll(/\s+/g, '%20')); } } const endsWithJSRE = /\.[cm]?js$/; function manifestPlugin(config) { const manifest = {}; let outputCount; return { name: 'vite:manifest', buildStart() { outputCount = 0; }, generateBundle({ format }, bundle) { function getChunkName(chunk) { return getChunkOriginalFileName(chunk, config.root, format); } function getInternalImports(imports) { const filteredImports = []; for (const file of imports) { if (bundle[file] === undefined) { continue; } filteredImports.push(getChunkName(bundle[file])); } return filteredImports; } function createChunk(chunk) { const manifestChunk = { file: chunk.fileName, }; if (chunk.facadeModuleId) { manifestChunk.src = getChunkName(chunk); } if (chunk.isEntry) { manifestChunk.isEntry = true; } if (chunk.isDynamicEntry) { manifestChunk.isDynamicEntry = true; } if (chunk.imports.length) { const internalImports = getInternalImports(chunk.imports); if (internalImports.length > 0) { manifestChunk.imports = internalImports; } } if (chunk.dynamicImports.length) { const internalImports = getInternalImports(chunk.dynamicImports); if (internalImports.length > 0) { manifestChunk.dynamicImports = internalImports; } } if (chunk.viteMetadata?.importedCss.size) { manifestChunk.css = [...chunk.viteMetadata.importedCss]; } if (chunk.viteMetadata?.importedAssets.size) { manifestChunk.assets = [...chunk.viteMetadata.importedAssets]; } return manifestChunk; } function createAsset(asset, src, isEntry) { const manifestChunk = { file: asset.fileName, src, }; if (isEntry) manifestChunk.isEntry = true; return manifestChunk; } const fileNameToAssetMeta = new Map(); const assets = generatedAssets.get(config); assets.forEach((asset, referenceId) => { try { const fileName = this.getFileName(referenceId); fileNameToAssetMeta.set(fileName, asset); } catch (error) { // The asset was generated as part of a different output
option. // It was already handled during the previous run of this plugin. assets.delete(referenceId); } }); const fileNameToAsset = new Map(); for (const file in bundle) { const chunk = bundle[file]; if (chunk.type === 'chunk') { manifest[getChunkName(chunk)] = createChunk(chunk); } else if (chunk.type === 'asset' && typeof chunk.name === 'string') { // Add every unique asset to the manifest, keyed by its original name const assetMeta = fileNameToAssetMeta.get(chunk.fileName); const src = assetMeta?.originalName ?? chunk.name; const asset = createAsset(chunk, src, assetMeta?.isEntry); // If JS chunk and asset chunk are both generated from the same source file, // prioritize JS chunk as it contains more information const file = manifest[src]?.file; if (file && endsWithJSRE.test(file)) continue; manifest[src] = asset; fileNameToAsset.set(chunk.fileName, asset); } } // Add deduplicated assets to the manifest assets.forEach(({ originalName }, referenceId) => { if (!manifest[originalName]) { const fileName = this.getFileName(referenceId); const asset = fileNameToAsset.get(fileName); if (asset) { manifest[originalName] = asset; } } }); outputCount++; const output = config.build.rollupOptions?.output; const outputLength = Array.isArray(output) ? output.length : 1; if (outputCount >= outputLength) { this.emitFile({ fileName: typeof config.build.manifest === 'string' ? config.build.manifest : '.vite/manifest.json', type: 'asset', source: JSON.stringify(sortObjectKeys(manifest), undefined, 2), }); } }, }; } function getChunkOriginalFileName(chunk, root, format) { if (chunk.facadeModuleId) { let name = normalizePath$3(path$o.relative(root, chunk.facadeModuleId)); if (format === 'system' && !chunk.name.includes('-legacy')) { const ext = path$o.extname(name); const endPos = ext.length !== 0 ? -ext.length : undefined; name = name.slice(0, endPos) + `-legacy` + ext; } return name.replace(/\0/g, ''); } else { return `_` + path$o.basename(chunk.fileName); } } // This is based on @rollup/plugin-data-uri // MIT Licensed https://github.com/rollup/plugins/blob/master/LICENSE // ref https://github.com/vitejs/vite/issues/1428#issuecomment-757033808 const dataUriRE = /^([^/]+\/[^;,]+)(;base64)?,([\s\S]*)$/; const base64RE = /base64/i; const dataUriPrefix = `\0/@data-uri/`; /** * Build only, since importing from a data URI works natively. */ function dataURIPlugin() { let resolved; return { name: 'vite:data-uri', buildStart() { resolved = new Map(); }, resolveId(id) { if (!dataUriRE.test(id)) { return; } const uri = new URL$3(id); if (uri.protocol !== 'data:') { return; } const match = uri.pathname.match(dataUriRE); if (!match) { return; } const [, mime, format, data] = match; if (mime !== 'text/javascript') { throw new Error(`data URI with non-JavaScript mime type is not supported. If you're using legacy JavaScript MIME types (such as 'application/javascript'), please use 'text/javascript' instead.`); } // decode data const base64 = format && base64RE.test(format.substring(1)); const content = base64 ? 
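/**
 * Illustrative sketch (comment only, not executed): what this data-URI plugin accepts.
 * Only `data:text/javascript` URIs are resolved; the decoded source is cached under the
 * `\0/@data-uri/` prefix and returned by the `load` hook below. The importing specifier
 * is hypothetical.
 *
 * ```js
 * // URI-encoded payload
 * import answer from 'data:text/javascript,export default 42';
 * // a ';base64' flag (matched by dataUriRE) switches to base64 decoding
 * ```
 */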
Buffer.from(data, 'base64').toString('utf-8') : data; resolved.set(id, content); return dataUriPrefix + id; }, load(id) { if (id.startsWith(dataUriPrefix)) { return resolved.get(id.slice(dataUriPrefix.length)); } }, }; } /* es-module-lexer 1.4.1 */ const A=1===new Uint8Array(new Uint16Array([1]).buffer)[0];function parse$e(E,g="@"){if(!C)return init.then((()=>parse$e(E)));const I=E.length+1,k=(C.__heap_base.value||C.__heap_base)+4*I-C.memory.buffer.byteLength;k>0&&C.memory.grow(Math.ceil(k/65536));const K=C.sa(I-1);if((A?B:Q)(E,new Uint16Array(C.memory.buffer,K,I)),!C.parse())throw Object.assign(new Error(`Parse error ${g}:${E.slice(0,C.e()).split("\n").length}:${C.e()-E.lastIndexOf("\n",C.e()-1)}`),{idx:C.e()});const o=[],D=[];for(;C.ri();){const A=C.is(),Q=C.ie(),B=C.ai(),g=C.id(),I=C.ss(),k=C.se();let K;C.ip()&&(K=w(E.slice(-1===g?A-1:A,-1===g?Q+1:Q))),o.push({n:K,s:A,e:Q,ss:I,se:k,d:g,a:B});}for(;C.re();){const A=C.es(),Q=C.ee(),B=C.els(),g=C.ele(),I=E.slice(A,Q),k=I[0],K=B<0?void 0:E.slice(B,g),o=K?K[0]:"";D.push({s:A,e:Q,ls:B,le:g,n:'"'===k||"'"===k?w(I):I,ln:'"'===o||"'"===o?w(K):K});}function w(A){try{return (0, eval)(A)}catch(A){}}return [o,D,!!C.f(),!!C.ms()]}function Q(A,Q){const B=A.length;let C=0;for(;C>>8;}}function B(A,Q){const B=A.length;let C=0;for(;CA.charCodeAt(0))))).then(WebAssembly.instantiate).then((({exports:A})=>{C=A;}));var E; var convertSourceMap$1 = {}; (function (exports) { Object.defineProperty(exports, 'commentRegex', { get: function getCommentRegex () { // Groups: 1: media type, 2: MIME type, 3: charset, 4: encoding, 5: data. return /^\s*?\/[\/\*][@#]\s+?sourceMappingURL=data:(((?:application|text)\/json)(?:;charset=([^;,]+?)?)?)?(?:;(base64))?,(.*?)$/mg; } }); Object.defineProperty(exports, 'mapFileCommentRegex', { get: function getMapFileCommentRegex () { // Matches sourceMappingURL in either // or /* comment styles. return /(?:\/\/[@#][ \t]+?sourceMappingURL=([^\s'"`]+?)[ \t]*?$)|(?:\/\*[@#][ \t]+sourceMappingURL=([^*]+?)[ \t]*?(?:\*\/){1}[ \t]*?$)/mg; } }); var decodeBase64; if (typeof Buffer !== 'undefined') { if (typeof Buffer.from === 'function') { decodeBase64 = decodeBase64WithBufferFrom; } else { decodeBase64 = decodeBase64WithNewBuffer; } } else { decodeBase64 = decodeBase64WithAtob; } function decodeBase64WithBufferFrom(base64) { return Buffer.from(base64, 'base64').toString(); } function decodeBase64WithNewBuffer(base64) { if (typeof value === 'number') { throw new TypeError('The value to decode must not be of type number.'); } return new Buffer(base64, 'base64').toString(); } function decodeBase64WithAtob(base64) { return decodeURIComponent(escape(atob(base64))); } function stripComment(sm) { return sm.split(',').pop(); } function readFromFileMap(sm, read) { var r = exports.mapFileCommentRegex.exec(sm); // for some odd reason //# .. captures in 1 and /* .. 
*/ in 2 var filename = r[1] || r[2]; try { var sm = read(filename); if (sm != null && typeof sm.catch === 'function') { return sm.catch(throwError); } else { return sm; } } catch (e) { throwError(e); } function throwError(e) { throw new Error('An error occurred while trying to read the map file at ' + filename + '\n' + e.stack); } } function Converter (sm, opts) { opts = opts || {}; if (opts.hasComment) { sm = stripComment(sm); } if (opts.encoding === 'base64') { sm = decodeBase64(sm); } else if (opts.encoding === 'uri') { sm = decodeURIComponent(sm); } if (opts.isJSON || opts.encoding) { sm = JSON.parse(sm); } this.sourcemap = sm; } Converter.prototype.toJSON = function (space) { return JSON.stringify(this.sourcemap, null, space); }; if (typeof Buffer !== 'undefined') { if (typeof Buffer.from === 'function') { Converter.prototype.toBase64 = encodeBase64WithBufferFrom; } else { Converter.prototype.toBase64 = encodeBase64WithNewBuffer; } } else { Converter.prototype.toBase64 = encodeBase64WithBtoa; } function encodeBase64WithBufferFrom() { var json = this.toJSON(); return Buffer.from(json, 'utf8').toString('base64'); } function encodeBase64WithNewBuffer() { var json = this.toJSON(); if (typeof json === 'number') { throw new TypeError('The json to encode must not be of type number.'); } return new Buffer(json, 'utf8').toString('base64'); } function encodeBase64WithBtoa() { var json = this.toJSON(); return btoa(unescape(encodeURIComponent(json))); } Converter.prototype.toURI = function () { var json = this.toJSON(); return encodeURIComponent(json); }; Converter.prototype.toComment = function (options) { var encoding, content, data; if (options != null && options.encoding === 'uri') { encoding = ''; content = this.toURI(); } else { encoding = ';base64'; content = this.toBase64(); } data = 'sourceMappingURL=data:application/json;charset=utf-8' + encoding + ',' + content; return options != null && options.multiline ? 
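/**
 * Illustrative sketch (comment only, not executed) of the convert-source-map API being
 * assembled in this block; the map object is a minimal stand-in.
 *
 * ```js
 * const map = { version: 3, sources: ['foo.js'], names: [], mappings: '' };
 * convertSourceMap$1.fromObject(map).toComment();
 * // -> '//# sourceMappingURL=data:application/json;charset=utf-8;base64,...'
 * // with { multiline: true } the same data is wrapped in a CSS-style block comment
 * ```
 */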
'/*# ' + data + ' */' : '//# ' + data; }; // returns copy instead of original Converter.prototype.toObject = function () { return JSON.parse(this.toJSON()); }; Converter.prototype.addProperty = function (key, value) { if (this.sourcemap.hasOwnProperty(key)) throw new Error('property "' + key + '" already exists on the sourcemap, use set property instead'); return this.setProperty(key, value); }; Converter.prototype.setProperty = function (key, value) { this.sourcemap[key] = value; return this; }; Converter.prototype.getProperty = function (key) { return this.sourcemap[key]; }; exports.fromObject = function (obj) { return new Converter(obj); }; exports.fromJSON = function (json) { return new Converter(json, { isJSON: true }); }; exports.fromURI = function (uri) { return new Converter(uri, { encoding: 'uri' }); }; exports.fromBase64 = function (base64) { return new Converter(base64, { encoding: 'base64' }); }; exports.fromComment = function (comment) { var m, encoding; comment = comment .replace(/^\/\*/g, '//') .replace(/\*\/$/g, ''); m = exports.commentRegex.exec(comment); encoding = m && m[4] || 'uri'; return new Converter(comment, { encoding: encoding, hasComment: true }); }; function makeConverter(sm) { return new Converter(sm, { isJSON: true }); } exports.fromMapFileComment = function (comment, read) { if (typeof read === 'string') { throw new Error( 'String directory paths are no longer supported with `fromMapFileComment`\n' + 'Please review the Upgrading documentation at https://github.com/thlorenz/convert-source-map#upgrading' ) } var sm = readFromFileMap(comment, read); if (sm != null && typeof sm.then === 'function') { return sm.then(makeConverter); } else { return makeConverter(sm); } }; // Finds last sourcemap comment in file or returns null if none was found exports.fromSource = function (content) { var m = content.match(exports.commentRegex); return m ? exports.fromComment(m.pop()) : null; }; // Finds last sourcemap comment in file or returns null if none was found exports.fromMapFileSource = function (content, read) { if (typeof read === 'string') { throw new Error( 'String directory paths are no longer supported with `fromMapFileSource`\n' + 'Please review the Upgrading documentation at https://github.com/thlorenz/convert-source-map#upgrading' ) } var m = content.match(exports.mapFileCommentRegex); return m ? exports.fromMapFileComment(m.pop(), read) : null; }; exports.removeComments = function (src) { return src.replace(exports.commentRegex, ''); }; exports.removeMapFileComments = function (src) { return src.replace(exports.mapFileCommentRegex, ''); }; exports.generateMapFileComment = function (file, options) { var data = 'sourceMappingURL=' + file; return options && options.multiline ? '/*# ' + data + ' */' : '//# ' + data; }; } (convertSourceMap$1)); var convertSourceMap = /*@__PURE__*/getDefaultExportFromCjs(convertSourceMap$1); const debug$g = createDebugger('vite:sourcemap', { onlyWhenFocused: true, }); // Virtual modules should be prefixed with a null byte to avoid a // false positive "missing source" warning. We also check for certain // prefixes used for special handling in esbuildDepPlugin. const virtualSourceRE = /^(?:dep:|browser-external:|virtual:)|\0/; async function computeSourceRoute(map, file) { let sourceRoot; try { // The source root is undefined for virtual modules and permission errors. 
sourceRoot = await fsp.realpath(path$o.resolve(path$o.dirname(file), map.sourceRoot || '')); } catch { } return sourceRoot; } async function injectSourcesContent(map, file, logger) { let sourceRootPromise; const missingSources = []; const sourcesContent = map.sourcesContent || []; const sourcesContentPromises = []; for (let index = 0; index < map.sources.length; index++) { const sourcePath = map.sources[index]; if (!sourcesContent[index] && sourcePath && !virtualSourceRE.test(sourcePath)) { sourcesContentPromises.push((async () => { // inject content from source file when sourcesContent is null sourceRootPromise ??= computeSourceRoute(map, file); const sourceRoot = await sourceRootPromise; let resolvedSourcePath = decodeURI(sourcePath); if (sourceRoot) { resolvedSourcePath = path$o.resolve(sourceRoot, resolvedSourcePath); } sourcesContent[index] = await fsp .readFile(resolvedSourcePath, 'utf-8') .catch(() => { missingSources.push(resolvedSourcePath); return null; }); })()); } } await Promise.all(sourcesContentPromises); map.sourcesContent = sourcesContent; // Use this command… // DEBUG="vite:sourcemap" vite build // …to log the missing sources. if (missingSources.length) { logger.warnOnce(`Sourcemap for "${file}" points to missing source files`); debug$g?.(`Missing sources:\n ` + missingSources.join(`\n `)); } } function genSourceMapUrl(map) { if (typeof map !== 'string') { map = JSON.stringify(map); } return `data:application/json;base64,${Buffer.from(map).toString('base64')}`; } function getCodeWithSourcemap(type, code, map) { if (debug$g) { code += `\n/*${JSON.stringify(map, null, 2).replace(/\*\//g, '*\\/')}*/\n`; } if (type === 'js') { code += `\n//# sourceMappingURL=${genSourceMapUrl(map)}`; } else if (type === 'css') { code += `\n/*# sourceMappingURL=${genSourceMapUrl(map)} */`; } return code; } function applySourcemapIgnoreList(map, sourcemapPath, sourcemapIgnoreList, logger) { let { x_google_ignoreList } = map; if (x_google_ignoreList === undefined) { x_google_ignoreList = []; } for (let sourcesIndex = 0; sourcesIndex < map.sources.length; ++sourcesIndex) { const sourcePath = map.sources[sourcesIndex]; if (!sourcePath) continue; const ignoreList = sourcemapIgnoreList(path$o.isAbsolute(sourcePath) ? 
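/**
 * Illustrative sketch (comment only, not executed): how the sourcemap helpers above fit
 * together. `map` is a hypothetical Rollup-style sourcemap object.
 *
 * ```js
 * // genSourceMapUrl() encodes the JSON map as a base64 data URL;
 * // getCodeWithSourcemap() appends the matching comment for 'js' or 'css' output.
 * const code = getCodeWithSourcemap('js', 'console.log(1)', map);
 * // -> 'console.log(1)\n//# sourceMappingURL=data:application/json;base64,...'
 * ```
 */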
sourcePath : path$o.resolve(path$o.dirname(sourcemapPath), sourcePath), sourcemapPath); if (logger && typeof ignoreList !== 'boolean') { logger.warn('sourcemapIgnoreList function must return a boolean.'); } if (ignoreList && !x_google_ignoreList.includes(sourcesIndex)) { x_google_ignoreList.push(sourcesIndex); } } if (x_google_ignoreList.length > 0) { if (!map.x_google_ignoreList) map.x_google_ignoreList = x_google_ignoreList; } } var tasks = {}; var utils$g = {}; var array$1 = {}; Object.defineProperty(array$1, "__esModule", { value: true }); array$1.splitWhen = array$1.flatten = void 0; function flatten$1(items) { return items.reduce((collection, item) => [].concat(collection, item), []); } array$1.flatten = flatten$1; function splitWhen(items, predicate) { const result = [[]]; let groupIndex = 0; for (const item of items) { if (predicate(item)) { groupIndex++; result[groupIndex] = []; } else { result[groupIndex].push(item); } } return result; } array$1.splitWhen = splitWhen; var errno$1 = {}; Object.defineProperty(errno$1, "__esModule", { value: true }); errno$1.isEnoentCodeError = void 0; function isEnoentCodeError(error) { return error.code === 'ENOENT'; } errno$1.isEnoentCodeError = isEnoentCodeError; var fs$h = {}; Object.defineProperty(fs$h, "__esModule", { value: true }); fs$h.createDirentFromStats = void 0; let DirentFromStats$1 = class DirentFromStats { constructor(name, stats) { this.name = name; this.isBlockDevice = stats.isBlockDevice.bind(stats); this.isCharacterDevice = stats.isCharacterDevice.bind(stats); this.isDirectory = stats.isDirectory.bind(stats); this.isFIFO = stats.isFIFO.bind(stats); this.isFile = stats.isFile.bind(stats); this.isSocket = stats.isSocket.bind(stats); this.isSymbolicLink = stats.isSymbolicLink.bind(stats); } }; function createDirentFromStats$1(name, stats) { return new DirentFromStats$1(name, stats); } fs$h.createDirentFromStats = createDirentFromStats$1; var path$h = {}; Object.defineProperty(path$h, "__esModule", { value: true }); path$h.convertPosixPathToPattern = path$h.convertWindowsPathToPattern = path$h.convertPathToPattern = path$h.escapePosixPath = path$h.escapeWindowsPath = path$h.escape = path$h.removeLeadingDotSegment = path$h.makeAbsolute = path$h.unixify = void 0; const os$3 = require$$2; const path$g = require$$0$4; const IS_WINDOWS_PLATFORM = os$3.platform() === 'win32'; const LEADING_DOT_SEGMENT_CHARACTERS_COUNT = 2; // ./ or .\\ /** * All non-escaped special characters. * Posix: ()*?[]{|}, !+@ before (, ! at the beginning, \\ before non-special characters. * Windows: (){}[], !+@ before (, ! at the beginning. */ const POSIX_UNESCAPED_GLOB_SYMBOLS_RE = /(\\?)([()*?[\]{|}]|^!|[!+@](?=\()|\\(?![!()*+?@[\]{|}]))/g; const WINDOWS_UNESCAPED_GLOB_SYMBOLS_RE = /(\\?)([()[\]{}]|^!|[!+@](?=\())/g; /** * The device path (\\.\ or \\?\). * https://learn.microsoft.com/en-us/dotnet/standard/io/file-path-formats#dos-device-paths */ const DOS_DEVICE_PATH_RE = /^\\\\([.?])/; /** * All backslashes except those escaping special characters. * Windows: !()+@{} * https://learn.microsoft.com/en-us/windows/win32/fileio/naming-a-file#naming-conventions */ const WINDOWS_BACKSLASHES_RE = /\\(?![!()+@[\]{}])/g; /** * Designed to work only with simple paths: `dir\\file`. 
*/ function unixify(filepath) { return filepath.replace(/\\/g, '/'); } path$h.unixify = unixify; function makeAbsolute(cwd, filepath) { return path$g.resolve(cwd, filepath); } path$h.makeAbsolute = makeAbsolute; function removeLeadingDotSegment(entry) { // We do not use `startsWith` because this is 10x slower than current implementation for some cases. // eslint-disable-next-line @typescript-eslint/prefer-string-starts-ends-with if (entry.charAt(0) === '.') { const secondCharactery = entry.charAt(1); if (secondCharactery === '/' || secondCharactery === '\\') { return entry.slice(LEADING_DOT_SEGMENT_CHARACTERS_COUNT); } } return entry; } path$h.removeLeadingDotSegment = removeLeadingDotSegment; path$h.escape = IS_WINDOWS_PLATFORM ? escapeWindowsPath : escapePosixPath; function escapeWindowsPath(pattern) { return pattern.replace(WINDOWS_UNESCAPED_GLOB_SYMBOLS_RE, '\\$2'); } path$h.escapeWindowsPath = escapeWindowsPath; function escapePosixPath(pattern) { return pattern.replace(POSIX_UNESCAPED_GLOB_SYMBOLS_RE, '\\$2'); } path$h.escapePosixPath = escapePosixPath; path$h.convertPathToPattern = IS_WINDOWS_PLATFORM ? convertWindowsPathToPattern : convertPosixPathToPattern; function convertWindowsPathToPattern(filepath) { return escapeWindowsPath(filepath) .replace(DOS_DEVICE_PATH_RE, '//$1') .replace(WINDOWS_BACKSLASHES_RE, '/'); } path$h.convertWindowsPathToPattern = convertWindowsPathToPattern; function convertPosixPathToPattern(filepath) { return escapePosixPath(filepath); } path$h.convertPosixPathToPattern = convertPosixPathToPattern; var pattern$1 = {}; /*! * is-extglob * * Copyright (c) 2014-2016, Jon Schlinkert. * Licensed under the MIT License. */ var isExtglob$1 = function isExtglob(str) { if (typeof str !== 'string' || str === '') { return false; } var match; while ((match = /(\\).|([@?!+*]\(.*\))/g.exec(str))) { if (match[2]) return true; str = str.slice(match.index + match[0].length); } return false; }; /*! * is-glob * * Copyright (c) 2014-2017, Jon Schlinkert. * Released under the MIT License. */ var isExtglob = isExtglob$1; var chars = { '{': '}', '(': ')', '[': ']'}; var strictCheck = function(str) { if (str[0] === '!') { return true; } var index = 0; var pipeIndex = -2; var closeSquareIndex = -2; var closeCurlyIndex = -2; var closeParenIndex = -2; var backSlashIndex = -2; while (index < str.length) { if (str[index] === '*') { return true; } if (str[index + 1] === '?' && /[\].+)]/.test(str[index])) { return true; } if (closeSquareIndex !== -1 && str[index] === '[' && str[index + 1] !== ']') { if (closeSquareIndex < index) { closeSquareIndex = str.indexOf(']', index); } if (closeSquareIndex > index) { if (backSlashIndex === -1 || backSlashIndex > closeSquareIndex) { return true; } backSlashIndex = str.indexOf('\\', index); if (backSlashIndex === -1 || backSlashIndex > closeSquareIndex) { return true; } } } if (closeCurlyIndex !== -1 && str[index] === '{' && str[index + 1] !== '}') { closeCurlyIndex = str.indexOf('}', index); if (closeCurlyIndex > index) { backSlashIndex = str.indexOf('\\', index); if (backSlashIndex === -1 || backSlashIndex > closeCurlyIndex) { return true; } } } if (closeParenIndex !== -1 && str[index] === '(' && str[index + 1] === '?' 
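/**
 * Illustrative sketch (comment only, not executed) of the fast-glob path helpers defined
 * above; the Windows-style input is hypothetical and the second call assumes a win32 platform.
 *
 * ```js
 * unixify('src\\app\\main.ts');                   // 'src/app/main.ts'
 * path$h.convertPathToPattern('src\\app\\*.ts');  // 'src/app/*.ts' (on Windows)
 * // glob-special characters such as (, ), [ and ] are escaped first so they stay literal
 * ```
 */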
&& /[:!=]/.test(str[index + 2]) && str[index + 3] !== ')') { closeParenIndex = str.indexOf(')', index); if (closeParenIndex > index) { backSlashIndex = str.indexOf('\\', index); if (backSlashIndex === -1 || backSlashIndex > closeParenIndex) { return true; } } } if (pipeIndex !== -1 && str[index] === '(' && str[index + 1] !== '|') { if (pipeIndex < index) { pipeIndex = str.indexOf('|', index); } if (pipeIndex !== -1 && str[pipeIndex + 1] !== ')') { closeParenIndex = str.indexOf(')', pipeIndex); if (closeParenIndex > pipeIndex) { backSlashIndex = str.indexOf('\\', pipeIndex); if (backSlashIndex === -1 || backSlashIndex > closeParenIndex) { return true; } } } } if (str[index] === '\\') { var open = str[index + 1]; index += 2; var close = chars[open]; if (close) { var n = str.indexOf(close, index); if (n !== -1) { index = n + 1; } } if (str[index] === '!') { return true; } } else { index++; } } return false; }; var relaxedCheck = function(str) { if (str[0] === '!') { return true; } var index = 0; while (index < str.length) { if (/[*?{}()[\]]/.test(str[index])) { return true; } if (str[index] === '\\') { var open = str[index + 1]; index += 2; var close = chars[open]; if (close) { var n = str.indexOf(close, index); if (n !== -1) { index = n + 1; } } if (str[index] === '!') { return true; } } else { index++; } } return false; }; var isGlob$2 = function isGlob(str, options) { if (typeof str !== 'string' || str === '') { return false; } if (isExtglob(str)) { return true; } var check = strictCheck; // optionally relax check if (options && options.strict === false) { check = relaxedCheck; } return check(str); }; var isGlob$1 = isGlob$2; var pathPosixDirname = require$$0$4.posix.dirname; var isWin32 = require$$2.platform() === 'win32'; var slash = '/'; var backslash = /\\/g; var enclosure = /[\{\[].*[\}\]]$/; var globby = /(^|[^\\])([\{\[]|\([^\)]+$)/; var escaped = /\\([\!\*\?\|\[\]\(\)\{\}])/g; /** * @param {string} str * @param {Object} opts * @param {boolean} [opts.flipBackslashes=true] * @returns {string} */ var globParent$2 = function globParent(str, opts) { var options = Object.assign({ flipBackslashes: true }, opts); // flip windows path separators if (options.flipBackslashes && isWin32 && str.indexOf(slash) < 0) { str = str.replace(backslash, slash); } // special case for strings ending in enclosure containing path separator if (enclosure.test(str)) { str += slash; } // preserves full path in case of trailing path separator str += 'a'; // remove path parts that are globby do { str = pathPosixDirname(str); } while (isGlob$1(str) || globby.test(str)); // remove escape chars and return result return str.replace(escaped, '$1'); }; var utils$f = {}; (function (exports) { exports.isInteger = num => { if (typeof num === 'number') { return Number.isInteger(num); } if (typeof num === 'string' && num.trim() !== '') { return Number.isInteger(Number(num)); } return false; }; /** * Find a node of the given type */ exports.find = (node, type) => node.nodes.find(node => node.type === type); /** * Find a node of the given type */ exports.exceedsLimit = (min, max, step = 1, limit) => { if (limit === false) return false; if (!exports.isInteger(min) || !exports.isInteger(max)) return false; return ((Number(max) - Number(min)) / Number(step)) >= limit; }; /** * Escape the given node with '\\' before node.value */ exports.escapeNode = (block, n = 0, type) => { let node = block.nodes[n]; if (!node) return; if ((type && node.type === type) || node.type === 'open' || node.type === 'close') { if (node.escaped !== 
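/**
 * Illustrative sketch (comment only, not executed) of glob-parent, vendored above: it
 * walks up with path.posix.dirname until the remaining prefix contains no glob magic.
 *
 * ```js
 * globParent$2('src/**');          // 'src'
 * globParent$2('src/{a,b}/x.js');  // 'src'
 * globParent$2('src/a/x.js');      // 'src/a' (no glob magic, only the basename is dropped)
 * ```
 */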
true) { node.value = '\\' + node.value; node.escaped = true; } } }; /** * Returns true if the given brace node should be enclosed in literal braces */ exports.encloseBrace = node => { if (node.type !== 'brace') return false; if ((node.commas >> 0 + node.ranges >> 0) === 0) { node.invalid = true; return true; } return false; }; /** * Returns true if a brace node is invalid. */ exports.isInvalidBrace = block => { if (block.type !== 'brace') return false; if (block.invalid === true || block.dollar) return true; if ((block.commas >> 0 + block.ranges >> 0) === 0) { block.invalid = true; return true; } if (block.open !== true || block.close !== true) { block.invalid = true; return true; } return false; }; /** * Returns true if a node is an open or close node */ exports.isOpenOrClose = node => { if (node.type === 'open' || node.type === 'close') { return true; } return node.open === true || node.close === true; }; /** * Reduce an array of text nodes. */ exports.reduce = nodes => nodes.reduce((acc, node) => { if (node.type === 'text') acc.push(node.value); if (node.type === 'range') node.type = 'text'; return acc; }, []); /** * Flatten an array */ exports.flatten = (...args) => { const result = []; const flat = arr => { for (let i = 0; i < arr.length; i++) { let ele = arr[i]; Array.isArray(ele) ? flat(ele) : ele !== void 0 && result.push(ele); } return result; }; flat(args); return result; }; } (utils$f)); const utils$e = utils$f; var stringify$7 = (ast, options = {}) => { let stringify = (node, parent = {}) => { let invalidBlock = options.escapeInvalid && utils$e.isInvalidBrace(parent); let invalidNode = node.invalid === true && options.escapeInvalid === true; let output = ''; if (node.value) { if ((invalidBlock || invalidNode) && utils$e.isOpenOrClose(node)) { return '\\' + node.value; } return node.value; } if (node.value) { return node.value; } if (node.nodes) { for (let child of node.nodes) { output += stringify(child); } } return output; }; return stringify(ast); }; /*! * is-number * * Copyright (c) 2014-present, Jon Schlinkert. * Released under the MIT License. */ var isNumber$2 = function(num) { if (typeof num === 'number') { return num - num === 0; } if (typeof num === 'string' && num.trim() !== '') { return Number.isFinite ? Number.isFinite(+num) : isFinite(+num); } return false; }; /*! * to-regex-range * * Copyright (c) 2015-present, Jon Schlinkert. * Released under the MIT License. 
*/ const isNumber$1 = isNumber$2; const toRegexRange$1 = (min, max, options) => { if (isNumber$1(min) === false) { throw new TypeError('toRegexRange: expected the first argument to be a number'); } if (max === void 0 || min === max) { return String(min); } if (isNumber$1(max) === false) { throw new TypeError('toRegexRange: expected the second argument to be a number.'); } let opts = { relaxZeros: true, ...options }; if (typeof opts.strictZeros === 'boolean') { opts.relaxZeros = opts.strictZeros === false; } let relax = String(opts.relaxZeros); let shorthand = String(opts.shorthand); let capture = String(opts.capture); let wrap = String(opts.wrap); let cacheKey = min + ':' + max + '=' + relax + shorthand + capture + wrap; if (toRegexRange$1.cache.hasOwnProperty(cacheKey)) { return toRegexRange$1.cache[cacheKey].result; } let a = Math.min(min, max); let b = Math.max(min, max); if (Math.abs(a - b) === 1) { let result = min + '|' + max; if (opts.capture) { return `(${result})`; } if (opts.wrap === false) { return result; } return `(?:${result})`; } let isPadded = hasPadding(min) || hasPadding(max); let state = { min, max, a, b }; let positives = []; let negatives = []; if (isPadded) { state.isPadded = isPadded; state.maxLen = String(state.max).length; } if (a < 0) { let newMin = b < 0 ? Math.abs(b) : 1; negatives = splitToPatterns(newMin, Math.abs(a), state, opts); a = state.a = 0; } if (b >= 0) { positives = splitToPatterns(a, b, state, opts); } state.negatives = negatives; state.positives = positives; state.result = collatePatterns(negatives, positives); if (opts.capture === true) { state.result = `(${state.result})`; } else if (opts.wrap !== false && (positives.length + negatives.length) > 1) { state.result = `(?:${state.result})`; } toRegexRange$1.cache[cacheKey] = state; return state.result; }; function collatePatterns(neg, pos, options) { let onlyNegative = filterPatterns(neg, pos, '-', false) || []; let onlyPositive = filterPatterns(pos, neg, '', false) || []; let intersected = filterPatterns(neg, pos, '-?', true) || []; let subpatterns = onlyNegative.concat(intersected).concat(onlyPositive); return subpatterns.join('|'); } function splitToRanges(min, max) { let nines = 1; let zeros = 1; let stop = countNines(min, nines); let stops = new Set([max]); while (min <= stop && stop <= max) { stops.add(stop); nines += 1; stop = countNines(min, nines); } stop = countZeros(max + 1, zeros) - 1; while (min < stop && stop <= max) { stops.add(stop); zeros += 1; stop = countZeros(max + 1, zeros) - 1; } stops = [...stops]; stops.sort(compare); return stops; } /** * Convert a range to a regex pattern * @param {Number} `start` * @param {Number} `stop` * @return {String} */ function rangeToPattern(start, stop, options) { if (start === stop) { return { pattern: start, count: [], digits: 0 }; } let zipped = zip(start, stop); let digits = zipped.length; let pattern = ''; let count = 0; for (let i = 0; i < digits; i++) { let [startDigit, stopDigit] = zipped[i]; if (startDigit === stopDigit) { pattern += startDigit; } else if (startDigit !== '0' || stopDigit !== '9') { pattern += toCharacterClass(startDigit, stopDigit); } else { count++; } } if (count) { pattern += options.shorthand === true ? 
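/**
 * Illustrative sketch (comment only, not executed) of to-regex-range, vendored above.
 * Exact output varies with options; the first case is the adjacent-number fast path
 * visible in the code above.
 *
 * ```js
 * toRegexRange$1(1, 2);                        // '(?:1|2)'
 * toRegexRange$1('1', '9');                    // a single character class such as '[1-9]'
 * toRegexRange$1(1, 100, { capture: true });   // wraps the generated alternation in '(...)'
 * ```
 */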
'\\d' : '[0-9]'; } return { pattern, count: [count], digits }; } function splitToPatterns(min, max, tok, options) { let ranges = splitToRanges(min, max); let tokens = []; let start = min; let prev; for (let i = 0; i < ranges.length; i++) { let max = ranges[i]; let obj = rangeToPattern(String(start), String(max), options); let zeros = ''; if (!tok.isPadded && prev && prev.pattern === obj.pattern) { if (prev.count.length > 1) { prev.count.pop(); } prev.count.push(obj.count[0]); prev.string = prev.pattern + toQuantifier(prev.count); start = max + 1; continue; } if (tok.isPadded) { zeros = padZeros(max, tok, options); } obj.string = zeros + obj.pattern + toQuantifier(obj.count); tokens.push(obj); start = max + 1; prev = obj; } return tokens; } function filterPatterns(arr, comparison, prefix, intersection, options) { let result = []; for (let ele of arr) { let { string } = ele; // only push if _both_ are negative... if (!intersection && !contains(comparison, 'string', string)) { result.push(prefix + string); } // or _both_ are positive if (intersection && contains(comparison, 'string', string)) { result.push(prefix + string); } } return result; } /** * Zip strings */ function zip(a, b) { let arr = []; for (let i = 0; i < a.length; i++) arr.push([a[i], b[i]]); return arr; } function compare(a, b) { return a > b ? 1 : b > a ? -1 : 0; } function contains(arr, key, val) { return arr.some(ele => ele[key] === val); } function countNines(min, len) { return Number(String(min).slice(0, -len) + '9'.repeat(len)); } function countZeros(integer, zeros) { return integer - (integer % Math.pow(10, zeros)); } function toQuantifier(digits) { let [start = 0, stop = ''] = digits; if (stop || start > 1) { return `{${start + (stop ? ',' + stop : '')}}`; } return ''; } function toCharacterClass(a, b, options) { return `[${a}${(b - a === 1) ? '' : '-'}${b}]`; } function hasPadding(str) { return /^-?(0+)\d/.test(str); } function padZeros(value, tok, options) { if (!tok.isPadded) { return value; } let diff = Math.abs(tok.maxLen - String(value).length); let relax = options.relaxZeros !== false; switch (diff) { case 0: return ''; case 1: return relax ? '0?' : '0'; case 2: return relax ? '0{0,2}' : '00'; default: { return relax ? `0{0,${diff}}` : `0{${diff}}`; } } } /** * Cache */ toRegexRange$1.cache = {}; toRegexRange$1.clearCache = () => (toRegexRange$1.cache = {}); /** * Expose `toRegexRange` */ var toRegexRange_1 = toRegexRange$1; /*! * fill-range * * Copyright (c) 2014-present, Jon Schlinkert. * Licensed under the MIT License. */ const util$1 = require$$0$6; const toRegexRange = toRegexRange_1; const isObject = val => val !== null && typeof val === 'object' && !Array.isArray(val); const transform = toNumber => { return value => toNumber === true ? Number(value) : String(value); }; const isValidValue = value => { return typeof value === 'number' || (typeof value === 'string' && value !== ''); }; const isNumber = num => Number.isInteger(+num); const zeros = input => { let value = `${input}`; let index = -1; if (value[0] === '-') value = value.slice(1); if (value === '0') return false; while (value[++index] === '0'); return index > 0; }; const stringify$6 = (start, end, options) => { if (typeof start === 'string' || typeof end === 'string') { return true; } return options.stringify === true; }; const pad = (input, maxLength, toNumber) => { if (maxLength > 0) { let dash = input[0] === '-' ? '-' : ''; if (dash) input = input.slice(1); input = (dash + input.padStart(dash ? 
maxLength - 1 : maxLength, '0')); } if (toNumber === false) { return String(input); } return input; }; const toMaxLen = (input, maxLength) => { let negative = input[0] === '-' ? '-' : ''; if (negative) { input = input.slice(1); maxLength--; } while (input.length < maxLength) input = '0' + input; return negative ? ('-' + input) : input; }; const toSequence = (parts, options) => { parts.negatives.sort((a, b) => a < b ? -1 : a > b ? 1 : 0); parts.positives.sort((a, b) => a < b ? -1 : a > b ? 1 : 0); let prefix = options.capture ? '' : '?:'; let positives = ''; let negatives = ''; let result; if (parts.positives.length) { positives = parts.positives.join('|'); } if (parts.negatives.length) { negatives = `-(${prefix}${parts.negatives.join('|')})`; } if (positives && negatives) { result = `${positives}|${negatives}`; } else { result = positives || negatives; } if (options.wrap) { return `(${prefix}${result})`; } return result; }; const toRange = (a, b, isNumbers, options) => { if (isNumbers) { return toRegexRange(a, b, { wrap: false, ...options }); } let start = String.fromCharCode(a); if (a === b) return start; let stop = String.fromCharCode(b); return `[${start}-${stop}]`; }; const toRegex = (start, end, options) => { if (Array.isArray(start)) { let wrap = options.wrap === true; let prefix = options.capture ? '' : '?:'; return wrap ? `(${prefix}${start.join('|')})` : start.join('|'); } return toRegexRange(start, end, options); }; const rangeError = (...args) => { return new RangeError('Invalid range arguments: ' + util$1.inspect(...args)); }; const invalidRange = (start, end, options) => { if (options.strictRanges === true) throw rangeError([start, end]); return []; }; const invalidStep = (step, options) => { if (options.strictRanges === true) { throw new TypeError(`Expected step "${step}" to be a number`); } return []; }; const fillNumbers = (start, end, step = 1, options = {}) => { let a = Number(start); let b = Number(end); if (!Number.isInteger(a) || !Number.isInteger(b)) { if (options.strictRanges === true) throw rangeError([start, end]); return []; } // fix negative zero if (a === 0) a = 0; if (b === 0) b = 0; let descending = a > b; let startString = String(start); let endString = String(end); let stepString = String(step); step = Math.max(Math.abs(step), 1); let padded = zeros(startString) || zeros(endString) || zeros(stepString); let maxLen = padded ? Math.max(startString.length, endString.length, stepString.length) : 0; let toNumber = padded === false && stringify$6(start, end, options) === false; let format = options.transform || transform(toNumber); if (options.toRegex && step === 1) { return toRange(toMaxLen(start, maxLen), toMaxLen(end, maxLen), true, options); } let parts = { negatives: [], positives: [] }; let push = num => parts[num < 0 ? 'negatives' : 'positives'].push(Math.abs(num)); let range = []; let index = 0; while (descending ? a >= b : a <= b) { if (options.toRegex === true && step > 1) { push(a); } else { range.push(pad(format(a, index), maxLen, toNumber)); } a = descending ? a - step : a + step; index++; } if (options.toRegex === true) { return step > 1 ? 
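/**
 * Illustrative sketch (comment only, not executed) of fill-range, vendored in this block.
 *
 * ```js
 * fill$2(1, 5);        // [1, 2, 3, 4, 5]
 * fill$2(1, 10, 2);    // [1, 3, 5, 7, 9]
 * fill$2('a', 'e');    // ['a', 'b', 'c', 'd', 'e']
 * fill$2('01', '03');  // ['01', '02', '03'] (zero padding is preserved)
 * ```
 */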
toSequence(parts, options) : toRegex(range, null, { wrap: false, ...options }); } return range; }; const fillLetters = (start, end, step = 1, options = {}) => { if ((!isNumber(start) && start.length > 1) || (!isNumber(end) && end.length > 1)) { return invalidRange(start, end, options); } let format = options.transform || (val => String.fromCharCode(val)); let a = `${start}`.charCodeAt(0); let b = `${end}`.charCodeAt(0); let descending = a > b; let min = Math.min(a, b); let max = Math.max(a, b); if (options.toRegex && step === 1) { return toRange(min, max, false, options); } let range = []; let index = 0; while (descending ? a >= b : a <= b) { range.push(format(a, index)); a = descending ? a - step : a + step; index++; } if (options.toRegex === true) { return toRegex(range, null, { wrap: false, options }); } return range; }; const fill$2 = (start, end, step, options = {}) => { if (end == null && isValidValue(start)) { return [start]; } if (!isValidValue(start) || !isValidValue(end)) { return invalidRange(start, end, options); } if (typeof step === 'function') { return fill$2(start, end, 1, { transform: step }); } if (isObject(step)) { return fill$2(start, end, 0, step); } let opts = { ...options }; if (opts.capture === true) opts.wrap = true; step = step || opts.step || 1; if (!isNumber(step)) { if (step != null && !isObject(step)) return invalidStep(step, opts); return fill$2(start, end, 1, step); } if (isNumber(start) && isNumber(end)) { return fillNumbers(start, end, step, opts); } return fillLetters(start, end, Math.max(Math.abs(step), 1), opts); }; var fillRange = fill$2; const fill$1 = fillRange; const utils$d = utils$f; const compile$1 = (ast, options = {}) => { let walk = (node, parent = {}) => { let invalidBlock = utils$d.isInvalidBrace(parent); let invalidNode = node.invalid === true && options.escapeInvalid === true; let invalid = invalidBlock === true || invalidNode === true; let prefix = options.escapeInvalid === true ? '\\' : ''; let output = ''; if (node.isOpen === true) { return prefix + node.value; } if (node.isClose === true) { return prefix + node.value; } if (node.type === 'open') { return invalid ? (prefix + node.value) : '('; } if (node.type === 'close') { return invalid ? (prefix + node.value) : ')'; } if (node.type === 'comma') { return node.prev.type === 'comma' ? '' : (invalid ? node.value : '|'); } if (node.value) { return node.value; } if (node.nodes && node.ranges > 0) { let args = utils$d.reduce(node.nodes); let range = fill$1(...args, { ...options, wrap: false, toRegex: true }); if (range.length !== 0) { return args.length > 1 && range.length > 1 ? `(${range})` : range; } } if (node.nodes) { for (let child of node.nodes) { output += walk(child, node); } } return output; }; return walk(ast); }; var compile_1 = compile$1; const fill = fillRange; const stringify$5 = stringify$7; const utils$c = utils$f; const append$1 = (queue = '', stash = '', enclose = false) => { let result = []; queue = [].concat(queue); stash = [].concat(stash); if (!stash.length) return queue; if (!queue.length) { return enclose ? utils$c.flatten(stash).map(ele => `{${ele}}`) : stash; } for (let item of queue) { if (Array.isArray(item)) { for (let value of item) { result.push(append$1(value, stash, enclose)); } } else { for (let ele of stash) { if (enclose === true && typeof ele === 'string') ele = `{${ele}}`; result.push(Array.isArray(ele) ? 
append$1(item, ele, enclose) : (item + ele)); } } } return utils$c.flatten(result); }; const expand$2 = (ast, options = {}) => { let rangeLimit = options.rangeLimit === void 0 ? 1000 : options.rangeLimit; let walk = (node, parent = {}) => { node.queue = []; let p = parent; let q = parent.queue; while (p.type !== 'brace' && p.type !== 'root' && p.parent) { p = p.parent; q = p.queue; } if (node.invalid || node.dollar) { q.push(append$1(q.pop(), stringify$5(node, options))); return; } if (node.type === 'brace' && node.invalid !== true && node.nodes.length === 2) { q.push(append$1(q.pop(), ['{}'])); return; } if (node.nodes && node.ranges > 0) { let args = utils$c.reduce(node.nodes); if (utils$c.exceedsLimit(...args, options.step, rangeLimit)) { throw new RangeError('expanded array length exceeds range limit. Use options.rangeLimit to increase or disable the limit.'); } let range = fill(...args, options); if (range.length === 0) { range = stringify$5(node, options); } q.push(append$1(q.pop(), range)); node.nodes = []; return; } let enclose = utils$c.encloseBrace(node); let queue = node.queue; let block = node; while (block.type !== 'brace' && block.type !== 'root' && block.parent) { block = block.parent; queue = block.queue; } for (let i = 0; i < node.nodes.length; i++) { let child = node.nodes[i]; if (child.type === 'comma' && node.type === 'brace') { if (i === 1) queue.push(''); queue.push(''); continue; } if (child.type === 'close') { q.push(append$1(q.pop(), queue, enclose)); continue; } if (child.value && child.type !== 'open') { queue.push(append$1(queue.pop(), child.value)); continue; } if (child.nodes) { walk(child, node); } } return queue; }; return utils$c.flatten(walk(ast)); }; var expand_1$1 = expand$2; var constants$3 = { MAX_LENGTH: 1024 * 64, // Digits CHAR_0: '0', /* 0 */ CHAR_9: '9', /* 9 */ // Alphabet chars. CHAR_UPPERCASE_A: 'A', /* A */ CHAR_LOWERCASE_A: 'a', /* a */ CHAR_UPPERCASE_Z: 'Z', /* Z */ CHAR_LOWERCASE_Z: 'z', /* z */ CHAR_LEFT_PARENTHESES: '(', /* ( */ CHAR_RIGHT_PARENTHESES: ')', /* ) */ CHAR_ASTERISK: '*', /* * */ // Non-alphabetic chars. CHAR_AMPERSAND: '&', /* & */ CHAR_AT: '@', /* @ */ CHAR_BACKSLASH: '\\', /* \ */ CHAR_BACKTICK: '`', /* ` */ CHAR_CARRIAGE_RETURN: '\r', /* \r */ CHAR_CIRCUMFLEX_ACCENT: '^', /* ^ */ CHAR_COLON: ':', /* : */ CHAR_COMMA: ',', /* , */ CHAR_DOLLAR: '$', /* . */ CHAR_DOT: '.', /* . */ CHAR_DOUBLE_QUOTE: '"', /* " */ CHAR_EQUAL: '=', /* = */ CHAR_EXCLAMATION_MARK: '!', /* ! */ CHAR_FORM_FEED: '\f', /* \f */ CHAR_FORWARD_SLASH: '/', /* / */ CHAR_HASH: '#', /* # */ CHAR_HYPHEN_MINUS: '-', /* - */ CHAR_LEFT_ANGLE_BRACKET: '<', /* < */ CHAR_LEFT_CURLY_BRACE: '{', /* { */ CHAR_LEFT_SQUARE_BRACKET: '[', /* [ */ CHAR_LINE_FEED: '\n', /* \n */ CHAR_NO_BREAK_SPACE: '\u00A0', /* \u00A0 */ CHAR_PERCENT: '%', /* % */ CHAR_PLUS: '+', /* + */ CHAR_QUESTION_MARK: '?', /* ? */ CHAR_RIGHT_ANGLE_BRACKET: '>', /* > */ CHAR_RIGHT_CURLY_BRACE: '}', /* } */ CHAR_RIGHT_SQUARE_BRACKET: ']', /* ] */ CHAR_SEMICOLON: ';', /* ; */ CHAR_SINGLE_QUOTE: '\'', /* ' */ CHAR_SPACE: ' ', /* */ CHAR_TAB: '\t', /* \t */ CHAR_UNDERSCORE: '_', /* _ */ CHAR_VERTICAL_LINE: '|', /* | */ CHAR_ZERO_WIDTH_NOBREAK_SPACE: '\uFEFF' /* \uFEFF */ }; const stringify$4 = stringify$7; /** * Constants */ const { MAX_LENGTH, CHAR_BACKSLASH, /* \ */ CHAR_BACKTICK, /* ` */ CHAR_COMMA, /* , */ CHAR_DOT, /* . 
*/ CHAR_LEFT_PARENTHESES, /* ( */ CHAR_RIGHT_PARENTHESES, /* ) */ CHAR_LEFT_CURLY_BRACE, /* { */ CHAR_RIGHT_CURLY_BRACE, /* } */ CHAR_LEFT_SQUARE_BRACKET, /* [ */ CHAR_RIGHT_SQUARE_BRACKET, /* ] */ CHAR_DOUBLE_QUOTE, /* " */ CHAR_SINGLE_QUOTE, /* ' */ CHAR_NO_BREAK_SPACE, CHAR_ZERO_WIDTH_NOBREAK_SPACE } = constants$3; /** * parse */ const parse$d = (input, options = {}) => { if (typeof input !== 'string') { throw new TypeError('Expected a string'); } let opts = options || {}; let max = typeof opts.maxLength === 'number' ? Math.min(MAX_LENGTH, opts.maxLength) : MAX_LENGTH; if (input.length > max) { throw new SyntaxError(`Input length (${input.length}), exceeds max characters (${max})`); } let ast = { type: 'root', input, nodes: [] }; let stack = [ast]; let block = ast; let prev = ast; let brackets = 0; let length = input.length; let index = 0; let depth = 0; let value; /** * Helpers */ const advance = () => input[index++]; const push = node => { if (node.type === 'text' && prev.type === 'dot') { prev.type = 'text'; } if (prev && prev.type === 'text' && node.type === 'text') { prev.value += node.value; return; } block.nodes.push(node); node.parent = block; node.prev = prev; prev = node; return node; }; push({ type: 'bos' }); while (index < length) { block = stack[stack.length - 1]; value = advance(); /** * Invalid chars */ if (value === CHAR_ZERO_WIDTH_NOBREAK_SPACE || value === CHAR_NO_BREAK_SPACE) { continue; } /** * Escaped chars */ if (value === CHAR_BACKSLASH) { push({ type: 'text', value: (options.keepEscaping ? value : '') + advance() }); continue; } /** * Right square bracket (literal): ']' */ if (value === CHAR_RIGHT_SQUARE_BRACKET) { push({ type: 'text', value: '\\' + value }); continue; } /** * Left square bracket: '[' */ if (value === CHAR_LEFT_SQUARE_BRACKET) { brackets++; let next; while (index < length && (next = advance())) { value += next; if (next === CHAR_LEFT_SQUARE_BRACKET) { brackets++; continue; } if (next === CHAR_BACKSLASH) { value += advance(); continue; } if (next === CHAR_RIGHT_SQUARE_BRACKET) { brackets--; if (brackets === 0) { break; } } } push({ type: 'text', value }); continue; } /** * Parentheses */ if (value === CHAR_LEFT_PARENTHESES) { block = push({ type: 'paren', nodes: [] }); stack.push(block); push({ type: 'text', value }); continue; } if (value === CHAR_RIGHT_PARENTHESES) { if (block.type !== 'paren') { push({ type: 'text', value }); continue; } block = stack.pop(); push({ type: 'text', value }); block = stack[stack.length - 1]; continue; } /** * Quotes: '|"|` */ if (value === CHAR_DOUBLE_QUOTE || value === CHAR_SINGLE_QUOTE || value === CHAR_BACKTICK) { let open = value; let next; if (options.keepQuotes !== true) { value = ''; } while (index < length && (next = advance())) { if (next === CHAR_BACKSLASH) { value += next + advance(); continue; } if (next === open) { if (options.keepQuotes === true) value += next; break; } value += next; } push({ type: 'text', value }); continue; } /** * Left curly brace: '{' */ if (value === CHAR_LEFT_CURLY_BRACE) { depth++; let dollar = prev.value && prev.value.slice(-1) === '$' || block.dollar === true; let brace = { type: 'brace', open: true, close: false, dollar, depth, commas: 0, ranges: 0, nodes: [] }; block = push(brace); stack.push(block); push({ type: 'open', value }); continue; } /** * Right curly brace: '}' */ if (value === CHAR_RIGHT_CURLY_BRACE) { if (block.type !== 'brace') { push({ type: 'text', value }); continue; } let type = 'close'; block = stack.pop(); block.close = true; push({ type, value }); 
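/**
 * Illustrative sketch (comment only, not executed): the rough shape of the AST this brace
 * parser produces. Node properties are abbreviated to the types that appear in the code.
 *
 * ```js
 * const ast = parse$d('a/{b,c}');
 * // ast.nodes ~ [
 * //   { type: 'bos' },
 * //   { type: 'text', value: 'a/' },
 * //   { type: 'brace', commas: 1, ranges: 0, nodes: [
 * //     { type: 'open', value: '{' }, { type: 'text', value: 'b' },
 * //     { type: 'comma', value: ',' }, { type: 'text', value: 'c' },
 * //     { type: 'close', value: '}' } ] },
 * //   { type: 'eos' } ]
 * ```
 */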
depth--; block = stack[stack.length - 1]; continue; } /** * Comma: ',' */ if (value === CHAR_COMMA && depth > 0) { if (block.ranges > 0) { block.ranges = 0; let open = block.nodes.shift(); block.nodes = [open, { type: 'text', value: stringify$4(block) }]; } push({ type: 'comma', value }); block.commas++; continue; } /** * Dot: '.' */ if (value === CHAR_DOT && depth > 0 && block.commas === 0) { let siblings = block.nodes; if (depth === 0 || siblings.length === 0) { push({ type: 'text', value }); continue; } if (prev.type === 'dot') { block.range = []; prev.value += value; prev.type = 'range'; if (block.nodes.length !== 3 && block.nodes.length !== 5) { block.invalid = true; block.ranges = 0; prev.type = 'text'; continue; } block.ranges++; block.args = []; continue; } if (prev.type === 'range') { siblings.pop(); let before = siblings[siblings.length - 1]; before.value += prev.value + value; prev = before; block.ranges--; continue; } push({ type: 'dot', value }); continue; } /** * Text */ push({ type: 'text', value }); } // Mark imbalanced braces and brackets as invalid do { block = stack.pop(); if (block.type !== 'root') { block.nodes.forEach(node => { if (!node.nodes) { if (node.type === 'open') node.isOpen = true; if (node.type === 'close') node.isClose = true; if (!node.nodes) node.type = 'text'; node.invalid = true; } }); // get the location of the block on parent.nodes (block's siblings) let parent = stack[stack.length - 1]; let index = parent.nodes.indexOf(block); // replace the (invalid) block with it's nodes parent.nodes.splice(index, 1, ...block.nodes); } } while (stack.length > 0); push({ type: 'eos' }); return ast; }; var parse_1$2 = parse$d; const stringify$3 = stringify$7; const compile = compile_1; const expand$1 = expand_1$1; const parse$c = parse_1$2; /** * Expand the given pattern or create a regex-compatible string. * * ```js * const braces = require('braces'); * console.log(braces('{a,b,c}', { compile: true })); //=> ['(a|b|c)'] * console.log(braces('{a,b,c}')); //=> ['a', 'b', 'c'] * ``` * @param {String} `str` * @param {Object} `options` * @return {String} * @api public */ const braces$2 = (input, options = {}) => { let output = []; if (Array.isArray(input)) { for (let pattern of input) { let result = braces$2.create(pattern, options); if (Array.isArray(result)) { output.push(...result); } else { output.push(result); } } } else { output = [].concat(braces$2.create(input, options)); } if (options && options.expand === true && options.nodupes === true) { output = [...new Set(output)]; } return output; }; /** * Parse the given `str` with the given `options`. * * ```js * // braces.parse(pattern, [, options]); * const ast = braces.parse('a/{b,c}/d'); * console.log(ast); * ``` * @param {String} pattern Brace pattern to parse * @param {Object} options * @return {Object} Returns an AST * @api public */ braces$2.parse = (input, options = {}) => parse$c(input, options); /** * Creates a braces string from an AST, or an AST node. * * ```js * const braces = require('braces'); * let ast = braces.parse('foo/{a,b}/bar'); * console.log(stringify(ast.nodes[2])); //=> '{a,b}' * ``` * @param {String} `input` Brace pattern or AST. * @param {Object} `options` * @return {Array} Returns an array of expanded values. * @api public */ braces$2.stringify = (input, options = {}) => { if (typeof input === 'string') { return stringify$3(braces$2.parse(input, options), options); } return stringify$3(input, options); }; /** * Compiles a brace pattern into a regex-compatible, optimized string. 
* This method is called by the main [braces](#braces) function by default. * * ```js * const braces = require('braces'); * console.log(braces.compile('a/{b,c}/d')); * //=> ['a/(b|c)/d'] * ``` * @param {String} `input` Brace pattern or AST. * @param {Object} `options` * @return {Array} Returns an array of expanded values. * @api public */ braces$2.compile = (input, options = {}) => { if (typeof input === 'string') { input = braces$2.parse(input, options); } return compile(input, options); }; /** * Expands a brace pattern into an array. This method is called by the * main [braces](#braces) function when `options.expand` is true. Before * using this method it's recommended that you read the [performance notes](#performance)) * and advantages of using [.compile](#compile) instead. * * ```js * const braces = require('braces'); * console.log(braces.expand('a/{b,c}/d')); * //=> ['a/b/d', 'a/c/d']; * ``` * @param {String} `pattern` Brace pattern * @param {Object} `options` * @return {Array} Returns an array of expanded values. * @api public */ braces$2.expand = (input, options = {}) => { if (typeof input === 'string') { input = braces$2.parse(input, options); } let result = expand$1(input, options); // filter out empty strings if specified if (options.noempty === true) { result = result.filter(Boolean); } // filter out duplicates if specified if (options.nodupes === true) { result = [...new Set(result)]; } return result; }; /** * Processes a brace pattern and returns either an expanded array * (if `options.expand` is true), a highly optimized regex-compatible string. * This method is called by the main [braces](#braces) function. * * ```js * const braces = require('braces'); * console.log(braces.create('user-{200..300}/project-{a,b,c}-{1..10}')) * //=> 'user-(20[0-9]|2[1-9][0-9]|300)/project-(a|b|c)-([1-9]|10)' * ``` * @param {String} `pattern` Brace pattern * @param {Object} `options` * @return {Array} Returns an array of expanded values. * @api public */ braces$2.create = (input, options = {}) => { if (input === '' || input.length < 3) { return [input]; } return options.expand !== true ? braces$2.compile(input, options) : braces$2.expand(input, options); }; /** * Expose "braces" */ var braces_1 = braces$2; const util = require$$0$6; const braces$1 = braces_1; const picomatch$2 = picomatch$3; const utils$b = utils$k; const isEmptyString = val => val === '' || val === './'; /** * Returns an array of strings that match one or more glob patterns. * * ```js * const mm = require('micromatch'); * // mm(list, patterns[, options]); * * console.log(mm(['a.js', 'a.txt'], ['*.js'])); * //=> [ 'a.js' ] * ``` * @param {String|Array} `list` List of strings to match. * @param {String|Array} `patterns` One or more glob patterns to use for matching. * @param {Object} `options` See available [options](#options) * @return {Array} Returns an array of matches * @summary false * @api public */ const micromatch$1 = (list, patterns, options) => { patterns = [].concat(patterns); list = [].concat(list); let omit = new Set(); let keep = new Set(); let items = new Set(); let negatives = 0; let onResult = state => { items.add(state.output); if (options && options.onResult) { options.onResult(state); } }; for (let i = 0; i < patterns.length; i++) { let isMatch = picomatch$2(String(patterns[i]), { ...options, onResult }, true); let negated = isMatch.state.negated || isMatch.state.negatedExtglob; if (negated) negatives++; for (let item of list) { let matched = isMatch(item, true); let match = negated ? 
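/**
 * Illustrative sketch (comment only, not executed): how the negation bookkeeping above
 * behaves at the API level. When every pattern is negated, matching starts from the full
 * item set and only removes what the negated patterns exclude.
 *
 * ```js
 * micromatch$1(['a.js', 'b.md'], ['!*.md']);                // ['a.js']
 * micromatch$1(['a.js', 'b.js', 'a.md'], ['*.js', '!a.*']); // ['b.js']
 * ```
 */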
!matched.isMatch : matched.isMatch; if (!match) continue; if (negated) { omit.add(matched.output); } else { omit.delete(matched.output); keep.add(matched.output); } } } let result = negatives === patterns.length ? [...items] : [...keep]; let matches = result.filter(item => !omit.has(item)); if (options && matches.length === 0) { if (options.failglob === true) { throw new Error(`No matches found for "${patterns.join(', ')}"`); } if (options.nonull === true || options.nullglob === true) { return options.unescape ? patterns.map(p => p.replace(/\\/g, '')) : patterns; } } return matches; }; /** * Backwards compatibility */ micromatch$1.match = micromatch$1; /** * Returns a matcher function from the given glob `pattern` and `options`. * The returned function takes a string to match as its only argument and returns * true if the string is a match. * * ```js * const mm = require('micromatch'); * // mm.matcher(pattern[, options]); * * const isMatch = mm.matcher('*.!(*a)'); * console.log(isMatch('a.a')); //=> false * console.log(isMatch('a.b')); //=> true * ``` * @param {String} `pattern` Glob pattern * @param {Object} `options` * @return {Function} Returns a matcher function. * @api public */ micromatch$1.matcher = (pattern, options) => picomatch$2(pattern, options); /** * Returns true if **any** of the given glob `patterns` match the specified `string`. * * ```js * const mm = require('micromatch'); * // mm.isMatch(string, patterns[, options]); * * console.log(mm.isMatch('a.a', ['b.*', '*.a'])); //=> true * console.log(mm.isMatch('a.a', 'b.*')); //=> false * ``` * @param {String} `str` The string to test. * @param {String|Array} `patterns` One or more glob patterns to use for matching. * @param {Object} `[options]` See available [options](#options). * @return {Boolean} Returns true if any patterns match `str` * @api public */ micromatch$1.isMatch = (str, patterns, options) => picomatch$2(patterns, options)(str); /** * Backwards compatibility */ micromatch$1.any = micromatch$1.isMatch; /** * Returns a list of strings that _**do not match any**_ of the given `patterns`. * * ```js * const mm = require('micromatch'); * // mm.not(list, patterns[, options]); * * console.log(mm.not(['a.a', 'b.b', 'c.c'], '*.a')); * //=> ['b.b', 'c.c'] * ``` * @param {Array} `list` Array of strings to match. * @param {String|Array} `patterns` One or more glob pattern to use for matching. * @param {Object} `options` See available [options](#options) for changing how matches are performed * @return {Array} Returns an array of strings that **do not match** the given patterns. * @api public */ micromatch$1.not = (list, patterns, options = {}) => { patterns = [].concat(patterns).map(String); let result = new Set(); let items = []; let onResult = state => { if (options.onResult) options.onResult(state); items.push(state.output); }; let matches = new Set(micromatch$1(list, patterns, { ...options, onResult })); for (let item of items) { if (!matches.has(item)) { result.add(item); } } return [...result]; }; /** * Returns true if the given `string` contains the given pattern. Similar * to [.isMatch](#isMatch) but the pattern can match any part of the string. * * ```js * var mm = require('micromatch'); * // mm.contains(string, pattern[, options]); * * console.log(mm.contains('aa/bb/cc', '*b')); * //=> true * console.log(mm.contains('aa/bb/cc', '*d')); * //=> false * ``` * @param {String} `str` The string to match. * @param {String|Array} `patterns` Glob pattern to use for matching. 
* @param {Object} `options` See available [options](#options) for changing how matches are performed * @return {Boolean} Returns true if any of the patterns matches any part of `str`. * @api public */ micromatch$1.contains = (str, pattern, options) => { if (typeof str !== 'string') { throw new TypeError(`Expected a string: "${util.inspect(str)}"`); } if (Array.isArray(pattern)) { return pattern.some(p => micromatch$1.contains(str, p, options)); } if (typeof pattern === 'string') { if (isEmptyString(str) || isEmptyString(pattern)) { return false; } if (str.includes(pattern) || (str.startsWith('./') && str.slice(2).includes(pattern))) { return true; } } return micromatch$1.isMatch(str, pattern, { ...options, contains: true }); }; /** * Filter the keys of the given object with the given `glob` pattern * and `options`. Does not attempt to match nested keys. If you need this feature, * use [glob-object][] instead. * * ```js * const mm = require('micromatch'); * // mm.matchKeys(object, patterns[, options]); * * const obj = { aa: 'a', ab: 'b', ac: 'c' }; * console.log(mm.matchKeys(obj, '*b')); * //=> { ab: 'b' } * ``` * @param {Object} `object` The object with keys to filter. * @param {String|Array} `patterns` One or more glob patterns to use for matching. * @param {Object} `options` See available [options](#options) for changing how matches are performed * @return {Object} Returns an object with only keys that match the given patterns. * @api public */ micromatch$1.matchKeys = (obj, patterns, options) => { if (!utils$b.isObject(obj)) { throw new TypeError('Expected the first argument to be an object'); } let keys = micromatch$1(Object.keys(obj), patterns, options); let res = {}; for (let key of keys) res[key] = obj[key]; return res; }; /** * Returns true if some of the strings in the given `list` match any of the given glob `patterns`. * * ```js * const mm = require('micromatch'); * // mm.some(list, patterns[, options]); * * console.log(mm.some(['foo.js', 'bar.js'], ['*.js', '!foo.js'])); * // true * console.log(mm.some(['foo.js'], ['*.js', '!foo.js'])); * // false * ``` * @param {String|Array} `list` The string or array of strings to test. Returns as soon as the first match is found. * @param {String|Array} `patterns` One or more glob patterns to use for matching. * @param {Object} `options` See available [options](#options) for changing how matches are performed * @return {Boolean} Returns true if any `patterns` matches any of the strings in `list` * @api public */ micromatch$1.some = (list, patterns, options) => { let items = [].concat(list); for (let pattern of [].concat(patterns)) { let isMatch = picomatch$2(String(pattern), options); if (items.some(item => isMatch(item))) { return true; } } return false; }; /** * Returns true if every string in the given `list` matches * any of the given glob `patterns`. * * ```js * const mm = require('micromatch'); * // mm.every(list, patterns[, options]); * * console.log(mm.every('foo.js', ['foo.js'])); * // true * console.log(mm.every(['foo.js', 'bar.js'], ['*.js'])); * // true * console.log(mm.every(['foo.js', 'bar.js'], ['*.js', '!foo.js'])); * // false * console.log(mm.every(['foo.js'], ['*.js', '!foo.js'])); * // false * ``` * @param {String|Array} `list` The string or array of strings to test. * @param {String|Array} `patterns` One or more glob patterns to use for matching. 
* @param {Object} `options` See available [options](#options) for changing how matches are performed * @return {Boolean} Returns true if all `patterns` match all of the strings in `list` * @api public */ micromatch$1.every = (list, patterns, options) => { let items = [].concat(list); for (let pattern of [].concat(patterns)) { let isMatch = picomatch$2(String(pattern), options); if (!items.every(item => isMatch(item))) { return false; } } return true; }; /** * Returns true if **all** of the given `patterns` match * the specified string. * * ```js * const mm = require('micromatch'); * // mm.all(string, patterns[, options]); * * console.log(mm.all('foo.js', ['foo.js'])); * // true * * console.log(mm.all('foo.js', ['*.js', '!foo.js'])); * // false * * console.log(mm.all('foo.js', ['*.js', 'foo.js'])); * // true * * console.log(mm.all('foo.js', ['*.js', 'f*', '*o*', '*o.js'])); * // true * ``` * @param {String|Array} `str` The string to test. * @param {String|Array} `patterns` One or more glob patterns to use for matching. * @param {Object} `options` See available [options](#options) for changing how matches are performed * @return {Boolean} Returns true if all patterns match `str` * @api public */ micromatch$1.all = (str, patterns, options) => { if (typeof str !== 'string') { throw new TypeError(`Expected a string: "${util.inspect(str)}"`); } return [].concat(patterns).every(p => picomatch$2(p, options)(str)); }; /** * Returns an array of matches captured by `pattern` in `string`, or `null` if the pattern did not match. * * ```js * const mm = require('micromatch'); * // mm.capture(pattern, string[, options]); * * console.log(mm.capture('test/*.js', 'test/foo.js')); * //=> ['foo'] * console.log(mm.capture('test/*.js', 'foo/bar.css')); * //=> null * ``` * @param {String} `glob` Glob pattern to use for matching. * @param {String} `input` String to match * @param {Object} `options` See available [options](#options) for changing how matches are performed * @return {Array|null} Returns an array of captures if the input matches the glob pattern, otherwise `null`. * @api public */ micromatch$1.capture = (glob, input, options) => { let posix = utils$b.isWindows(options); let regex = picomatch$2.makeRe(String(glob), { ...options, capture: true }); let match = regex.exec(posix ? utils$b.toPosixSlashes(input) : input); if (match) { return match.slice(1).map(v => v === void 0 ? '' : v); } }; /** * Create a regular expression from the given glob `pattern`. * * ```js * const mm = require('micromatch'); * // mm.makeRe(pattern[, options]); * * console.log(mm.makeRe('*.js')); * //=> /^(?:(\.[\\\/])?(?!\.)(?=.)[^\/]*?\.js)$/ * ``` * @param {String} `pattern` A glob pattern to convert to regex. * @param {Object} `options` * @return {RegExp} Returns a regex created from the given pattern. * @api public */ micromatch$1.makeRe = (...args) => picomatch$2.makeRe(...args); /** * Scan a glob pattern to separate the pattern into segments. Used * by the [split](#split) method. * * ```js * const mm = require('micromatch'); * const state = mm.scan(pattern[, options]); * ``` * @param {String} `pattern` * @param {Object} `options` * @return {Object} Returns an object with details about the scanned pattern. * @api public */ micromatch$1.scan = (...args) => picomatch$2.scan(...args); /** * Parse a glob pattern to create the source string for a regular * expression.
* * ```js * const mm = require('micromatch'); * const state = mm.parse(pattern[, options]); * ``` * @param {String} `glob` * @param {Object} `options` * @return {Object} Returns an object with useful properties and output to be used as regex source string. * @api public */ micromatch$1.parse = (patterns, options) => { let res = []; for (let pattern of [].concat(patterns || [])) { for (let str of braces$1(String(pattern), options)) { res.push(picomatch$2.parse(str, options)); } } return res; }; /** * Process the given brace `pattern`. * * ```js * const { braces } = require('micromatch'); * console.log(braces('foo/{a,b,c}/bar')); * //=> [ 'foo/(a|b|c)/bar' ] * * console.log(braces('foo/{a,b,c}/bar', { expand: true })); * //=> [ 'foo/a/bar', 'foo/b/bar', 'foo/c/bar' ] * ``` * @param {String} `pattern` String with brace pattern to process. * @param {Object} `options` Any [options](#options) to change how expansion is performed. See the [braces][] library for all available options. * @return {Array} * @api public */ micromatch$1.braces = (pattern, options) => { if (typeof pattern !== 'string') throw new TypeError('Expected a string'); if ((options && options.nobrace === true) || !/\{.*\}/.test(pattern)) { return [pattern]; } return braces$1(pattern, options); }; /** * Expand braces */ micromatch$1.braceExpand = (pattern, options) => { if (typeof pattern !== 'string') throw new TypeError('Expected a string'); return micromatch$1.braces(pattern, { ...options, expand: true }); }; /** * Expose micromatch */ var micromatch_1 = micromatch$1; var micromatch$2 = /*@__PURE__*/getDefaultExportFromCjs(micromatch_1); Object.defineProperty(pattern$1, "__esModule", { value: true }); pattern$1.removeDuplicateSlashes = pattern$1.matchAny = pattern$1.convertPatternsToRe = pattern$1.makeRe = pattern$1.getPatternParts = pattern$1.expandBraceExpansion = pattern$1.expandPatternsWithBraceExpansion = pattern$1.isAffectDepthOfReadingPattern = pattern$1.endsWithSlashGlobStar = pattern$1.hasGlobStar = pattern$1.getBaseDirectory = pattern$1.isPatternRelatedToParentDirectory = pattern$1.getPatternsOutsideCurrentDirectory = pattern$1.getPatternsInsideCurrentDirectory = pattern$1.getPositivePatterns = pattern$1.getNegativePatterns = pattern$1.isPositivePattern = pattern$1.isNegativePattern = pattern$1.convertToNegativePattern = pattern$1.convertToPositivePattern = pattern$1.isDynamicPattern = pattern$1.isStaticPattern = void 0; const path$f = require$$0$4; const globParent$1 = globParent$2; const micromatch = micromatch_1; const GLOBSTAR$1 = '**'; const ESCAPE_SYMBOL = '\\'; const COMMON_GLOB_SYMBOLS_RE = /[*?]|^!/; const REGEX_CHARACTER_CLASS_SYMBOLS_RE = /\[[^[]*]/; const REGEX_GROUP_SYMBOLS_RE = /(?:^|[^!*+?@])\([^(]*\|[^|]*\)/; const GLOB_EXTENSION_SYMBOLS_RE = /[!*+?@]\([^(]*\)/; const BRACE_EXPANSION_SEPARATORS_RE = /,|\.\./; /** * Matches a sequence of two or more consecutive slashes, excluding the first two slashes at the beginning of the string. * The latter is due to the presence of the device path at the beginning of the UNC path. */ const DOUBLE_SLASH_RE$1 = /(?!^)\/{2,}/g; function isStaticPattern(pattern, options = {}) { return !isDynamicPattern(pattern, options); } pattern$1.isStaticPattern = isStaticPattern; function isDynamicPattern(pattern, options = {}) { /** * A special case with an empty string is necessary for matching patterns that start with a forward slash. * An empty string cannot be a dynamic pattern. * For example, the pattern `/lib/*` will be spread into parts: '', 'lib', '*'. 
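 *
 * Illustrative results under the default options (examples added for clarity; they are not part of the original fast-glob comment):
 *   isDynamicPattern('')             //=> false (the empty part produced by `/lib/*`)
 *   isDynamicPattern('lib')          //=> false (a plain segment without glob syntax)
 *   isDynamicPattern('*')            //=> true  (contains a glob symbol)
 *   isDynamicPattern('src/{a,b}.js') //=> true  (brace expansion is enabled by default)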
*/ if (pattern === '') { return false; } /** * When the `caseSensitiveMatch` option is disabled, all patterns must be marked as dynamic, because we cannot check * filepath directly (without read directory). */ if (options.caseSensitiveMatch === false || pattern.includes(ESCAPE_SYMBOL)) { return true; } if (COMMON_GLOB_SYMBOLS_RE.test(pattern) || REGEX_CHARACTER_CLASS_SYMBOLS_RE.test(pattern) || REGEX_GROUP_SYMBOLS_RE.test(pattern)) { return true; } if (options.extglob !== false && GLOB_EXTENSION_SYMBOLS_RE.test(pattern)) { return true; } if (options.braceExpansion !== false && hasBraceExpansion(pattern)) { return true; } return false; } pattern$1.isDynamicPattern = isDynamicPattern; function hasBraceExpansion(pattern) { const openingBraceIndex = pattern.indexOf('{'); if (openingBraceIndex === -1) { return false; } const closingBraceIndex = pattern.indexOf('}', openingBraceIndex + 1); if (closingBraceIndex === -1) { return false; } const braceContent = pattern.slice(openingBraceIndex, closingBraceIndex); return BRACE_EXPANSION_SEPARATORS_RE.test(braceContent); } function convertToPositivePattern(pattern) { return isNegativePattern(pattern) ? pattern.slice(1) : pattern; } pattern$1.convertToPositivePattern = convertToPositivePattern; function convertToNegativePattern(pattern) { return '!' + pattern; } pattern$1.convertToNegativePattern = convertToNegativePattern; function isNegativePattern(pattern) { return pattern.startsWith('!') && pattern[1] !== '('; } pattern$1.isNegativePattern = isNegativePattern; function isPositivePattern(pattern) { return !isNegativePattern(pattern); } pattern$1.isPositivePattern = isPositivePattern; function getNegativePatterns(patterns) { return patterns.filter(isNegativePattern); } pattern$1.getNegativePatterns = getNegativePatterns; function getPositivePatterns$1(patterns) { return patterns.filter(isPositivePattern); } pattern$1.getPositivePatterns = getPositivePatterns$1; /** * Returns patterns that can be applied inside the current directory. * * @example * // ['./*', '*', 'a/*'] * getPatternsInsideCurrentDirectory(['./*', '*', 'a/*', '../*', './../*']) */ function getPatternsInsideCurrentDirectory(patterns) { return patterns.filter((pattern) => !isPatternRelatedToParentDirectory(pattern)); } pattern$1.getPatternsInsideCurrentDirectory = getPatternsInsideCurrentDirectory; /** * Returns patterns to be expanded relative to (outside) the current directory. 
 * * @example * // ['../*', './../*'] * getPatternsOutsideCurrentDirectory(['./*', '*', 'a/*', '../*', './../*']) */ function getPatternsOutsideCurrentDirectory(patterns) { return patterns.filter(isPatternRelatedToParentDirectory); } pattern$1.getPatternsOutsideCurrentDirectory = getPatternsOutsideCurrentDirectory; function isPatternRelatedToParentDirectory(pattern) { return pattern.startsWith('..') || pattern.startsWith('./..'); } pattern$1.isPatternRelatedToParentDirectory = isPatternRelatedToParentDirectory; function getBaseDirectory(pattern) { return globParent$1(pattern, { flipBackslashes: false }); } pattern$1.getBaseDirectory = getBaseDirectory; function hasGlobStar(pattern) { return pattern.includes(GLOBSTAR$1); } pattern$1.hasGlobStar = hasGlobStar; function endsWithSlashGlobStar(pattern) { return pattern.endsWith('/' + GLOBSTAR$1); } pattern$1.endsWithSlashGlobStar = endsWithSlashGlobStar; function isAffectDepthOfReadingPattern(pattern) { const basename = path$f.basename(pattern); return endsWithSlashGlobStar(pattern) || isStaticPattern(basename); } pattern$1.isAffectDepthOfReadingPattern = isAffectDepthOfReadingPattern; function expandPatternsWithBraceExpansion(patterns) { return patterns.reduce((collection, pattern) => { return collection.concat(expandBraceExpansion(pattern)); }, []); } pattern$1.expandPatternsWithBraceExpansion = expandPatternsWithBraceExpansion; function expandBraceExpansion(pattern) { const patterns = micromatch.braces(pattern, { expand: true, nodupes: true, keepEscaping: true }); /** * Sort the patterns by length so that the same depth patterns are processed side by side. * `a/{b,}/{c,}/*` – `['a///*', 'a/b//*', 'a//c/*', 'a/b/c/*']` */ patterns.sort((a, b) => a.length - b.length); /** * Micromatch can return an empty string in the case of patterns like `{a,}`. */ return patterns.filter((pattern) => pattern !== ''); } pattern$1.expandBraceExpansion = expandBraceExpansion; function getPatternParts(pattern, options) { let { parts } = micromatch.scan(pattern, Object.assign(Object.assign({}, options), { parts: true })); /** * The scan method returns an empty array in some cases. * See micromatch/picomatch#58 for more details. */ if (parts.length === 0) { parts = [pattern]; } /** * The scan method does not return an empty part for the pattern with a forward slash. * This is another part of micromatch/picomatch#58. */ if (parts[0].startsWith('/')) { parts[0] = parts[0].slice(1); parts.unshift(''); } return parts; } pattern$1.getPatternParts = getPatternParts; function makeRe(pattern, options) { return micromatch.makeRe(pattern, options); } pattern$1.makeRe = makeRe; function convertPatternsToRe(patterns, options) { return patterns.map((pattern) => makeRe(pattern, options)); } pattern$1.convertPatternsToRe = convertPatternsToRe; function matchAny(entry, patternsRe) { return patternsRe.some((patternRe) => patternRe.test(entry)); } pattern$1.matchAny = matchAny; /** * This package only works with forward slashes as a path separator. * Because of this, we cannot use the standard `path.normalize` method, because on the Windows platform it will use backslashes. */ function removeDuplicateSlashes(pattern) { return pattern.replace(DOUBLE_SLASH_RE$1, '/'); } pattern$1.removeDuplicateSlashes = removeDuplicateSlashes; var stream$4 = {}; /* * merge2 * https://github.com/teambition/merge2 * * Copyright (c) 2014-2020 Teambition * Licensed under the MIT license.
 */ const Stream = require$$0$7; const PassThrough = Stream.PassThrough; const slice = Array.prototype.slice; var merge2_1 = merge2$1; function merge2$1 () { const streamsQueue = []; const args = slice.call(arguments); let merging = false; let options = args[args.length - 1]; if (options && !Array.isArray(options) && options.pipe == null) { args.pop(); } else { options = {}; } const doEnd = options.end !== false; const doPipeError = options.pipeError === true; if (options.objectMode == null) { options.objectMode = true; } if (options.highWaterMark == null) { options.highWaterMark = 64 * 1024; } const mergedStream = PassThrough(options); function addStream () { for (let i = 0, len = arguments.length; i < len; i++) { streamsQueue.push(pauseStreams(arguments[i], options)); } mergeStream(); return this } function mergeStream () { if (merging) { return } merging = true; let streams = streamsQueue.shift(); if (!streams) { process.nextTick(endStream); return } if (!Array.isArray(streams)) { streams = [streams]; } let pipesCount = streams.length + 1; function next () { if (--pipesCount > 0) { return } merging = false; mergeStream(); } function pipe (stream) { function onend () { stream.removeListener('merge2UnpipeEnd', onend); stream.removeListener('end', onend); if (doPipeError) { stream.removeListener('error', onerror); } next(); } function onerror (err) { mergedStream.emit('error', err); } /* skip an already-ended stream */ if (stream._readableState.endEmitted) { return next() } stream.on('merge2UnpipeEnd', onend); stream.on('end', onend); if (doPipeError) { stream.on('error', onerror); } stream.pipe(mergedStream, { end: false }); /* compatibility with old-style streams */ stream.resume(); } for (let i = 0; i < streams.length; i++) { pipe(streams[i]); } next(); } function endStream () { merging = false; /* emit 'queueDrain' when all streams have been merged */ mergedStream.emit('queueDrain'); if (doEnd) { mergedStream.end(); } } mergedStream.setMaxListeners(0); mergedStream.add = addStream; mergedStream.on('unpipe', function (stream) { stream.emit('merge2UnpipeEnd'); }); if (args.length) { addStream.apply(null, args); } return mergedStream } // check and pause streams for pipe.
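/*
 * Illustrative usage of merge2 (added for clarity; not part of the bundled merge2 source).
 * Arguments are consumed in order: each argument (a stream or an array of streams) is merged
 * only after the previous one has ended, while streams inside the same array are piped
 * concurrently into the returned PassThrough.
 *
 *   const merged = merge2$1(streamA, [streamB, streamC], { end: false });
 *   merged.add(streamD);                         // more streams can be queued later
 *   merged.on('queueDrain', () => merged.end()); // fired once every queued stream has merged
 */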
function pauseStreams (streams, options) { if (!Array.isArray(streams)) { /* Backwards-compat with old-style streams */ if (!streams._readableState && streams.pipe) { streams = streams.pipe(PassThrough(options)); } if (!streams._readableState || !streams.pause || !streams.pipe) { throw new Error('Only readable stream can be merged.') } streams.pause(); } else { for (let i = 0, len = streams.length; i < len; i++) { streams[i] = pauseStreams(streams[i], options); } } return streams } Object.defineProperty(stream$4, "__esModule", { value: true }); stream$4.merge = void 0; const merge2 = merge2_1; function merge$1(streams) { const mergedStream = merge2(streams); streams.forEach((stream) => { stream.once('error', (error) => mergedStream.emit('error', error)); }); mergedStream.once('close', () => propagateCloseEventToSources(streams)); mergedStream.once('end', () => propagateCloseEventToSources(streams)); return mergedStream; } stream$4.merge = merge$1; function propagateCloseEventToSources(streams) { streams.forEach((stream) => stream.emit('close')); } var string$2 = {}; Object.defineProperty(string$2, "__esModule", { value: true }); string$2.isEmpty = string$2.isString = void 0; function isString(input) { return typeof input === 'string'; } string$2.isString = isString; function isEmpty$1(input) { return input === ''; } string$2.isEmpty = isEmpty$1; Object.defineProperty(utils$g, "__esModule", { value: true }); utils$g.string = utils$g.stream = utils$g.pattern = utils$g.path = utils$g.fs = utils$g.errno = utils$g.array = void 0; const array = array$1; utils$g.array = array; const errno = errno$1; utils$g.errno = errno; const fs$g = fs$h; utils$g.fs = fs$g; const path$e = path$h; utils$g.path = path$e; const pattern = pattern$1; utils$g.pattern = pattern; const stream$3 = stream$4; utils$g.stream = stream$3; const string$1 = string$2; utils$g.string = string$1; Object.defineProperty(tasks, "__esModule", { value: true }); tasks.convertPatternGroupToTask = tasks.convertPatternGroupsToTasks = tasks.groupPatternsByBaseDirectory = tasks.getNegativePatternsAsPositive = tasks.getPositivePatterns = tasks.convertPatternsToTasks = tasks.generate = void 0; const utils$a = utils$g; function generate(input, settings) { const patterns = processPatterns(input, settings); const ignore = processPatterns(settings.ignore, settings); const positivePatterns = getPositivePatterns(patterns); const negativePatterns = getNegativePatternsAsPositive(patterns, ignore); const staticPatterns = positivePatterns.filter((pattern) => utils$a.pattern.isStaticPattern(pattern, settings)); const dynamicPatterns = positivePatterns.filter((pattern) => utils$a.pattern.isDynamicPattern(pattern, settings)); const staticTasks = convertPatternsToTasks(staticPatterns, negativePatterns, /* dynamic */ false); const dynamicTasks = convertPatternsToTasks(dynamicPatterns, negativePatterns, /* dynamic */ true); return staticTasks.concat(dynamicTasks); } tasks.generate = generate; function processPatterns(input, settings) { let patterns = input; /** * The original pattern like `{,*,**,a/*}` can lead to problems checking the depth when matching an entry * and some problems with the micromatch package (see fast-glob issues: #365, #394). * * To solve this problem, we expand all patterns containing brace expansion. This can lead to a slight slowdown * in matching in the case of a large set of patterns after expansion.
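 *
 * Illustrative expansion (an added example, not part of the original comment): `{,*,**,a/*}` is expanded
 * by `expandPatternsWithBraceExpansion` into `['*', '**', 'a/*']` (the empty alternative produced by the
 * leading comma is filtered out), so the depth of each branch can then be checked independently.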
*/ if (settings.braceExpansion) { patterns = utils$a.pattern.expandPatternsWithBraceExpansion(patterns); } /** * If the `baseNameMatch` option is enabled, we must add globstar to patterns, so that they can be used * at any nesting level. * * We do this here, because otherwise we have to complicate the filtering logic. For example, we need to change * the pattern in the filter before creating a regular expression. There is no need to change the patterns * in the application. Only on the input. */ if (settings.baseNameMatch) { patterns = patterns.map((pattern) => pattern.includes('/') ? pattern : `**/${pattern}`); } /** * This method also removes duplicate slashes that may have been in the pattern or formed as a result of expansion. */ return patterns.map((pattern) => utils$a.pattern.removeDuplicateSlashes(pattern)); } /** * Returns tasks grouped by basic pattern directories. * * Patterns that can be found inside (`./`) and outside (`../`) the current directory are handled separately. * This is necessary because directory traversal starts at the base directory and goes deeper. */ function convertPatternsToTasks(positive, negative, dynamic) { const tasks = []; const patternsOutsideCurrentDirectory = utils$a.pattern.getPatternsOutsideCurrentDirectory(positive); const patternsInsideCurrentDirectory = utils$a.pattern.getPatternsInsideCurrentDirectory(positive); const outsideCurrentDirectoryGroup = groupPatternsByBaseDirectory(patternsOutsideCurrentDirectory); const insideCurrentDirectoryGroup = groupPatternsByBaseDirectory(patternsInsideCurrentDirectory); tasks.push(...convertPatternGroupsToTasks(outsideCurrentDirectoryGroup, negative, dynamic)); /* * For the sake of reducing future accesses to the file system, we merge all tasks within the current directory * into a global task, if at least one pattern refers to the root (`.`). In this case, the global task covers the rest. */ if ('.' 
in insideCurrentDirectoryGroup) { tasks.push(convertPatternGroupToTask('.', patternsInsideCurrentDirectory, negative, dynamic)); } else { tasks.push(...convertPatternGroupsToTasks(insideCurrentDirectoryGroup, negative, dynamic)); } return tasks; } tasks.convertPatternsToTasks = convertPatternsToTasks; function getPositivePatterns(patterns) { return utils$a.pattern.getPositivePatterns(patterns); } tasks.getPositivePatterns = getPositivePatterns; function getNegativePatternsAsPositive(patterns, ignore) { const negative = utils$a.pattern.getNegativePatterns(patterns).concat(ignore); const positive = negative.map(utils$a.pattern.convertToPositivePattern); return positive; } tasks.getNegativePatternsAsPositive = getNegativePatternsAsPositive; function groupPatternsByBaseDirectory(patterns) { const group = {}; return patterns.reduce((collection, pattern) => { const base = utils$a.pattern.getBaseDirectory(pattern); if (base in collection) { collection[base].push(pattern); } else { collection[base] = [pattern]; } return collection; }, group); } tasks.groupPatternsByBaseDirectory = groupPatternsByBaseDirectory; function convertPatternGroupsToTasks(positive, negative, dynamic) { return Object.keys(positive).map((base) => { return convertPatternGroupToTask(base, positive[base], negative, dynamic); }); } tasks.convertPatternGroupsToTasks = convertPatternGroupsToTasks; function convertPatternGroupToTask(base, positive, negative, dynamic) { return { dynamic, positive, negative, base, patterns: [].concat(positive, negative.map(utils$a.pattern.convertToNegativePattern)) }; } tasks.convertPatternGroupToTask = convertPatternGroupToTask; var async$7 = {}; var async$6 = {}; var out$3 = {}; var async$5 = {}; var async$4 = {}; var out$2 = {}; var async$3 = {}; var out$1 = {}; var async$2 = {}; Object.defineProperty(async$2, "__esModule", { value: true }); async$2.read = void 0; function read$3(path, settings, callback) { settings.fs.lstat(path, (lstatError, lstat) => { if (lstatError !== null) { callFailureCallback$2(callback, lstatError); return; } if (!lstat.isSymbolicLink() || !settings.followSymbolicLink) { callSuccessCallback$2(callback, lstat); return; } settings.fs.stat(path, (statError, stat) => { if (statError !== null) { if (settings.throwErrorOnBrokenSymbolicLink) { callFailureCallback$2(callback, statError); return; } callSuccessCallback$2(callback, lstat); return; } if (settings.markSymbolicLink) { stat.isSymbolicLink = () => true; } callSuccessCallback$2(callback, stat); }); }); } async$2.read = read$3; function callFailureCallback$2(callback, error) { callback(error); } function callSuccessCallback$2(callback, result) { callback(null, result); } var sync$8 = {}; Object.defineProperty(sync$8, "__esModule", { value: true }); sync$8.read = void 0; function read$2(path, settings) { const lstat = settings.fs.lstatSync(path); if (!lstat.isSymbolicLink() || !settings.followSymbolicLink) { return lstat; } try { const stat = settings.fs.statSync(path); if (settings.markSymbolicLink) { stat.isSymbolicLink = () => true; } return stat; } catch (error) { if (!settings.throwErrorOnBrokenSymbolicLink) { return lstat; } throw error; } } sync$8.read = read$2; var settings$3 = {}; var fs$f = {}; (function (exports) { Object.defineProperty(exports, "__esModule", { value: true }); exports.createFileSystemAdapter = exports.FILE_SYSTEM_ADAPTER = void 0; const fs = require$$0__default; exports.FILE_SYSTEM_ADAPTER = { lstat: fs.lstat, stat: fs.stat, lstatSync: fs.lstatSync, statSync: fs.statSync }; function 
createFileSystemAdapter(fsMethods) { if (fsMethods === undefined) { return exports.FILE_SYSTEM_ADAPTER; } return Object.assign(Object.assign({}, exports.FILE_SYSTEM_ADAPTER), fsMethods); } exports.createFileSystemAdapter = createFileSystemAdapter; } (fs$f)); Object.defineProperty(settings$3, "__esModule", { value: true }); const fs$e = fs$f; let Settings$2 = class Settings { constructor(_options = {}) { this._options = _options; this.followSymbolicLink = this._getValue(this._options.followSymbolicLink, true); this.fs = fs$e.createFileSystemAdapter(this._options.fs); this.markSymbolicLink = this._getValue(this._options.markSymbolicLink, false); this.throwErrorOnBrokenSymbolicLink = this._getValue(this._options.throwErrorOnBrokenSymbolicLink, true); } _getValue(option, value) { return option !== null && option !== void 0 ? option : value; } }; settings$3.default = Settings$2; Object.defineProperty(out$1, "__esModule", { value: true }); out$1.statSync = out$1.stat = out$1.Settings = void 0; const async$1 = async$2; const sync$7 = sync$8; const settings_1$3 = settings$3; out$1.Settings = settings_1$3.default; function stat$4(path, optionsOrSettingsOrCallback, callback) { if (typeof optionsOrSettingsOrCallback === 'function') { async$1.read(path, getSettings$2(), optionsOrSettingsOrCallback); return; } async$1.read(path, getSettings$2(optionsOrSettingsOrCallback), callback); } out$1.stat = stat$4; function statSync(path, optionsOrSettings) { const settings = getSettings$2(optionsOrSettings); return sync$7.read(path, settings); } out$1.statSync = statSync; function getSettings$2(settingsOrOptions = {}) { if (settingsOrOptions instanceof settings_1$3.default) { return settingsOrOptions; } return new settings_1$3.default(settingsOrOptions); } /*! queue-microtask. MIT License. Feross Aboukhadijeh */ let promise$1; var queueMicrotask_1 = typeof queueMicrotask === 'function' ? queueMicrotask.bind(typeof window !== 'undefined' ? window : commonjsGlobal) /* reuse resolved promise, and allocate it lazily */ : cb => (promise$1 || (promise$1 = Promise.resolve())) .then(cb) .catch(err => setTimeout(() => { throw err }, 0)); /*! run-parallel. MIT License. Feross Aboukhadijeh */ var runParallel_1 = runParallel; const queueMicrotask$1 = queueMicrotask_1; function runParallel (tasks, cb) { let results, pending, keys; let isSync = true; if (Array.isArray(tasks)) { results = []; pending = tasks.length; } else { keys = Object.keys(tasks); results = {}; pending = keys.length; } function done (err) { function end () { if (cb) cb(err, results); cb = null; } if (isSync) queueMicrotask$1(end); else end(); } function each (i, err, result) { results[i] = result; if (--pending === 0 || err) { done(err); } } if (!pending) { /* empty */ done(null); } else if (keys) { /* object */ keys.forEach(function (key) { tasks[key](function (err, result) { each(key, err, result); }); }); } else { /* array */ tasks.forEach(function (task, i) { task(function (err, result) { each(i, err, result); }); }); } isSync = false; } var constants$2 = {}; Object.defineProperty(constants$2, "__esModule", { value: true }); constants$2.IS_SUPPORT_READDIR_WITH_FILE_TYPES = void 0; const NODE_PROCESS_VERSION_PARTS = process.versions.node.split('.'); if (NODE_PROCESS_VERSION_PARTS[0] === undefined || NODE_PROCESS_VERSION_PARTS[1] === undefined) { throw new Error(`Unexpected behavior. 
The 'process.versions.node' variable has invalid value: ${process.versions.node}`); } const MAJOR_VERSION = Number.parseInt(NODE_PROCESS_VERSION_PARTS[0], 10); const MINOR_VERSION = Number.parseInt(NODE_PROCESS_VERSION_PARTS[1], 10); const SUPPORTED_MAJOR_VERSION = 10; const SUPPORTED_MINOR_VERSION = 10; const IS_MATCHED_BY_MAJOR = MAJOR_VERSION > SUPPORTED_MAJOR_VERSION; const IS_MATCHED_BY_MAJOR_AND_MINOR = MAJOR_VERSION === SUPPORTED_MAJOR_VERSION && MINOR_VERSION >= SUPPORTED_MINOR_VERSION; /** * IS `true` for Node.js 10.10 and greater. */ constants$2.IS_SUPPORT_READDIR_WITH_FILE_TYPES = IS_MATCHED_BY_MAJOR || IS_MATCHED_BY_MAJOR_AND_MINOR; var utils$9 = {}; var fs$d = {}; Object.defineProperty(fs$d, "__esModule", { value: true }); fs$d.createDirentFromStats = void 0; class DirentFromStats { constructor(name, stats) { this.name = name; this.isBlockDevice = stats.isBlockDevice.bind(stats); this.isCharacterDevice = stats.isCharacterDevice.bind(stats); this.isDirectory = stats.isDirectory.bind(stats); this.isFIFO = stats.isFIFO.bind(stats); this.isFile = stats.isFile.bind(stats); this.isSocket = stats.isSocket.bind(stats); this.isSymbolicLink = stats.isSymbolicLink.bind(stats); } } function createDirentFromStats(name, stats) { return new DirentFromStats(name, stats); } fs$d.createDirentFromStats = createDirentFromStats; Object.defineProperty(utils$9, "__esModule", { value: true }); utils$9.fs = void 0; const fs$c = fs$d; utils$9.fs = fs$c; var common$a = {}; Object.defineProperty(common$a, "__esModule", { value: true }); common$a.joinPathSegments = void 0; function joinPathSegments$1(a, b, separator) { /** * The correct handling of cases when the first segment is a root (`/`, `C:/`) or UNC path (`//?/C:/`). */ if (a.endsWith(separator)) { return a + b; } return a + separator + b; } common$a.joinPathSegments = joinPathSegments$1; Object.defineProperty(async$3, "__esModule", { value: true }); async$3.readdir = async$3.readdirWithFileTypes = async$3.read = void 0; const fsStat$5 = out$1; const rpl = runParallel_1; const constants_1$1 = constants$2; const utils$8 = utils$9; const common$9 = common$a; function read$1(directory, settings, callback) { if (!settings.stats && constants_1$1.IS_SUPPORT_READDIR_WITH_FILE_TYPES) { readdirWithFileTypes$1(directory, settings, callback); return; } readdir$3(directory, settings, callback); } async$3.read = read$1; function readdirWithFileTypes$1(directory, settings, callback) { settings.fs.readdir(directory, { withFileTypes: true }, (readdirError, dirents) => { if (readdirError !== null) { callFailureCallback$1(callback, readdirError); return; } const entries = dirents.map((dirent) => ({ dirent, name: dirent.name, path: common$9.joinPathSegments(directory, dirent.name, settings.pathSegmentSeparator) })); if (!settings.followSymbolicLinks) { callSuccessCallback$1(callback, entries); return; } const tasks = entries.map((entry) => makeRplTaskEntry(entry, settings)); rpl(tasks, (rplError, rplEntries) => { if (rplError !== null) { callFailureCallback$1(callback, rplError); return; } callSuccessCallback$1(callback, rplEntries); }); }); } async$3.readdirWithFileTypes = readdirWithFileTypes$1; function makeRplTaskEntry(entry, settings) { return (done) => { if (!entry.dirent.isSymbolicLink()) { done(null, entry); return; } settings.fs.stat(entry.path, (statError, stats) => { if (statError !== null) { if (settings.throwErrorOnBrokenSymbolicLink) { done(statError); return; } done(null, entry); return; } entry.dirent = utils$8.fs.createDirentFromStats(entry.name, 
stats); done(null, entry); }); }; } function readdir$3(directory, settings, callback) { settings.fs.readdir(directory, (readdirError, names) => { if (readdirError !== null) { callFailureCallback$1(callback, readdirError); return; } const tasks = names.map((name) => { const path = common$9.joinPathSegments(directory, name, settings.pathSegmentSeparator); return (done) => { fsStat$5.stat(path, settings.fsStatSettings, (error, stats) => { if (error !== null) { done(error); return; } const entry = { name, path, dirent: utils$8.fs.createDirentFromStats(name, stats) }; if (settings.stats) { entry.stats = stats; } done(null, entry); }); }; }); rpl(tasks, (rplError, entries) => { if (rplError !== null) { callFailureCallback$1(callback, rplError); return; } callSuccessCallback$1(callback, entries); }); }); } async$3.readdir = readdir$3; function callFailureCallback$1(callback, error) { callback(error); } function callSuccessCallback$1(callback, result) { callback(null, result); } var sync$6 = {}; Object.defineProperty(sync$6, "__esModule", { value: true }); sync$6.readdir = sync$6.readdirWithFileTypes = sync$6.read = void 0; const fsStat$4 = out$1; const constants_1 = constants$2; const utils$7 = utils$9; const common$8 = common$a; function read(directory, settings) { if (!settings.stats && constants_1.IS_SUPPORT_READDIR_WITH_FILE_TYPES) { return readdirWithFileTypes(directory, settings); } return readdir$2(directory, settings); } sync$6.read = read; function readdirWithFileTypes(directory, settings) { const dirents = settings.fs.readdirSync(directory, { withFileTypes: true }); return dirents.map((dirent) => { const entry = { dirent, name: dirent.name, path: common$8.joinPathSegments(directory, dirent.name, settings.pathSegmentSeparator) }; if (entry.dirent.isSymbolicLink() && settings.followSymbolicLinks) { try { const stats = settings.fs.statSync(entry.path); entry.dirent = utils$7.fs.createDirentFromStats(entry.name, stats); } catch (error) { if (settings.throwErrorOnBrokenSymbolicLink) { throw error; } } } return entry; }); } sync$6.readdirWithFileTypes = readdirWithFileTypes; function readdir$2(directory, settings) { const names = settings.fs.readdirSync(directory); return names.map((name) => { const entryPath = common$8.joinPathSegments(directory, name, settings.pathSegmentSeparator); const stats = fsStat$4.statSync(entryPath, settings.fsStatSettings); const entry = { name, path: entryPath, dirent: utils$7.fs.createDirentFromStats(name, stats) }; if (settings.stats) { entry.stats = stats; } return entry; }); } sync$6.readdir = readdir$2; var settings$2 = {}; var fs$b = {}; (function (exports) { Object.defineProperty(exports, "__esModule", { value: true }); exports.createFileSystemAdapter = exports.FILE_SYSTEM_ADAPTER = void 0; const fs = require$$0__default; exports.FILE_SYSTEM_ADAPTER = { lstat: fs.lstat, stat: fs.stat, lstatSync: fs.lstatSync, statSync: fs.statSync, readdir: fs.readdir, readdirSync: fs.readdirSync }; function createFileSystemAdapter(fsMethods) { if (fsMethods === undefined) { return exports.FILE_SYSTEM_ADAPTER; } return Object.assign(Object.assign({}, exports.FILE_SYSTEM_ADAPTER), fsMethods); } exports.createFileSystemAdapter = createFileSystemAdapter; } (fs$b)); Object.defineProperty(settings$2, "__esModule", { value: true }); const path$d = require$$0$4; const fsStat$3 = out$1; const fs$a = fs$b; let Settings$1 = class Settings { constructor(_options = {}) { this._options = _options; this.followSymbolicLinks = this._getValue(this._options.followSymbolicLinks, false); 
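// Note (descriptive comment added for clarity): the options below fall back to their defaults through
// _getValue(), which keeps a user-supplied value unless it is null or undefined, while `fs` merges any
// user-supplied methods over the built-in file-system adapter.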
this.fs = fs$a.createFileSystemAdapter(this._options.fs); this.pathSegmentSeparator = this._getValue(this._options.pathSegmentSeparator, path$d.sep); this.stats = this._getValue(this._options.stats, false); this.throwErrorOnBrokenSymbolicLink = this._getValue(this._options.throwErrorOnBrokenSymbolicLink, true); this.fsStatSettings = new fsStat$3.Settings({ followSymbolicLink: this.followSymbolicLinks, fs: this.fs, throwErrorOnBrokenSymbolicLink: this.throwErrorOnBrokenSymbolicLink }); } _getValue(option, value) { return option !== null && option !== void 0 ? option : value; } }; settings$2.default = Settings$1; Object.defineProperty(out$2, "__esModule", { value: true }); out$2.Settings = out$2.scandirSync = out$2.scandir = void 0; const async = async$3; const sync$5 = sync$6; const settings_1$2 = settings$2; out$2.Settings = settings_1$2.default; function scandir(path, optionsOrSettingsOrCallback, callback) { if (typeof optionsOrSettingsOrCallback === 'function') { async.read(path, getSettings$1(), optionsOrSettingsOrCallback); return; } async.read(path, getSettings$1(optionsOrSettingsOrCallback), callback); } out$2.scandir = scandir; function scandirSync(path, optionsOrSettings) { const settings = getSettings$1(optionsOrSettings); return sync$5.read(path, settings); } out$2.scandirSync = scandirSync; function getSettings$1(settingsOrOptions = {}) { if (settingsOrOptions instanceof settings_1$2.default) { return settingsOrOptions; } return new settings_1$2.default(settingsOrOptions); } var queue = {exports: {}}; function reusify$1 (Constructor) { var head = new Constructor(); var tail = head; function get () { var current = head; if (current.next) { head = current.next; } else { head = new Constructor(); tail = head; } current.next = null; return current } function release (obj) { tail.next = obj; tail = obj; } return { get: get, release: release } } var reusify_1 = reusify$1; /* eslint-disable no-var */ var reusify = reusify_1; function fastqueue (context, worker, concurrency) { if (typeof context === 'function') { concurrency = worker; worker = context; context = null; } if (concurrency < 1) { throw new Error('fastqueue concurrency must be greater than 1') } var cache = reusify(Task); var queueHead = null; var queueTail = null; var _running = 0; var errorHandler = null; var self = { push: push, drain: noop$4, saturated: noop$4, pause: pause, paused: false, concurrency: concurrency, running: running, resume: resume, idle: idle, length: length, getQueue: getQueue, unshift: unshift, empty: noop$4, kill: kill, killAndDrain: killAndDrain, error: error }; return self function running () { return _running } function pause () { self.paused = true; } function length () { var current = queueHead; var counter = 0; while (current) { current = current.next; counter++; } return counter } function getQueue () { var current = queueHead; var tasks = []; while (current) { tasks.push(current.value); current = current.next; } return tasks } function resume () { if (!self.paused) return self.paused = false; for (var i = 0; i < self.concurrency; i++) { _running++; release(); } } function idle () { return _running === 0 && self.length() === 0 } function push (value, done) { var current = cache.get(); current.context = context; current.release = release; current.value = value; current.callback = done || noop$4; current.errorHandler = errorHandler; if (_running === self.concurrency || self.paused) { if (queueTail) { queueTail.next = current; queueTail = current; } else { queueHead = current; queueTail = current; 
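// The queue was empty until now and every worker is busy (or the queue is paused), so signal saturation.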
self.saturated(); } } else { _running++; worker.call(context, current.value, current.worked); } } function unshift (value, done) { var current = cache.get(); current.context = context; current.release = release; current.value = value; current.callback = done || noop$4; if (_running === self.concurrency || self.paused) { if (queueHead) { current.next = queueHead; queueHead = current; } else { queueHead = current; queueTail = current; self.saturated(); } } else { _running++; worker.call(context, current.value, current.worked); } } function release (holder) { if (holder) { cache.release(holder); } var next = queueHead; if (next) { if (!self.paused) { if (queueTail === queueHead) { queueTail = null; } queueHead = next.next; next.next = null; worker.call(context, next.value, next.worked); if (queueTail === null) { self.empty(); } } else { _running--; } } else if (--_running === 0) { self.drain(); } } function kill () { queueHead = null; queueTail = null; self.drain = noop$4; } function killAndDrain () { queueHead = null; queueTail = null; self.drain(); self.drain = noop$4; } function error (handler) { errorHandler = handler; } } function noop$4 () {} function Task () { this.value = null; this.callback = noop$4; this.next = null; this.release = noop$4; this.context = null; this.errorHandler = null; var self = this; this.worked = function worked (err, result) { var callback = self.callback; var errorHandler = self.errorHandler; var val = self.value; self.value = null; self.callback = noop$4; if (self.errorHandler) { errorHandler(err, val); } callback.call(self.context, err, result); self.release(self); }; } function queueAsPromised (context, worker, concurrency) { if (typeof context === 'function') { concurrency = worker; worker = context; context = null; } function asyncWrapper (arg, cb) { worker.call(this, arg) .then(function (res) { cb(null, res); }, cb); } var queue = fastqueue(context, asyncWrapper, concurrency); var pushCb = queue.push; var unshiftCb = queue.unshift; queue.push = push; queue.unshift = unshift; queue.drained = drained; return queue function push (value) { var p = new Promise(function (resolve, reject) { pushCb(value, function (err, result) { if (err) { reject(err); return } resolve(result); }); }); /* Let's fork the promise chain to make the error bubble up to the user but not lead to an unhandledRejection */ p.catch(noop$4); return p } function unshift (value) { var p = new Promise(function (resolve, reject) { unshiftCb(value, function (err, result) { if (err) { reject(err); return } resolve(result); }); }); /* Let's fork the promise chain to make the error bubble up to the user but not lead to an unhandledRejection */ p.catch(noop$4); return p } function drained () { var previousDrain = queue.drain; var p = new Promise(function (resolve) { queue.drain = function () { previousDrain(); resolve(); }; }); return p } } queue.exports = fastqueue; queue.exports.promise = queueAsPromised; var queueExports = queue.exports; var common$7 = {}; Object.defineProperty(common$7, "__esModule", { value: true }); common$7.joinPathSegments = common$7.replacePathSegmentSeparator = common$7.isAppliedFilter = common$7.isFatalError = void 0; function isFatalError(settings, error) { if (settings.errorFilter === null) { return true; } return !settings.errorFilter(error); } common$7.isFatalError = isFatalError; function isAppliedFilter(filter, value) { return filter === null || filter(value); } common$7.isAppliedFilter = isAppliedFilter; function replacePathSegmentSeparator(filepath, separator) 
{ return filepath.split(/[/\\]/).join(separator); } common$7.replacePathSegmentSeparator = replacePathSegmentSeparator; function joinPathSegments(a, b, separator) { if (a === '') { return b; } /** * The correct handling of cases when the first segment is a root (`/`, `C:/`) or UNC path (`//?/C:/`). */ if (a.endsWith(separator)) { return a + b; } return a + separator + b; } common$7.joinPathSegments = joinPathSegments; var reader$1 = {}; Object.defineProperty(reader$1, "__esModule", { value: true }); const common$6 = common$7; let Reader$1 = class Reader { constructor(_root, _settings) { this._root = _root; this._settings = _settings; this._root = common$6.replacePathSegmentSeparator(_root, _settings.pathSegmentSeparator); } }; reader$1.default = Reader$1; Object.defineProperty(async$4, "__esModule", { value: true }); const events_1 = require$$0$5; const fsScandir$2 = out$2; const fastq = queueExports; const common$5 = common$7; const reader_1$4 = reader$1; class AsyncReader extends reader_1$4.default { constructor(_root, _settings) { super(_root, _settings); this._settings = _settings; this._scandir = fsScandir$2.scandir; this._emitter = new events_1.EventEmitter(); this._queue = fastq(this._worker.bind(this), this._settings.concurrency); this._isFatalError = false; this._isDestroyed = false; this._queue.drain = () => { if (!this._isFatalError) { this._emitter.emit('end'); } }; } read() { this._isFatalError = false; this._isDestroyed = false; setImmediate(() => { this._pushToQueue(this._root, this._settings.basePath); }); return this._emitter; } get isDestroyed() { return this._isDestroyed; } destroy() { if (this._isDestroyed) { throw new Error('The reader is already destroyed'); } this._isDestroyed = true; this._queue.killAndDrain(); } onEntry(callback) { this._emitter.on('entry', callback); } onError(callback) { this._emitter.once('error', callback); } onEnd(callback) { this._emitter.once('end', callback); } _pushToQueue(directory, base) { const queueItem = { directory, base }; this._queue.push(queueItem, (error) => { if (error !== null) { this._handleError(error); } }); } _worker(item, done) { this._scandir(item.directory, this._settings.fsScandirSettings, (error, entries) => { if (error !== null) { done(error, undefined); return; } for (const entry of entries) { this._handleEntry(entry, item.base); } done(null, undefined); }); } _handleError(error) { if (this._isDestroyed || !common$5.isFatalError(this._settings, error)) { return; } this._isFatalError = true; this._isDestroyed = true; this._emitter.emit('error', error); } _handleEntry(entry, base) { if (this._isDestroyed || this._isFatalError) { return; } const fullpath = entry.path; if (base !== undefined) { entry.path = common$5.joinPathSegments(base, entry.name, this._settings.pathSegmentSeparator); } if (common$5.isAppliedFilter(this._settings.entryFilter, entry)) { this._emitEntry(entry); } if (entry.dirent.isDirectory() && common$5.isAppliedFilter(this._settings.deepFilter, entry)) { this._pushToQueue(fullpath, base === undefined ? 
undefined : entry.path); } } _emitEntry(entry) { this._emitter.emit('entry', entry); } } async$4.default = AsyncReader; Object.defineProperty(async$5, "__esModule", { value: true }); const async_1$4 = async$4; class AsyncProvider { constructor(_root, _settings) { this._root = _root; this._settings = _settings; this._reader = new async_1$4.default(this._root, this._settings); this._storage = []; } read(callback) { this._reader.onError((error) => { callFailureCallback(callback, error); }); this._reader.onEntry((entry) => { this._storage.push(entry); }); this._reader.onEnd(() => { callSuccessCallback(callback, this._storage); }); this._reader.read(); } } async$5.default = AsyncProvider; function callFailureCallback(callback, error) { callback(error); } function callSuccessCallback(callback, entries) { callback(null, entries); } var stream$2 = {}; Object.defineProperty(stream$2, "__esModule", { value: true }); const stream_1$5 = require$$0$7; const async_1$3 = async$4; class StreamProvider { constructor(_root, _settings) { this._root = _root; this._settings = _settings; this._reader = new async_1$3.default(this._root, this._settings); this._stream = new stream_1$5.Readable({ objectMode: true, read: () => { }, destroy: () => { if (!this._reader.isDestroyed) { this._reader.destroy(); } } }); } read() { this._reader.onError((error) => { this._stream.emit('error', error); }); this._reader.onEntry((entry) => { this._stream.push(entry); }); this._reader.onEnd(() => { this._stream.push(null); }); this._reader.read(); return this._stream; } } stream$2.default = StreamProvider; var sync$4 = {}; var sync$3 = {}; Object.defineProperty(sync$3, "__esModule", { value: true }); const fsScandir$1 = out$2; const common$4 = common$7; const reader_1$3 = reader$1; class SyncReader extends reader_1$3.default { constructor() { super(...arguments); this._scandir = fsScandir$1.scandirSync; this._storage = []; this._queue = new Set(); } read() { this._pushToQueue(this._root, this._settings.basePath); this._handleQueue(); return this._storage; } _pushToQueue(directory, base) { this._queue.add({ directory, base }); } _handleQueue() { for (const item of this._queue.values()) { this._handleDirectory(item.directory, item.base); } } _handleDirectory(directory, base) { try { const entries = this._scandir(directory, this._settings.fsScandirSettings); for (const entry of entries) { this._handleEntry(entry, base); } } catch (error) { this._handleError(error); } } _handleError(error) { if (!common$4.isFatalError(this._settings, error)) { return; } throw error; } _handleEntry(entry, base) { const fullpath = entry.path; if (base !== undefined) { entry.path = common$4.joinPathSegments(base, entry.name, this._settings.pathSegmentSeparator); } if (common$4.isAppliedFilter(this._settings.entryFilter, entry)) { this._pushToStorage(entry); } if (entry.dirent.isDirectory() && common$4.isAppliedFilter(this._settings.deepFilter, entry)) { this._pushToQueue(fullpath, base === undefined ? 
undefined : entry.path); } } _pushToStorage(entry) { this._storage.push(entry); } } sync$3.default = SyncReader; Object.defineProperty(sync$4, "__esModule", { value: true }); const sync_1$3 = sync$3; class SyncProvider { constructor(_root, _settings) { this._root = _root; this._settings = _settings; this._reader = new sync_1$3.default(this._root, this._settings); } read() { return this._reader.read(); } } sync$4.default = SyncProvider; var settings$1 = {}; Object.defineProperty(settings$1, "__esModule", { value: true }); const path$c = require$$0$4; const fsScandir = out$2; class Settings { constructor(_options = {}) { this._options = _options; this.basePath = this._getValue(this._options.basePath, undefined); this.concurrency = this._getValue(this._options.concurrency, Number.POSITIVE_INFINITY); this.deepFilter = this._getValue(this._options.deepFilter, null); this.entryFilter = this._getValue(this._options.entryFilter, null); this.errorFilter = this._getValue(this._options.errorFilter, null); this.pathSegmentSeparator = this._getValue(this._options.pathSegmentSeparator, path$c.sep); this.fsScandirSettings = new fsScandir.Settings({ followSymbolicLinks: this._options.followSymbolicLinks, fs: this._options.fs, pathSegmentSeparator: this._options.pathSegmentSeparator, stats: this._options.stats, throwErrorOnBrokenSymbolicLink: this._options.throwErrorOnBrokenSymbolicLink }); } _getValue(option, value) { return option !== null && option !== void 0 ? option : value; } } settings$1.default = Settings; Object.defineProperty(out$3, "__esModule", { value: true }); out$3.Settings = out$3.walkStream = out$3.walkSync = out$3.walk = void 0; const async_1$2 = async$5; const stream_1$4 = stream$2; const sync_1$2 = sync$4; const settings_1$1 = settings$1; out$3.Settings = settings_1$1.default; function walk$2(directory, optionsOrSettingsOrCallback, callback) { if (typeof optionsOrSettingsOrCallback === 'function') { new async_1$2.default(directory, getSettings()).read(optionsOrSettingsOrCallback); return; } new async_1$2.default(directory, getSettings(optionsOrSettingsOrCallback)).read(callback); } out$3.walk = walk$2; function walkSync(directory, optionsOrSettings) { const settings = getSettings(optionsOrSettings); const provider = new sync_1$2.default(directory, settings); return provider.read(); } out$3.walkSync = walkSync; function walkStream(directory, optionsOrSettings) { const settings = getSettings(optionsOrSettings); const provider = new stream_1$4.default(directory, settings); return provider.read(); } out$3.walkStream = walkStream; function getSettings(settingsOrOptions = {}) { if (settingsOrOptions instanceof settings_1$1.default) { return settingsOrOptions; } return new settings_1$1.default(settingsOrOptions); } var reader = {}; Object.defineProperty(reader, "__esModule", { value: true }); const path$b = require$$0$4; const fsStat$2 = out$1; const utils$6 = utils$g; class Reader { constructor(_settings) { this._settings = _settings; this._fsStatSettings = new fsStat$2.Settings({ followSymbolicLink: this._settings.followSymbolicLinks, fs: this._settings.fs, throwErrorOnBrokenSymbolicLink: this._settings.followSymbolicLinks }); } _getFullEntryPath(filepath) { return path$b.resolve(this._settings.cwd, filepath); } _makeEntry(stats, pattern) { const entry = { name: pattern, path: pattern, dirent: utils$6.fs.createDirentFromStats(pattern, stats) }; if (this._settings.stats) { entry.stats = stats; } return entry; } _isFatalError(error) { return !utils$6.errno.isEnoentCodeError(error) && 
!this._settings.suppressErrors; } } reader.default = Reader; var stream$1 = {}; Object.defineProperty(stream$1, "__esModule", { value: true }); const stream_1$3 = require$$0$7; const fsStat$1 = out$1; const fsWalk$2 = out$3; const reader_1$2 = reader; class ReaderStream extends reader_1$2.default { constructor() { super(...arguments); this._walkStream = fsWalk$2.walkStream; this._stat = fsStat$1.stat; } dynamic(root, options) { return this._walkStream(root, options); } static(patterns, options) { const filepaths = patterns.map(this._getFullEntryPath, this); const stream = new stream_1$3.PassThrough({ objectMode: true }); stream._write = (index, _enc, done) => { return this._getEntry(filepaths[index], patterns[index], options) .then((entry) => { if (entry !== null && options.entryFilter(entry)) { stream.push(entry); } if (index === filepaths.length - 1) { stream.end(); } done(); }) .catch(done); }; for (let i = 0; i < filepaths.length; i++) { stream.write(i); } return stream; } _getEntry(filepath, pattern, options) { return this._getStat(filepath) .then((stats) => this._makeEntry(stats, pattern)) .catch((error) => { if (options.errorFilter(error)) { return null; } throw error; }); } _getStat(filepath) { return new Promise((resolve, reject) => { this._stat(filepath, this._fsStatSettings, (error, stats) => { return error === null ? resolve(stats) : reject(error); }); }); } } stream$1.default = ReaderStream; Object.defineProperty(async$6, "__esModule", { value: true }); const fsWalk$1 = out$3; const reader_1$1 = reader; const stream_1$2 = stream$1; class ReaderAsync extends reader_1$1.default { constructor() { super(...arguments); this._walkAsync = fsWalk$1.walk; this._readerStream = new stream_1$2.default(this._settings); } dynamic(root, options) { return new Promise((resolve, reject) => { this._walkAsync(root, options, (error, entries) => { if (error === null) { resolve(entries); } else { reject(error); } }); }); } async static(patterns, options) { const entries = []; const stream = this._readerStream.static(patterns, options); // After #235, replace it with an asynchronous iterator. 
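// Until then, collect every entry emitted by the helper stream and resolve once it ends, rejecting on the first error.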
return new Promise((resolve, reject) => { stream.once('error', reject); stream.on('data', (entry) => entries.push(entry)); stream.once('end', () => resolve(entries)); }); } } async$6.default = ReaderAsync; var provider = {}; var deep = {}; var partial = {}; var matcher = {}; Object.defineProperty(matcher, "__esModule", { value: true }); const utils$5 = utils$g; class Matcher { constructor(_patterns, _settings, _micromatchOptions) { this._patterns = _patterns; this._settings = _settings; this._micromatchOptions = _micromatchOptions; this._storage = []; this._fillStorage(); } _fillStorage() { for (const pattern of this._patterns) { const segments = this._getPatternSegments(pattern); const sections = this._splitSegmentsIntoSections(segments); this._storage.push({ complete: sections.length <= 1, pattern, segments, sections }); } } _getPatternSegments(pattern) { const parts = utils$5.pattern.getPatternParts(pattern, this._micromatchOptions); return parts.map((part) => { const dynamic = utils$5.pattern.isDynamicPattern(part, this._settings); if (!dynamic) { return { dynamic: false, pattern: part }; } return { dynamic: true, pattern: part, patternRe: utils$5.pattern.makeRe(part, this._micromatchOptions) }; }); } _splitSegmentsIntoSections(segments) { return utils$5.array.splitWhen(segments, (segment) => segment.dynamic && utils$5.pattern.hasGlobStar(segment.pattern)); } } matcher.default = Matcher; Object.defineProperty(partial, "__esModule", { value: true }); const matcher_1 = matcher; class PartialMatcher extends matcher_1.default { match(filepath) { const parts = filepath.split('/'); const levels = parts.length; const patterns = this._storage.filter((info) => !info.complete || info.segments.length > levels); for (const pattern of patterns) { const section = pattern.sections[0]; /** * In this case, the pattern has a globstar and we must read all directories unconditionally, * but only if the level has reached the end of the first group. 
* * fixtures/{a,b}/** * ^ true/false ^ always true */ if (!pattern.complete && levels > section.length) { return true; } const match = parts.every((part, index) => { const segment = pattern.segments[index]; if (segment.dynamic && segment.patternRe.test(part)) { return true; } if (!segment.dynamic && segment.pattern === part) { return true; } return false; }); if (match) { return true; } } return false; } } partial.default = PartialMatcher; Object.defineProperty(deep, "__esModule", { value: true }); const utils$4 = utils$g; const partial_1 = partial; class DeepFilter { constructor(_settings, _micromatchOptions) { this._settings = _settings; this._micromatchOptions = _micromatchOptions; } getFilter(basePath, positive, negative) { const matcher = this._getMatcher(positive); const negativeRe = this._getNegativePatternsRe(negative); return (entry) => this._filter(basePath, entry, matcher, negativeRe); } _getMatcher(patterns) { return new partial_1.default(patterns, this._settings, this._micromatchOptions); } _getNegativePatternsRe(patterns) { const affectDepthOfReadingPatterns = patterns.filter(utils$4.pattern.isAffectDepthOfReadingPattern); return utils$4.pattern.convertPatternsToRe(affectDepthOfReadingPatterns, this._micromatchOptions); } _filter(basePath, entry, matcher, negativeRe) { if (this._isSkippedByDeep(basePath, entry.path)) { return false; } if (this._isSkippedSymbolicLink(entry)) { return false; } const filepath = utils$4.path.removeLeadingDotSegment(entry.path); if (this._isSkippedByPositivePatterns(filepath, matcher)) { return false; } return this._isSkippedByNegativePatterns(filepath, negativeRe); } _isSkippedByDeep(basePath, entryPath) { /** * Avoid unnecessary depth calculations when it doesn't matter. */ if (this._settings.deep === Infinity) { return false; } return this._getEntryLevel(basePath, entryPath) >= this._settings.deep; } _getEntryLevel(basePath, entryPath) { const entryPathDepth = entryPath.split('/').length; if (basePath === '') { return entryPathDepth; } const basePathDepth = basePath.split('/').length; return entryPathDepth - basePathDepth; } _isSkippedSymbolicLink(entry) { return !this._settings.followSymbolicLinks && entry.dirent.isSymbolicLink(); } _isSkippedByPositivePatterns(entryPath, matcher) { return !this._settings.baseNameMatch && !matcher.match(entryPath); } _isSkippedByNegativePatterns(entryPath, patternsRe) { return !utils$4.pattern.matchAny(entryPath, patternsRe); } } deep.default = DeepFilter; var entry$1 = {}; Object.defineProperty(entry$1, "__esModule", { value: true }); const utils$3 = utils$g; class EntryFilter { constructor(_settings, _micromatchOptions) { this._settings = _settings; this._micromatchOptions = _micromatchOptions; this.index = new Map(); } getFilter(positive, negative) { const positiveRe = utils$3.pattern.convertPatternsToRe(positive, this._micromatchOptions); const negativeRe = utils$3.pattern.convertPatternsToRe(negative, Object.assign(Object.assign({}, this._micromatchOptions), { dot: true })); return (entry) => this._filter(entry, positiveRe, negativeRe); } _filter(entry, positiveRe, negativeRe) { const filepath = utils$3.path.removeLeadingDotSegment(entry.path); if (this._settings.unique && this._isDuplicateEntry(filepath)) { return false; } if (this._onlyFileFilter(entry) || this._onlyDirectoryFilter(entry)) { return false; } if (this._isSkippedByAbsoluteNegativePatterns(filepath, negativeRe)) { return false; } const isDirectory = entry.dirent.isDirectory(); const isMatched = this._isMatchToPatterns(filepath, positiveRe, 
isDirectory) && !this._isMatchToPatterns(filepath, negativeRe, isDirectory); if (this._settings.unique && isMatched) { this._createIndexRecord(filepath); } return isMatched; } _isDuplicateEntry(filepath) { return this.index.has(filepath); } _createIndexRecord(filepath) { this.index.set(filepath, undefined); } _onlyFileFilter(entry) { return this._settings.onlyFiles && !entry.dirent.isFile(); } _onlyDirectoryFilter(entry) { return this._settings.onlyDirectories && !entry.dirent.isDirectory(); } _isSkippedByAbsoluteNegativePatterns(entryPath, patternsRe) { if (!this._settings.absolute) { return false; } const fullpath = utils$3.path.makeAbsolute(this._settings.cwd, entryPath); return utils$3.pattern.matchAny(fullpath, patternsRe); } _isMatchToPatterns(filepath, patternsRe, isDirectory) { // Trying to match files and directories by patterns. const isMatched = utils$3.pattern.matchAny(filepath, patternsRe); // A pattern with a trailing slash can be used for directory matching. // To apply such a pattern, we need to add a trailing slash to the path. if (!isMatched && isDirectory) { return utils$3.pattern.matchAny(filepath + '/', patternsRe); } return isMatched; } } entry$1.default = EntryFilter; var error$1 = {}; Object.defineProperty(error$1, "__esModule", { value: true }); const utils$2 = utils$g; class ErrorFilter { constructor(_settings) { this._settings = _settings; } getFilter() { return (error) => this._isNonFatalError(error); } _isNonFatalError(error) { return utils$2.errno.isEnoentCodeError(error) || this._settings.suppressErrors; } } error$1.default = ErrorFilter; var entry = {}; Object.defineProperty(entry, "__esModule", { value: true }); const utils$1 = utils$g; class EntryTransformer { constructor(_settings) { this._settings = _settings; } getTransformer() { return (entry) => this._transform(entry); } _transform(entry) { let filepath = entry.path; if (this._settings.absolute) { filepath = utils$1.path.makeAbsolute(this._settings.cwd, filepath); filepath = utils$1.path.unixify(filepath); } if (this._settings.markDirectories && entry.dirent.isDirectory()) { filepath += '/'; } if (!this._settings.objectMode) { return filepath; } return Object.assign(Object.assign({}, entry), { path: filepath }); } } entry.default = EntryTransformer; Object.defineProperty(provider, "__esModule", { value: true }); const path$a = require$$0$4; const deep_1 = deep; const entry_1 = entry$1; const error_1 = error$1; const entry_2 = entry; class Provider { constructor(_settings) { this._settings = _settings; this.errorFilter = new error_1.default(this._settings); this.entryFilter = new entry_1.default(this._settings, this._getMicromatchOptions()); this.deepFilter = new deep_1.default(this._settings, this._getMicromatchOptions()); this.entryTransformer = new entry_2.default(this._settings); } _getRootDirectory(task) { return path$a.resolve(this._settings.cwd, task.base); } _getReaderOptions(task) { const basePath = task.base === '.' ?
'' : task.base; return { basePath, pathSegmentSeparator: '/', concurrency: this._settings.concurrency, deepFilter: this.deepFilter.getFilter(basePath, task.positive, task.negative), entryFilter: this.entryFilter.getFilter(task.positive, task.negative), errorFilter: this.errorFilter.getFilter(), followSymbolicLinks: this._settings.followSymbolicLinks, fs: this._settings.fs, stats: this._settings.stats, throwErrorOnBrokenSymbolicLink: this._settings.throwErrorOnBrokenSymbolicLink, transform: this.entryTransformer.getTransformer() }; } _getMicromatchOptions() { return { dot: this._settings.dot, matchBase: this._settings.baseNameMatch, nobrace: !this._settings.braceExpansion, nocase: !this._settings.caseSensitiveMatch, noext: !this._settings.extglob, noglobstar: !this._settings.globstar, posix: true, strictSlashes: false }; } } provider.default = Provider; Object.defineProperty(async$7, "__esModule", { value: true }); const async_1$1 = async$6; const provider_1$2 = provider; class ProviderAsync extends provider_1$2.default { constructor() { super(...arguments); this._reader = new async_1$1.default(this._settings); } async read(task) { const root = this._getRootDirectory(task); const options = this._getReaderOptions(task); const entries = await this.api(root, task, options); return entries.map((entry) => options.transform(entry)); } api(root, task, options) { if (task.dynamic) { return this._reader.dynamic(root, options); } return this._reader.static(task.patterns, options); } } async$7.default = ProviderAsync; var stream = {}; Object.defineProperty(stream, "__esModule", { value: true }); const stream_1$1 = require$$0$7; const stream_2 = stream$1; const provider_1$1 = provider; class ProviderStream extends provider_1$1.default { constructor() { super(...arguments); this._reader = new stream_2.default(this._settings); } read(task) { const root = this._getRootDirectory(task); const options = this._getReaderOptions(task); const source = this.api(root, task, options); const destination = new stream_1$1.Readable({ objectMode: true, read: () => { } }); source .once('error', (error) => destination.emit('error', error)) .on('data', (entry) => destination.emit('data', options.transform(entry))) .once('end', () => destination.emit('end')); destination .once('close', () => source.destroy()); return destination; } api(root, task, options) { if (task.dynamic) { return this._reader.dynamic(root, options); } return this._reader.static(task.patterns, options); } } stream.default = ProviderStream; var sync$2 = {}; var sync$1 = {}; Object.defineProperty(sync$1, "__esModule", { value: true }); const fsStat = out$1; const fsWalk = out$3; const reader_1 = reader; class ReaderSync extends reader_1.default { constructor() { super(...arguments); this._walkSync = fsWalk.walkSync; this._statSync = fsStat.statSync; } dynamic(root, options) { return this._walkSync(root, options); } static(patterns, options) { const entries = []; for (const pattern of patterns) { const filepath = this._getFullEntryPath(pattern); const entry = this._getEntry(filepath, pattern, options); if (entry === null || !options.entryFilter(entry)) { continue; } entries.push(entry); } return entries; } _getEntry(filepath, pattern, options) { try { const stats = this._getStat(filepath); return this._makeEntry(stats, pattern); } catch (error) { if (options.errorFilter(error)) { return null; } throw error; } } _getStat(filepath) { return this._statSync(filepath, this._fsStatSettings); } } sync$1.default = ReaderSync; Object.defineProperty(sync$2, 
"__esModule", { value: true }); const sync_1$1 = sync$1; const provider_1 = provider; class ProviderSync extends provider_1.default { constructor() { super(...arguments); this._reader = new sync_1$1.default(this._settings); } read(task) { const root = this._getRootDirectory(task); const options = this._getReaderOptions(task); const entries = this.api(root, task, options); return entries.map(options.transform); } api(root, task, options) { if (task.dynamic) { return this._reader.dynamic(root, options); } return this._reader.static(task.patterns, options); } } sync$2.default = ProviderSync; var settings = {}; (function (exports) { Object.defineProperty(exports, "__esModule", { value: true }); exports.DEFAULT_FILE_SYSTEM_ADAPTER = void 0; const fs = require$$0__default; const os = require$$2; /** * The `os.cpus` method can return zero. We expect the number of cores to be greater than zero. * https://github.com/nodejs/node/blob/7faeddf23a98c53896f8b574a6e66589e8fb1eb8/lib/os.js#L106-L107 */ const CPU_COUNT = Math.max(os.cpus().length, 1); exports.DEFAULT_FILE_SYSTEM_ADAPTER = { lstat: fs.lstat, lstatSync: fs.lstatSync, stat: fs.stat, statSync: fs.statSync, readdir: fs.readdir, readdirSync: fs.readdirSync }; class Settings { constructor(_options = {}) { this._options = _options; this.absolute = this._getValue(this._options.absolute, false); this.baseNameMatch = this._getValue(this._options.baseNameMatch, false); this.braceExpansion = this._getValue(this._options.braceExpansion, true); this.caseSensitiveMatch = this._getValue(this._options.caseSensitiveMatch, true); this.concurrency = this._getValue(this._options.concurrency, CPU_COUNT); this.cwd = this._getValue(this._options.cwd, process.cwd()); this.deep = this._getValue(this._options.deep, Infinity); this.dot = this._getValue(this._options.dot, false); this.extglob = this._getValue(this._options.extglob, true); this.followSymbolicLinks = this._getValue(this._options.followSymbolicLinks, true); this.fs = this._getFileSystemMethods(this._options.fs); this.globstar = this._getValue(this._options.globstar, true); this.ignore = this._getValue(this._options.ignore, []); this.markDirectories = this._getValue(this._options.markDirectories, false); this.objectMode = this._getValue(this._options.objectMode, false); this.onlyDirectories = this._getValue(this._options.onlyDirectories, false); this.onlyFiles = this._getValue(this._options.onlyFiles, true); this.stats = this._getValue(this._options.stats, false); this.suppressErrors = this._getValue(this._options.suppressErrors, false); this.throwErrorOnBrokenSymbolicLink = this._getValue(this._options.throwErrorOnBrokenSymbolicLink, false); this.unique = this._getValue(this._options.unique, true); if (this.onlyDirectories) { this.onlyFiles = false; } if (this.stats) { this.objectMode = true; } // Remove the cast to the array in the next major (#404). this.ignore = [].concat(this.ignore); } _getValue(option, value) { return option === undefined ? 
value : option; } _getFileSystemMethods(methods = {}) { return Object.assign(Object.assign({}, exports.DEFAULT_FILE_SYSTEM_ADAPTER), methods); } } exports.default = Settings; } (settings)); const taskManager = tasks; const async_1 = async$7; const stream_1 = stream; const sync_1 = sync$2; const settings_1 = settings; const utils = utils$g; async function FastGlob(source, options) { assertPatternsInput(source); const works = getWorks(source, async_1.default, options); const result = await Promise.all(works); return utils.array.flatten(result); } // https://github.com/typescript-eslint/typescript-eslint/issues/60 // eslint-disable-next-line no-redeclare (function (FastGlob) { FastGlob.glob = FastGlob; FastGlob.globSync = sync; FastGlob.globStream = stream; FastGlob.async = FastGlob; function sync(source, options) { assertPatternsInput(source); const works = getWorks(source, sync_1.default, options); return utils.array.flatten(works); } FastGlob.sync = sync; function stream(source, options) { assertPatternsInput(source); const works = getWorks(source, stream_1.default, options); /** * The stream returned by the provider cannot work with an asynchronous iterator. * To support asynchronous iterators, regardless of the number of tasks, we always multiplex streams. * This affects performance (+25%). I don't see a better solution right now. */ return utils.stream.merge(works); } FastGlob.stream = stream; function generateTasks(source, options) { assertPatternsInput(source); const patterns = [].concat(source); const settings = new settings_1.default(options); return taskManager.generate(patterns, settings); } FastGlob.generateTasks = generateTasks; function isDynamicPattern(source, options) { assertPatternsInput(source); const settings = new settings_1.default(options); return utils.pattern.isDynamicPattern(source, settings); } FastGlob.isDynamicPattern = isDynamicPattern; function escapePath(source) { assertPatternsInput(source); return utils.path.escape(source); } FastGlob.escapePath = escapePath; function convertPathToPattern(source) { assertPatternsInput(source); return utils.path.convertPathToPattern(source); } FastGlob.convertPathToPattern = convertPathToPattern; (function (posix) { function escapePath(source) { assertPatternsInput(source); return utils.path.escapePosixPath(source); } posix.escapePath = escapePath; function convertPathToPattern(source) { assertPatternsInput(source); return utils.path.convertPosixPathToPattern(source); } posix.convertPathToPattern = convertPathToPattern; })(FastGlob.posix || (FastGlob.posix = {})); (function (win32) { function escapePath(source) { assertPatternsInput(source); return utils.path.escapeWindowsPath(source); } win32.escapePath = escapePath; function convertPathToPattern(source) { assertPatternsInput(source); return utils.path.convertWindowsPathToPattern(source); } win32.convertPathToPattern = convertPathToPattern; })(FastGlob.win32 || (FastGlob.win32 = {})); })(FastGlob || (FastGlob = {})); function getWorks(source, _Provider, options) { const patterns = [].concat(source); const settings = new settings_1.default(options); const tasks = taskManager.generate(patterns, settings); const provider = new _Provider(settings); return tasks.map(provider.read, provider); } function assertPatternsInput(input) { const source = [].concat(input); const isValidSource = source.every((item) => utils.string.isString(item) && !utils.string.isEmpty(item)); if (!isValidSource) { throw new TypeError('Patterns must be a string (non empty) or an array of strings'); } } var out
= FastGlob; var glob = /*@__PURE__*/getDefaultExportFromCjs(out); var dist = {}; (function (exports) { Object.defineProperty(exports, "__esModule", { value: true }); exports.lilconfigSync = exports.lilconfig = exports.defaultLoaders = void 0; const path = require$$0$4; const fs = require$$0__default; const os = require$$2; const fsReadFileAsync = fs.promises.readFile; function getDefaultSearchPlaces(name) { return [ 'package.json', `.${name}rc.json`, `.${name}rc.js`, `.${name}rc.cjs`, `.config/${name}rc`, `.config/${name}rc.json`, `.config/${name}rc.js`, `.config/${name}rc.cjs`, `${name}.config.js`, `${name}.config.cjs`, ]; } function parentDir(p) { return path.dirname(p) || path.sep; } exports.defaultLoaders = Object.freeze({ '.js': __require, '.json': __require, '.cjs': __require, noExt(_, content) { return JSON.parse(content); }, }); function getExtDesc(ext) { return ext === 'noExt' ? 'files without extensions' : `extension "${ext}"`; } function getOptions(name, options = {}) { const conf = { stopDir: os.homedir(), searchPlaces: getDefaultSearchPlaces(name), ignoreEmptySearchPlaces: true, cache: true, transform: (x) => x, packageProp: [name], ...options, loaders: { ...exports.defaultLoaders, ...options.loaders }, }; conf.searchPlaces.forEach(place => { const key = path.extname(place) || 'noExt'; const loader = conf.loaders[key]; if (!loader) { throw new Error(`No loader specified for ${getExtDesc(key)}, so searchPlaces item "${place}" is invalid`); } if (typeof loader !== 'function') { throw new Error(`loader for ${getExtDesc(key)} is not a function (type provided: "${typeof loader}"), so searchPlaces item "${place}" is invalid`); } }); return conf; } function getPackageProp(props, obj) { if (typeof props === 'string' && props in obj) return obj[props]; return ((Array.isArray(props) ? props : props.split('.')).reduce((acc, prop) => (acc === undefined ? 
acc : acc[prop]), obj) || null); } function validateFilePath(filepath) { if (!filepath) throw new Error('load must pass a non-empty string'); } function validateLoader(loader, ext) { if (!loader) throw new Error(`No loader specified for extension "${ext}"`); if (typeof loader !== 'function') throw new Error('loader is not a function'); } const makeEmplace = (enableCache) => (c, filepath, res) => { if (enableCache) c.set(filepath, res); return res; }; function lilconfig(name, options) { const { ignoreEmptySearchPlaces, loaders, packageProp, searchPlaces, stopDir, transform, cache, } = getOptions(name, options); const searchCache = new Map(); const loadCache = new Map(); const emplace = makeEmplace(cache); return { async search(searchFrom = process.cwd()) { const result = { config: null, filepath: '', }; const visited = new Set(); let dir = searchFrom; dirLoop: while (true) { if (cache) { const r = searchCache.get(dir); if (r !== undefined) { for (const p of visited) searchCache.set(p, r); return r; } visited.add(dir); } for (const searchPlace of searchPlaces) { const filepath = path.join(dir, searchPlace); try { await fs.promises.access(filepath); } catch (_a) { continue; } const content = String(await fsReadFileAsync(filepath)); const loaderKey = path.extname(searchPlace) || 'noExt'; const loader = loaders[loaderKey]; if (searchPlace === 'package.json') { const pkg = await loader(filepath, content); const maybeConfig = getPackageProp(packageProp, pkg); if (maybeConfig != null) { result.config = maybeConfig; result.filepath = filepath; break dirLoop; } continue; } const isEmpty = content.trim() === ''; if (isEmpty && ignoreEmptySearchPlaces) continue; if (isEmpty) { result.isEmpty = true; result.config = undefined; } else { validateLoader(loader, loaderKey); result.config = await loader(filepath, content); } result.filepath = filepath; break dirLoop; } if (dir === stopDir || dir === parentDir(dir)) break dirLoop; dir = parentDir(dir); } const transformed = result.filepath === '' && result.config === null ? transform(null) : transform(result); if (cache) { for (const p of visited) searchCache.set(p, transformed); } return transformed; }, async load(filepath) { validateFilePath(filepath); const absPath = path.resolve(process.cwd(), filepath); if (cache && loadCache.has(absPath)) { return loadCache.get(absPath); } const { base, ext } = path.parse(absPath); const loaderKey = ext || 'noExt'; const loader = loaders[loaderKey]; validateLoader(loader, loaderKey); const content = String(await fsReadFileAsync(absPath)); if (base === 'package.json') { const pkg = await loader(absPath, content); return emplace(loadCache, absPath, transform({ config: getPackageProp(packageProp, pkg), filepath: absPath, })); } const result = { config: null, filepath: absPath, }; const isEmpty = content.trim() === ''; if (isEmpty && ignoreEmptySearchPlaces) return emplace(loadCache, absPath, transform({ config: undefined, filepath: absPath, isEmpty: true, })); result.config = isEmpty ? undefined : await loader(absPath, content); return emplace(loadCache, absPath, transform(isEmpty ? 
{ ...result, isEmpty, config: undefined } : result)); }, clearLoadCache() { if (cache) loadCache.clear(); }, clearSearchCache() { if (cache) searchCache.clear(); }, clearCaches() { if (cache) { loadCache.clear(); searchCache.clear(); } }, }; } exports.lilconfig = lilconfig; function lilconfigSync(name, options) { const { ignoreEmptySearchPlaces, loaders, packageProp, searchPlaces, stopDir, transform, cache, } = getOptions(name, options); const searchCache = new Map(); const loadCache = new Map(); const emplace = makeEmplace(cache); return { search(searchFrom = process.cwd()) { const result = { config: null, filepath: '', }; const visited = new Set(); let dir = searchFrom; dirLoop: while (true) { if (cache) { const r = searchCache.get(dir); if (r !== undefined) { for (const p of visited) searchCache.set(p, r); return r; } visited.add(dir); } for (const searchPlace of searchPlaces) { const filepath = path.join(dir, searchPlace); try { fs.accessSync(filepath); } catch (_a) { continue; } const loaderKey = path.extname(searchPlace) || 'noExt'; const loader = loaders[loaderKey]; const content = String(fs.readFileSync(filepath)); if (searchPlace === 'package.json') { const pkg = loader(filepath, content); const maybeConfig = getPackageProp(packageProp, pkg); if (maybeConfig != null) { result.config = maybeConfig; result.filepath = filepath; break dirLoop; } continue; } const isEmpty = content.trim() === ''; if (isEmpty && ignoreEmptySearchPlaces) continue; if (isEmpty) { result.isEmpty = true; result.config = undefined; } else { validateLoader(loader, loaderKey); result.config = loader(filepath, content); } result.filepath = filepath; break dirLoop; } if (dir === stopDir || dir === parentDir(dir)) break dirLoop; dir = parentDir(dir); } const transformed = result.filepath === '' && result.config === null ? transform(null) : transform(result); if (cache) { for (const p of visited) searchCache.set(p, transformed); } return transformed; }, load(filepath) { validateFilePath(filepath); const absPath = path.resolve(process.cwd(), filepath); if (cache && loadCache.has(absPath)) { return loadCache.get(absPath); } const { base, ext } = path.parse(absPath); const loaderKey = ext || 'noExt'; const loader = loaders[loaderKey]; validateLoader(loader, loaderKey); const content = String(fs.readFileSync(absPath)); if (base === 'package.json') { const pkg = loader(absPath, content); return transform({ config: getPackageProp(packageProp, pkg), filepath: absPath, }); } const result = { config: null, filepath: absPath, }; const isEmpty = content.trim() === ''; if (isEmpty && ignoreEmptySearchPlaces) return emplace(loadCache, absPath, transform({ filepath: absPath, config: undefined, isEmpty: true, })); result.config = isEmpty ? undefined : loader(absPath, content); return emplace(loadCache, absPath, transform(isEmpty ? 
{ ...result, isEmpty, config: undefined } : result)); }, clearLoadCache() { if (cache) loadCache.clear(); }, clearSearchCache() { if (cache) searchCache.clear(); }, clearCaches() { if (cache) { loadCache.clear(); searchCache.clear(); } }, }; } exports.lilconfigSync = lilconfigSync; } (dist)); const ALIAS = Symbol.for('yaml.alias'); const DOC = Symbol.for('yaml.document'); const MAP = Symbol.for('yaml.map'); const PAIR = Symbol.for('yaml.pair'); const SCALAR$1 = Symbol.for('yaml.scalar'); const SEQ = Symbol.for('yaml.seq'); const NODE_TYPE = Symbol.for('yaml.node.type'); const isAlias = (node) => !!node && typeof node === 'object' && node[NODE_TYPE] === ALIAS; const isDocument = (node) => !!node && typeof node === 'object' && node[NODE_TYPE] === DOC; const isMap = (node) => !!node && typeof node === 'object' && node[NODE_TYPE] === MAP; const isPair = (node) => !!node && typeof node === 'object' && node[NODE_TYPE] === PAIR; const isScalar$1 = (node) => !!node && typeof node === 'object' && node[NODE_TYPE] === SCALAR$1; const isSeq = (node) => !!node && typeof node === 'object' && node[NODE_TYPE] === SEQ; function isCollection$1(node) { if (node && typeof node === 'object') switch (node[NODE_TYPE]) { case MAP: case SEQ: return true; } return false; } function isNode$1(node) { if (node && typeof node === 'object') switch (node[NODE_TYPE]) { case ALIAS: case MAP: case SCALAR$1: case SEQ: return true; } return false; } const hasAnchor = (node) => (isScalar$1(node) || isCollection$1(node)) && !!node.anchor; const BREAK$1 = Symbol('break visit'); const SKIP$1 = Symbol('skip children'); const REMOVE$1 = Symbol('remove node'); /** * Apply a visitor to an AST node or document. * * Walks through the tree (depth-first) starting from `node`, calling a * `visitor` function with three arguments: * - `key`: For sequence values and map `Pair`, the node's index in the * collection. Within a `Pair`, `'key'` or `'value'`, correspondingly. * `null` for the root node. * - `node`: The current node. * - `path`: The ancestry of the current node. * * The return value of the visitor may be used to control the traversal: * - `undefined` (default): Do nothing and continue * - `visit.SKIP`: Do not visit the children of this node, continue with next * sibling * - `visit.BREAK`: Terminate traversal completely * - `visit.REMOVE`: Remove the current node, then continue with the next one * - `Node`: Replace the current node, then continue by visiting it * - `number`: While iterating the items of a sequence or map, set the index * of the next step. This is useful especially if the index of the current * node has changed. * * If `visitor` is a single function, it will be called with all values * encountered in the tree, including e.g. `null` values. Alternatively, * separate visitor functions may be defined for each `Map`, `Pair`, `Seq`, * `Alias` and `Scalar` node. To define the same visitor function for more than * one node type, use the `Collection` (map and seq), `Value` (map, seq & scalar) * and `Node` (alias, map, seq & scalar) targets. Of all these, only the most * specific defined one will be used for each node. 
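 *
 * A minimal usage sketch (hypothetical document and key names; assumes the
 * bundled helpers above, e.g. `isScalar$1`, are in scope):
 *
 *   visit$1(doc, {
 *     Scalar(_key, node) {
 *       // upper-case every string scalar in place
 *       if (typeof node.value === 'string') node.value = node.value.toUpperCase();
 *     },
 *     Pair(_key, pair) {
 *       // drop map entries whose key is the scalar 'internal'
 *       if (isScalar$1(pair.key) && pair.key.value === 'internal') return visit$1.REMOVE;
 *     }
 *   });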
*/ function visit$1(node, visitor) { const visitor_ = initVisitor(visitor); if (isDocument(node)) { const cd = visit_(null, node.contents, visitor_, Object.freeze([node])); if (cd === REMOVE$1) node.contents = null; } else visit_(null, node, visitor_, Object.freeze([])); } // Without the `as symbol` casts, TS declares these in the `visit` // namespace using `var`, but then complains about that because // `unique symbol` must be `const`. /** Terminate visit traversal completely */ visit$1.BREAK = BREAK$1; /** Do not visit the children of the current node */ visit$1.SKIP = SKIP$1; /** Remove the current node */ visit$1.REMOVE = REMOVE$1; function visit_(key, node, visitor, path) { const ctrl = callVisitor(key, node, visitor, path); if (isNode$1(ctrl) || isPair(ctrl)) { replaceNode(key, path, ctrl); return visit_(key, ctrl, visitor, path); } if (typeof ctrl !== 'symbol') { if (isCollection$1(node)) { path = Object.freeze(path.concat(node)); for (let i = 0; i < node.items.length; ++i) { const ci = visit_(i, node.items[i], visitor, path); if (typeof ci === 'number') i = ci - 1; else if (ci === BREAK$1) return BREAK$1; else if (ci === REMOVE$1) { node.items.splice(i, 1); i -= 1; } } } else if (isPair(node)) { path = Object.freeze(path.concat(node)); const ck = visit_('key', node.key, visitor, path); if (ck === BREAK$1) return BREAK$1; else if (ck === REMOVE$1) node.key = null; const cv = visit_('value', node.value, visitor, path); if (cv === BREAK$1) return BREAK$1; else if (cv === REMOVE$1) node.value = null; } } return ctrl; } /** * Apply an async visitor to an AST node or document. * * Walks through the tree (depth-first) starting from `node`, calling a * `visitor` function with three arguments: * - `key`: For sequence values and map `Pair`, the node's index in the * collection. Within a `Pair`, `'key'` or `'value'`, correspondingly. * `null` for the root node. * - `node`: The current node. * - `path`: The ancestry of the current node. * * The return value of the visitor may be used to control the traversal: * - `Promise`: Must resolve to one of the following values * - `undefined` (default): Do nothing and continue * - `visit.SKIP`: Do not visit the children of this node, continue with next * sibling * - `visit.BREAK`: Terminate traversal completely * - `visit.REMOVE`: Remove the current node, then continue with the next one * - `Node`: Replace the current node, then continue by visiting it * - `number`: While iterating the items of a sequence or map, set the index * of the next step. This is useful especially if the index of the current * node has changed. * * If `visitor` is a single function, it will be called with all values * encountered in the tree, including e.g. `null` values. Alternatively, * separate visitor functions may be defined for each `Map`, `Pair`, `Seq`, * `Alias` and `Scalar` node. To define the same visitor function for more than * one node type, use the `Collection` (map and seq), `Value` (map, seq & scalar) * and `Node` (alias, map, seq & scalar) targets. Of all these, only the most * specific defined one will be used for each node. 
*/ async function visitAsync(node, visitor) { const visitor_ = initVisitor(visitor); if (isDocument(node)) { const cd = await visitAsync_(null, node.contents, visitor_, Object.freeze([node])); if (cd === REMOVE$1) node.contents = null; } else await visitAsync_(null, node, visitor_, Object.freeze([])); } // Without the `as symbol` casts, TS declares these in the `visit` // namespace using `var`, but then complains about that because // `unique symbol` must be `const`. /** Terminate visit traversal completely */ visitAsync.BREAK = BREAK$1; /** Do not visit the children of the current node */ visitAsync.SKIP = SKIP$1; /** Remove the current node */ visitAsync.REMOVE = REMOVE$1; async function visitAsync_(key, node, visitor, path) { const ctrl = await callVisitor(key, node, visitor, path); if (isNode$1(ctrl) || isPair(ctrl)) { replaceNode(key, path, ctrl); return visitAsync_(key, ctrl, visitor, path); } if (typeof ctrl !== 'symbol') { if (isCollection$1(node)) { path = Object.freeze(path.concat(node)); for (let i = 0; i < node.items.length; ++i) { const ci = await visitAsync_(i, node.items[i], visitor, path); if (typeof ci === 'number') i = ci - 1; else if (ci === BREAK$1) return BREAK$1; else if (ci === REMOVE$1) { node.items.splice(i, 1); i -= 1; } } } else if (isPair(node)) { path = Object.freeze(path.concat(node)); const ck = await visitAsync_('key', node.key, visitor, path); if (ck === BREAK$1) return BREAK$1; else if (ck === REMOVE$1) node.key = null; const cv = await visitAsync_('value', node.value, visitor, path); if (cv === BREAK$1) return BREAK$1; else if (cv === REMOVE$1) node.value = null; } } return ctrl; } function initVisitor(visitor) { if (typeof visitor === 'object' && (visitor.Collection || visitor.Node || visitor.Value)) { return Object.assign({ Alias: visitor.Node, Map: visitor.Node, Scalar: visitor.Node, Seq: visitor.Node }, visitor.Value && { Map: visitor.Value, Scalar: visitor.Value, Seq: visitor.Value }, visitor.Collection && { Map: visitor.Collection, Seq: visitor.Collection }, visitor); } return visitor; } function callVisitor(key, node, visitor, path) { if (typeof visitor === 'function') return visitor(key, node, path); if (isMap(node)) return visitor.Map?.(key, node, path); if (isSeq(node)) return visitor.Seq?.(key, node, path); if (isPair(node)) return visitor.Pair?.(key, node, path); if (isScalar$1(node)) return visitor.Scalar?.(key, node, path); if (isAlias(node)) return visitor.Alias?.(key, node, path); return undefined; } function replaceNode(key, path, node) { const parent = path[path.length - 1]; if (isCollection$1(parent)) { parent.items[key] = node; } else if (isPair(parent)) { if (key === 'key') parent.key = node; else parent.value = node; } else if (isDocument(parent)) { parent.contents = node; } else { const pt = isAlias(parent) ? 'alias' : 'scalar'; throw new Error(`Cannot replace node with ${pt} parent`); } } const escapeChars = { '!': '%21', ',': '%2C', '[': '%5B', ']': '%5D', '{': '%7B', '}': '%7D' }; const escapeTagName = (tn) => tn.replace(/[!,[\]{}]/g, ch => escapeChars[ch]); class Directives { constructor(yaml, tags) { /** * The directives-end/doc-start marker `---`. If `null`, a marker may still be * included in the document's stringified representation. */ this.docStart = null; /** The doc-end marker `...`. 
*/ this.docEnd = false; this.yaml = Object.assign({}, Directives.defaultYaml, yaml); this.tags = Object.assign({}, Directives.defaultTags, tags); } clone() { const copy = new Directives(this.yaml, this.tags); copy.docStart = this.docStart; return copy; } /** * During parsing, get a Directives instance for the current document and * update the stream state according to the current version's spec. */ atDocument() { const res = new Directives(this.yaml, this.tags); switch (this.yaml.version) { case '1.1': this.atNextDocument = true; break; case '1.2': this.atNextDocument = false; this.yaml = { explicit: Directives.defaultYaml.explicit, version: '1.2' }; this.tags = Object.assign({}, Directives.defaultTags); break; } return res; } /** * @param onError - May be called even if the action was successful * @returns `true` on success */ add(line, onError) { if (this.atNextDocument) { this.yaml = { explicit: Directives.defaultYaml.explicit, version: '1.1' }; this.tags = Object.assign({}, Directives.defaultTags); this.atNextDocument = false; } const parts = line.trim().split(/[ \t]+/); const name = parts.shift(); switch (name) { case '%TAG': { if (parts.length !== 2) { onError(0, '%TAG directive should contain exactly two parts'); if (parts.length < 2) return false; } const [handle, prefix] = parts; this.tags[handle] = prefix; return true; } case '%YAML': { this.yaml.explicit = true; if (parts.length !== 1) { onError(0, '%YAML directive should contain exactly one part'); return false; } const [version] = parts; if (version === '1.1' || version === '1.2') { this.yaml.version = version; return true; } else { const isValid = /^\d+\.\d+$/.test(version); onError(6, `Unsupported YAML version ${version}`, isValid); return false; } } default: onError(0, `Unknown directive ${name}`, true); return false; } } /** * Resolves a tag, matching handles to those defined in %TAG directives. * * @returns Resolved tag, which may also be the non-specific tag `'!'` or a * `'!local'` tag, or `null` if unresolvable. */ tagName(source, onError) { if (source === '!') return '!'; // non-specific tag if (source[0] !== '!') { onError(`Not a valid tag: ${source}`); return null; } if (source[1] === '<') { const verbatim = source.slice(2, -1); if (verbatim === '!' || verbatim === '!!') { onError(`Verbatim tags aren't resolved, so ${source} is invalid.`); return null; } if (source[source.length - 1] !== '>') onError('Verbatim tags must end with a >'); return verbatim; } const [, handle, suffix] = source.match(/^(.*!)([^!]*)$/s); if (!suffix) onError(`The ${source} tag has no suffix`); const prefix = this.tags[handle]; if (prefix) { try { return prefix + decodeURIComponent(suffix); } catch (error) { onError(String(error)); return null; } } if (handle === '!') return source; // local tag onError(`Could not resolve tag: ${source}`); return null; } /** * Given a fully resolved tag, returns its printable string form, * taking into account current tag prefixes and defaults. */ tagString(tag) { for (const [handle, prefix] of Object.entries(this.tags)) { if (tag.startsWith(prefix)) return handle + escapeTagName(tag.substring(prefix.length)); } return tag[0] === '!' ? tag : `!<${tag}>`; } toString(doc) { const lines = this.yaml.explicit ? 
[`%YAML ${this.yaml.version || '1.2'}`] : []; const tagEntries = Object.entries(this.tags); let tagNames; if (doc && tagEntries.length > 0 && isNode$1(doc.contents)) { const tags = {}; visit$1(doc.contents, (_key, node) => { if (isNode$1(node) && node.tag) tags[node.tag] = true; }); tagNames = Object.keys(tags); } else tagNames = []; for (const [handle, prefix] of tagEntries) { if (handle === '!!' && prefix === 'tag:yaml.org,2002:') continue; if (!doc || tagNames.some(tn => tn.startsWith(prefix))) lines.push(`%TAG ${handle} ${prefix}`); } return lines.join('\n'); } } Directives.defaultYaml = { explicit: false, version: '1.2' }; Directives.defaultTags = { '!!': 'tag:yaml.org,2002:' }; /** * Verify that the input string is a valid anchor. * * Will throw on errors. */ function anchorIsValid(anchor) { if (/[\x00-\x19\s,[\]{}]/.test(anchor)) { const sa = JSON.stringify(anchor); const msg = `Anchor must not contain whitespace or control characters: ${sa}`; throw new Error(msg); } return true; } function anchorNames(root) { const anchors = new Set(); visit$1(root, { Value(_key, node) { if (node.anchor) anchors.add(node.anchor); } }); return anchors; } /** Find a new anchor name with the given `prefix` and a one-indexed suffix. */ function findNewAnchor(prefix, exclude) { for (let i = 1; true; ++i) { const name = `${prefix}${i}`; if (!exclude.has(name)) return name; } } function createNodeAnchors(doc, prefix) { const aliasObjects = []; const sourceObjects = new Map(); let prevAnchors = null; return { onAnchor: (source) => { aliasObjects.push(source); if (!prevAnchors) prevAnchors = anchorNames(doc); const anchor = findNewAnchor(prefix, prevAnchors); prevAnchors.add(anchor); return anchor; }, /** * With circular references, the source node is only resolved after all * of its child nodes are. This is why anchors are set only after all of * the nodes have been created. */ setAnchors: () => { for (const source of aliasObjects) { const ref = sourceObjects.get(source); if (typeof ref === 'object' && ref.anchor && (isScalar$1(ref.node) || isCollection$1(ref.node))) { ref.node.anchor = ref.anchor; } else { const error = new Error('Failed to resolve repeated object (this should not happen)'); error.source = source; throw error; } } }, sourceObjects }; } /** * Applies the JSON.parse reviver algorithm as defined in the ECMA-262 spec, * in section 24.5.1.1 "Runtime Semantics: InternalizeJSONProperty" of the * 2021 edition: https://tc39.es/ecma262/#sec-json.parse * * Includes extensions for handling Map and Set objects. 
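 *
 * A minimal sketch of the contract (hypothetical data; the reviver has the same
 * shape as `JSON.parse`'s): returning `undefined` deletes a property, any other
 * value replaces it, and the root value is passed via the holder `{ '': value }`.
 *
 *   const data = { user: 'ada', password: 'secret' };
 *   const clean = applyReviver(
 *     (key, value) => (key === 'password' ? undefined : value),
 *     { '': data }, '', data
 *   );
 *   // clean === data, with `password` removed in place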
*/ function applyReviver(reviver, obj, key, val) { if (val && typeof val === 'object') { if (Array.isArray(val)) { for (let i = 0, len = val.length; i < len; ++i) { const v0 = val[i]; const v1 = applyReviver(reviver, val, String(i), v0); if (v1 === undefined) delete val[i]; else if (v1 !== v0) val[i] = v1; } } else if (val instanceof Map) { for (const k of Array.from(val.keys())) { const v0 = val.get(k); const v1 = applyReviver(reviver, val, k, v0); if (v1 === undefined) val.delete(k); else if (v1 !== v0) val.set(k, v1); } } else if (val instanceof Set) { for (const v0 of Array.from(val)) { const v1 = applyReviver(reviver, val, v0, v0); if (v1 === undefined) val.delete(v0); else if (v1 !== v0) { val.delete(v0); val.add(v1); } } } else { for (const [k, v0] of Object.entries(val)) { const v1 = applyReviver(reviver, val, k, v0); if (v1 === undefined) delete val[k]; else if (v1 !== v0) val[k] = v1; } } } return reviver.call(obj, key, val); } /** * Recursively convert any node or its contents to native JavaScript * * @param value - The input value * @param arg - If `value` defines a `toJSON()` method, use this * as its first argument * @param ctx - Conversion context, originally set in Document#toJS(). If * `{ keep: true }` is not set, output should be suitable for JSON * stringification. */ function toJS(value, arg, ctx) { // eslint-disable-next-line @typescript-eslint/no-unsafe-return if (Array.isArray(value)) return value.map((v, i) => toJS(v, String(i), ctx)); if (value && typeof value.toJSON === 'function') { // eslint-disable-next-line @typescript-eslint/no-unsafe-call if (!ctx || !hasAnchor(value)) return value.toJSON(arg, ctx); const data = { aliasCount: 0, count: 1, res: undefined }; ctx.anchors.set(value, data); ctx.onCreate = res => { data.res = res; delete ctx.onCreate; }; const res = value.toJSON(arg, ctx); if (ctx.onCreate) ctx.onCreate(res); return res; } if (typeof value === 'bigint' && !ctx?.keep) return Number(value); return value; } class NodeBase { constructor(type) { Object.defineProperty(this, NODE_TYPE, { value: type }); } /** Create a copy of this node. */ clone() { const copy = Object.create(Object.getPrototypeOf(this), Object.getOwnPropertyDescriptors(this)); if (this.range) copy.range = this.range.slice(); return copy; } /** A plain JavaScript representation of this node. */ toJS(doc, { mapAsMap, maxAliasCount, onAnchor, reviver } = {}) { if (!isDocument(doc)) throw new TypeError('A document argument is required'); const ctx = { anchors: new Map(), doc, keep: true, mapAsMap: mapAsMap === true, mapKeyWarned: false, maxAliasCount: typeof maxAliasCount === 'number' ? maxAliasCount : 100 }; const res = toJS(this, '', ctx); if (typeof onAnchor === 'function') for (const { count, res } of ctx.anchors.values()) onAnchor(res, count); return typeof reviver === 'function' ? applyReviver(reviver, { '': res }, '', res) : res; } } class Alias extends NodeBase { constructor(source) { super(ALIAS); this.source = source; Object.defineProperty(this, 'tag', { set() { throw new Error('Alias nodes cannot have tags'); } }); } /** * Resolve the value of this alias within `doc`, finding the last * instance of the `source` anchor before this node. 
*/ resolve(doc) { let found = undefined; visit$1(doc, { Node: (_key, node) => { if (node === this) return visit$1.BREAK; if (node.anchor === this.source) found = node; } }); return found; } toJSON(_arg, ctx) { if (!ctx) return { source: this.source }; const { anchors, doc, maxAliasCount } = ctx; const source = this.resolve(doc); if (!source) { const msg = `Unresolved alias (the anchor must be set before the alias): ${this.source}`; throw new ReferenceError(msg); } let data = anchors.get(source); if (!data) { // Resolve anchors for Node.prototype.toJS() toJS(source, null, ctx); data = anchors.get(source); } /* istanbul ignore if */ if (!data || data.res === undefined) { const msg = 'This should not happen: Alias anchor was not resolved?'; throw new ReferenceError(msg); } if (maxAliasCount >= 0) { data.count += 1; if (data.aliasCount === 0) data.aliasCount = getAliasCount(doc, source, anchors); if (data.count * data.aliasCount > maxAliasCount) { const msg = 'Excessive alias count indicates a resource exhaustion attack'; throw new ReferenceError(msg); } } return data.res; } toString(ctx, _onComment, _onChompKeep) { const src = `*${this.source}`; if (ctx) { anchorIsValid(this.source); if (ctx.options.verifyAliasOrder && !ctx.anchors.has(this.source)) { const msg = `Unresolved alias (the anchor must be set before the alias): ${this.source}`; throw new Error(msg); } if (ctx.implicitKey) return `${src} `; } return src; } } function getAliasCount(doc, node, anchors) { if (isAlias(node)) { const source = node.resolve(doc); const anchor = anchors && source && anchors.get(source); return anchor ? anchor.count * anchor.aliasCount : 0; } else if (isCollection$1(node)) { let count = 0; for (const item of node.items) { const c = getAliasCount(doc, item, anchors); if (c > count) count = c; } return count; } else if (isPair(node)) { const kc = getAliasCount(doc, node.key, anchors); const vc = getAliasCount(doc, node.value, anchors); return Math.max(kc, vc); } return 1; } const isScalarValue = (value) => !value || (typeof value !== 'function' && typeof value !== 'object'); class Scalar extends NodeBase { constructor(value) { super(SCALAR$1); this.value = value; } toJSON(arg, ctx) { return ctx?.keep ? this.value : toJS(this.value, arg, ctx); } toString() { return String(this.value); } } Scalar.BLOCK_FOLDED = 'BLOCK_FOLDED'; Scalar.BLOCK_LITERAL = 'BLOCK_LITERAL'; Scalar.PLAIN = 'PLAIN'; Scalar.QUOTE_DOUBLE = 'QUOTE_DOUBLE'; Scalar.QUOTE_SINGLE = 'QUOTE_SINGLE'; const defaultTagPrefix = 'tag:yaml.org,2002:'; function findTagObject(value, tagName, tags) { if (tagName) { const match = tags.filter(t => t.tag === tagName); const tagObj = match.find(t => !t.format) ?? match[0]; if (!tagObj) throw new Error(`Tag ${tagName} not found`); return tagObj; } return tags.find(t => t.identify?.(value) && !t.format); } function createNode(value, tagName, ctx) { if (isDocument(value)) value = value.contents; if (isNode$1(value)) return value; if (isPair(value)) { const map = ctx.schema[MAP].createNode?.(ctx.schema, null, ctx); map.items.push(value); return map; } if (value instanceof String || value instanceof Number || value instanceof Boolean || (typeof BigInt !== 'undefined' && value instanceof BigInt) // not supported everywhere ) { // https://tc39.es/ecma262/#sec-serializejsonproperty value = value.valueOf(); } const { aliasDuplicateObjects, onAnchor, onTagObj, schema, sourceObjects } = ctx; // Detect duplicate references to the same object & use Alias nodes for all // after first. 
The `ref` wrapper allows for circular references to resolve. let ref = undefined; if (aliasDuplicateObjects && value && typeof value === 'object') { ref = sourceObjects.get(value); if (ref) { if (!ref.anchor) ref.anchor = onAnchor(value); return new Alias(ref.anchor); } else { ref = { anchor: null, node: null }; sourceObjects.set(value, ref); } } if (tagName?.startsWith('!!')) tagName = defaultTagPrefix + tagName.slice(2); let tagObj = findTagObject(value, tagName, schema.tags); if (!tagObj) { if (value && typeof value.toJSON === 'function') { // eslint-disable-next-line @typescript-eslint/no-unsafe-call value = value.toJSON(); } if (!value || typeof value !== 'object') { const node = new Scalar(value); if (ref) ref.node = node; return node; } tagObj = value instanceof Map ? schema[MAP] : Symbol.iterator in Object(value) ? schema[SEQ] : schema[MAP]; } if (onTagObj) { onTagObj(tagObj); delete ctx.onTagObj; } const node = tagObj?.createNode ? tagObj.createNode(ctx.schema, value, ctx) : typeof tagObj?.nodeClass?.from === 'function' ? tagObj.nodeClass.from(ctx.schema, value, ctx) : new Scalar(value); if (tagName) node.tag = tagName; else if (!tagObj.default) node.tag = tagObj.tag; if (ref) ref.node = node; return node; } function collectionFromPath(schema, path, value) { let v = value; for (let i = path.length - 1; i >= 0; --i) { const k = path[i]; if (typeof k === 'number' && Number.isInteger(k) && k >= 0) { const a = []; a[k] = v; v = a; } else { v = new Map([[k, v]]); } } return createNode(v, undefined, { aliasDuplicateObjects: false, keepUndefined: false, onAnchor: () => { throw new Error('This should not happen, please report a bug.'); }, schema, sourceObjects: new Map() }); } // Type guard is intentionally a little wrong so as to be more useful, // as it does not cover untypable empty non-string iterables (e.g. []). const isEmptyPath = (path) => path == null || (typeof path === 'object' && !!path[Symbol.iterator]().next().done); class Collection extends NodeBase { constructor(type, schema) { super(type); Object.defineProperty(this, 'schema', { value: schema, configurable: true, enumerable: false, writable: true }); } /** * Create a copy of this collection. * * @param schema - If defined, overwrites the original's schema */ clone(schema) { const copy = Object.create(Object.getPrototypeOf(this), Object.getOwnPropertyDescriptors(this)); if (schema) copy.schema = schema; copy.items = copy.items.map(it => isNode$1(it) || isPair(it) ? it.clone(schema) : it); if (this.range) copy.range = this.range.slice(); return copy; } /** * Adds a value to the collection. For `!!map` and `!!omap` the value must * be a Pair instance or a `{ key, value }` object, which may not have a key * that already exists in the map. */ addIn(path, value) { if (isEmptyPath(path)) this.add(value); else { const [key, ...rest] = path; const node = this.get(key, true); if (isCollection$1(node)) node.addIn(rest, value); else if (node === undefined && this.schema) this.set(key, collectionFromPath(this.schema, rest, value)); else throw new Error(`Expected YAML collection at ${key}. Remaining path: ${rest}`); } } /** * Removes a value from the collection. * @returns `true` if the item was found and removed. */ deleteIn(path) { const [key, ...rest] = path; if (rest.length === 0) return this.delete(key); const node = this.get(key, true); if (isCollection$1(node)) return node.deleteIn(rest); else throw new Error(`Expected YAML collection at ${key}. 
Remaining path: ${rest}`); } /** * Returns item at `key`, or `undefined` if not found. By default unwraps * scalar values from their surrounding node; to disable set `keepScalar` to * `true` (collections are always returned intact). */ getIn(path, keepScalar) { const [key, ...rest] = path; const node = this.get(key, true); if (rest.length === 0) return !keepScalar && isScalar$1(node) ? node.value : node; else return isCollection$1(node) ? node.getIn(rest, keepScalar) : undefined; } hasAllNullValues(allowScalar) { return this.items.every(node => { if (!isPair(node)) return false; const n = node.value; return (n == null || (allowScalar && isScalar$1(n) && n.value == null && !n.commentBefore && !n.comment && !n.tag)); }); } /** * Checks if the collection includes a value with the key `key`. */ hasIn(path) { const [key, ...rest] = path; if (rest.length === 0) return this.has(key); const node = this.get(key, true); return isCollection$1(node) ? node.hasIn(rest) : false; } /** * Sets a value in this collection. For `!!set`, `value` needs to be a * boolean to add/remove the item from the set. */ setIn(path, value) { const [key, ...rest] = path; if (rest.length === 0) { this.set(key, value); } else { const node = this.get(key, true); if (isCollection$1(node)) node.setIn(rest, value); else if (node === undefined && this.schema) this.set(key, collectionFromPath(this.schema, rest, value)); else throw new Error(`Expected YAML collection at ${key}. Remaining path: ${rest}`); } } } Collection.maxFlowStringSingleLineLength = 60; /** * Stringifies a comment. * * Empty comment lines are left empty, * lines consisting of a single space are replaced by `#`, * and all other lines are prefixed with a `#`. */ const stringifyComment = (str) => str.replace(/^(?!$)(?: $)?/gm, '#'); function indentComment(comment, indent) { if (/^\n+$/.test(comment)) return comment.substring(1); return indent ? comment.replace(/^(?! *$)/gm, indent) : comment; } const lineComment = (str, indent, comment) => str.endsWith('\n') ? indentComment(comment, indent) : comment.includes('\n') ? '\n' + indentComment(comment, indent) : (str.endsWith(' ') ? '' : ' ') + comment; const FOLD_FLOW = 'flow'; const FOLD_BLOCK = 'block'; const FOLD_QUOTED = 'quoted'; /** * Tries to keep input at up to `lineWidth` characters, splitting only on spaces * not followed by newlines or spaces unless `mode` is `'quoted'`. Lines are * terminated with `\n` and started with `indent`. 
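 *
 * A minimal sketch (hypothetical input; explicit width options instead of the
 * defaults):
 *
 *   const folded = foldFlowLines(
 *     'lorem '.repeat(20).trim(), // ~120 characters of space-separated words
 *     '  ',                        // indent prepended to continuation lines
 *     'flow',
 *     { lineWidth: 40, minContentWidth: 20 }
 *   );
 *   // each chosen fold point becomes '\n  ', keeping lines near 40 columns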
*/ function foldFlowLines(text, indent, mode = 'flow', { indentAtStart, lineWidth = 80, minContentWidth = 20, onFold, onOverflow } = {}) { if (!lineWidth || lineWidth < 0) return text; const endStep = Math.max(1 + minContentWidth, 1 + lineWidth - indent.length); if (text.length <= endStep) return text; const folds = []; const escapedFolds = {}; let end = lineWidth - indent.length; if (typeof indentAtStart === 'number') { if (indentAtStart > lineWidth - Math.max(2, minContentWidth)) folds.push(0); else end = lineWidth - indentAtStart; } let split = undefined; let prev = undefined; let overflow = false; let i = -1; let escStart = -1; let escEnd = -1; if (mode === FOLD_BLOCK) { i = consumeMoreIndentedLines(text, i); if (i !== -1) end = i + endStep; } for (let ch; (ch = text[(i += 1)]);) { if (mode === FOLD_QUOTED && ch === '\\') { escStart = i; switch (text[i + 1]) { case 'x': i += 3; break; case 'u': i += 5; break; case 'U': i += 9; break; default: i += 1; } escEnd = i; } if (ch === '\n') { if (mode === FOLD_BLOCK) i = consumeMoreIndentedLines(text, i); end = i + endStep; split = undefined; } else { if (ch === ' ' && prev && prev !== ' ' && prev !== '\n' && prev !== '\t') { // space surrounded by non-space can be replaced with newline + indent const next = text[i + 1]; if (next && next !== ' ' && next !== '\n' && next !== '\t') split = i; } if (i >= end) { if (split) { folds.push(split); end = split + endStep; split = undefined; } else if (mode === FOLD_QUOTED) { // white-space collected at end may stretch past lineWidth while (prev === ' ' || prev === '\t') { prev = ch; ch = text[(i += 1)]; overflow = true; } // Account for newline escape, but don't break preceding escape const j = i > escEnd + 1 ? i - 2 : escStart - 1; // Bail out if lineWidth & minContentWidth are shorter than an escape string if (escapedFolds[j]) return text; folds.push(j); escapedFolds[j] = true; end = j + endStep; split = undefined; } else { overflow = true; } } } prev = ch; } if (overflow && onOverflow) onOverflow(); if (folds.length === 0) return text; if (onFold) onFold(); let res = text.slice(0, folds[0]); for (let i = 0; i < folds.length; ++i) { const fold = folds[i]; const end = folds[i + 1] || text.length; if (fold === 0) res = `\n${indent}${text.slice(0, end)}`; else { if (mode === FOLD_QUOTED && escapedFolds[fold]) res += `${text[fold]}\\`; res += `\n${indent}${text.slice(fold + 1, end)}`; } } return res; } /** * Presumes `i + 1` is at the start of a line * @returns index of last newline in more-indented block */ function consumeMoreIndentedLines(text, i) { let ch = text[i + 1]; while (ch === ' ' || ch === '\t') { do { ch = text[(i += 1)]; } while (ch && ch !== '\n'); ch = text[i + 1]; } return i; } const getFoldOptions = (ctx, isBlock) => ({ indentAtStart: isBlock ? ctx.indent.length : ctx.indentAtStart, lineWidth: ctx.options.lineWidth, minContentWidth: ctx.options.minContentWidth }); // Also checks for lines starting with %, as parsing the output as YAML 1.1 will // presume that's starting a new document. 
const containsDocumentMarker = (str) => /^(%|---|\.\.\.)/m.test(str); function lineLengthOverLimit(str, lineWidth, indentLength) { if (!lineWidth || lineWidth < 0) return false; const limit = lineWidth - indentLength; const strLen = str.length; if (strLen <= limit) return false; for (let i = 0, start = 0; i < strLen; ++i) { if (str[i] === '\n') { if (i - start > limit) return true; start = i + 1; if (strLen - start <= limit) return false; } } return true; } function doubleQuotedString(value, ctx) { const json = JSON.stringify(value); if (ctx.options.doubleQuotedAsJSON) return json; const { implicitKey } = ctx; const minMultiLineLength = ctx.options.doubleQuotedMinMultiLineLength; const indent = ctx.indent || (containsDocumentMarker(value) ? ' ' : ''); let str = ''; let start = 0; for (let i = 0, ch = json[i]; ch; ch = json[++i]) { if (ch === ' ' && json[i + 1] === '\\' && json[i + 2] === 'n') { // space before newline needs to be escaped to not be folded str += json.slice(start, i) + '\\ '; i += 1; start = i; ch = '\\'; } if (ch === '\\') switch (json[i + 1]) { case 'u': { str += json.slice(start, i); const code = json.substr(i + 2, 4); switch (code) { case '0000': str += '\\0'; break; case '0007': str += '\\a'; break; case '000b': str += '\\v'; break; case '001b': str += '\\e'; break; case '0085': str += '\\N'; break; case '00a0': str += '\\_'; break; case '2028': str += '\\L'; break; case '2029': str += '\\P'; break; default: if (code.substr(0, 2) === '00') str += '\\x' + code.substr(2); else str += json.substr(i, 6); } i += 5; start = i + 1; } break; case 'n': if (implicitKey || json[i + 2] === '"' || json.length < minMultiLineLength) { i += 1; } else { // folding will eat first newline str += json.slice(start, i) + '\n\n'; while (json[i + 2] === '\\' && json[i + 3] === 'n' && json[i + 4] !== '"') { str += '\n'; i += 2; } str += indent; // space after newline needs to be escaped to not be folded if (json[i + 2] === ' ') str += '\\'; i += 1; start = i + 1; } break; default: i += 1; } } str = start ? str + json.slice(start) : json; return implicitKey ? str : foldFlowLines(str, indent, FOLD_QUOTED, getFoldOptions(ctx, false)); } function singleQuotedString(value, ctx) { if (ctx.options.singleQuote === false || (ctx.implicitKey && value.includes('\n')) || /[ \t]\n|\n[ \t]/.test(value) // single quoted string can't have leading or trailing whitespace around newline ) return doubleQuotedString(value, ctx); const indent = ctx.indent || (containsDocumentMarker(value) ? ' ' : ''); const res = "'" + value.replace(/'/g, "''").replace(/\n+/g, `$&\n${indent}`) + "'"; return ctx.implicitKey ? res : foldFlowLines(res, indent, FOLD_FLOW, getFoldOptions(ctx, false)); } function quotedString(value, ctx) { const { singleQuote } = ctx.options; let qs; if (singleQuote === false) qs = doubleQuotedString; else { const hasDouble = value.includes('"'); const hasSingle = value.includes("'"); if (hasDouble && !hasSingle) qs = singleQuotedString; else if (hasSingle && !hasDouble) qs = doubleQuotedString; else qs = singleQuote ? 
singleQuotedString : doubleQuotedString; } return qs(value, ctx); } // The negative lookbehind avoids a polynomial search, // but isn't supported yet on Safari: https://caniuse.com/js-regexp-lookbehind let blockEndNewlines; try { blockEndNewlines = new RegExp('(^|(?<! ))\n+(?!\n|$)', 'g'); } catch { blockEndNewlines = /\n+(?!\n|$)/g; } function blockString({ comment, type, value }, ctx, onComment, onChompKeep) { const { blockQuote, commentString, lineWidth } = ctx.options; // 1. Block can't end in whitespace unless the last line is non-empty. // 2. Strings consisting of only whitespace are best rendered explicitly. if (!blockQuote || /\n[\t ]+$/.test(value) || /^\s*$/.test(value)) { return quotedString(value, ctx); } const indent = ctx.indent || (ctx.forceBlockIndent || containsDocumentMarker(value) ? '  ' : ''); const literal = blockQuote === 'literal' ? true : blockQuote === 'folded' || type === Scalar.BLOCK_FOLDED ? false : type === Scalar.BLOCK_LITERAL ? true : !lineLengthOverLimit(value, lineWidth, indent.length); if (!value) return literal ? '|\n' : '>\n'; // determine chomping from whitespace at value end let chomp; let endStart; for (endStart = value.length; endStart > 0; --endStart) { const ch = value[endStart - 1]; if (ch !== '\n' && ch !== '\t' && ch !== ' ') break; } let end = value.substring(endStart); const endNlPos = end.indexOf('\n'); if (endNlPos === -1) { chomp = '-'; // strip } else if (value === end || endNlPos !== end.length - 1) { chomp = '+'; // keep if (onChompKeep) onChompKeep(); } else { chomp = ''; // clip } if (end) { value = value.slice(0, -end.length); if (end[end.length - 1] === '\n') end = end.slice(0, -1); end = end.replace(blockEndNewlines, `$&${indent}`); } // determine indent indicator from whitespace at value start let startWithSpace = false; let startEnd; let startNlPos = -1; for (startEnd = 0; startEnd < value.length; ++startEnd) { const ch = value[startEnd]; if (ch === ' ') startWithSpace = true; else if (ch === '\n') startNlPos = startEnd; else break; } let start = value.substring(0, startNlPos < startEnd ? startNlPos + 1 : startEnd); if (start) { value = value.substring(start.length); start = start.replace(/\n+/g, `$&${indent}`); } const indentSize = indent ? '2' : '1'; // root is at -1 let header = (literal ? '|' : '>') + (startWithSpace ? indentSize : '') + chomp; if (comment) { header += ' ' + commentString(comment.replace(/ ?[\r\n]+/g, ' ')); if (onComment) onComment(); } if (literal) { value = value.replace(/\n+/g, `$&${indent}`); return `${header}\n${indent}${start}${value}${end}`; } value = value .replace(/\n+/g, '\n$&') .replace(/(?:^|\n)([\t ].*)(?:([\n\t ]*)\n(?![\n\t ]))?/g, '$1$2') // more-indented lines aren't folded // ^ more-ind. ^ empty ^ capture next empty lines only at end of indent .replace(/\n+/g, `$&${indent}`); const body = foldFlowLines(`${start}${value}${end}`, indent, FOLD_BLOCK, getFoldOptions(ctx, true)); return `${header}\n${indent}${body}`; } function plainString(item, ctx, onComment, onChompKeep) { const { type, value } = item; const { actualString, implicitKey, indent, indentStep, inFlow } = ctx; if ((implicitKey && value.includes('\n')) || (inFlow && /[[\]{},]/.test(value))) { return quotedString(value, ctx); } if (!value || /^[\n\t ,[\]{}#&*!|>'"%@`]|^[?-]$|^[?-][ \t]|[\n:][ \t]|[ \t]\n|[\n\t ]#|[\n\t :]$/.test(value)) { // not allowed: // - empty string, '-' or '?' // - start with an indicator character (except [?:-]) or /[?-] / // - '\n ', ': ' or ' \n' anywhere // - '#' not preceded by a non-space char // - end with ' ' or ':' return implicitKey || inFlow || !value.includes('\n') ? quotedString(value, ctx) : blockString(item, ctx, onComment, onChompKeep); } if (!implicitKey && !inFlow && type !== Scalar.PLAIN && value.includes('\n')) { // Where allowed & type not set explicitly, prefer block style for multiline strings return blockString(item, ctx, onComment, onChompKeep); } if (containsDocumentMarker(value)) { if (indent === '') { ctx.forceBlockIndent = true; return blockString(item, ctx, onComment, onChompKeep); } else if (implicitKey && indent === indentStep) { return quotedString(value, ctx); } } const str = value.replace(/\n+/g, `$&\n${indent}`); // Verify that output will be parsed as a string, as e.g. plain numbers and // booleans get parsed with those types in v1.2 (e.g. 
'42', 'true' & '0.9e-3'), // and others in v1.1. if (actualString) { const test = (tag) => tag.default && tag.tag !== 'tag:yaml.org,2002:str' && tag.test?.test(str); const { compat, tags } = ctx.doc.schema; if (tags.some(test) || compat?.some(test)) return quotedString(value, ctx); } return implicitKey ? str : foldFlowLines(str, indent, FOLD_FLOW, getFoldOptions(ctx, false)); } function stringifyString(item, ctx, onComment, onChompKeep) { const { implicitKey, inFlow } = ctx; const ss = typeof item.value === 'string' ? item : Object.assign({}, item, { value: String(item.value) }); let { type } = item; if (type !== Scalar.QUOTE_DOUBLE) { // force double quotes on control characters & unpaired surrogates if (/[\x00-\x08\x0b-\x1f\x7f-\x9f\u{D800}-\u{DFFF}]/u.test(ss.value)) type = Scalar.QUOTE_DOUBLE; } const _stringify = (_type) => { switch (_type) { case Scalar.BLOCK_FOLDED: case Scalar.BLOCK_LITERAL: return implicitKey || inFlow ? quotedString(ss.value, ctx) // blocks are not valid inside flow containers : blockString(ss, ctx, onComment, onChompKeep); case Scalar.QUOTE_DOUBLE: return doubleQuotedString(ss.value, ctx); case Scalar.QUOTE_SINGLE: return singleQuotedString(ss.value, ctx); case Scalar.PLAIN: return plainString(ss, ctx, onComment, onChompKeep); default: return null; } }; let res = _stringify(type); if (res === null) { const { defaultKeyType, defaultStringType } = ctx.options; const t = (implicitKey && defaultKeyType) || defaultStringType; res = _stringify(t); if (res === null) throw new Error(`Unsupported default string type ${t}`); } return res; } function createStringifyContext(doc, options) { const opt = Object.assign({ blockQuote: true, commentString: stringifyComment, defaultKeyType: null, defaultStringType: 'PLAIN', directives: null, doubleQuotedAsJSON: false, doubleQuotedMinMultiLineLength: 40, falseStr: 'false', flowCollectionPadding: true, indentSeq: true, lineWidth: 80, minContentWidth: 20, nullStr: 'null', simpleKeys: false, singleQuote: null, trueStr: 'true', verifyAliasOrder: true }, doc.schema.toStringOptions, options); let inFlow; switch (opt.collectionStyle) { case 'block': inFlow = false; break; case 'flow': inFlow = true; break; default: inFlow = null; } return { anchors: new Set(), doc, flowCollectionPadding: opt.flowCollectionPadding ? ' ' : '', indent: '', indentStep: typeof opt.indent === 'number' ? ' '.repeat(opt.indent) : ' ', inFlow, options: opt }; } function getTagObject(tags, item) { if (item.tag) { const match = tags.filter(t => t.tag === item.tag); if (match.length > 0) return match.find(t => t.format === item.format) ?? match[0]; } let tagObj = undefined; let obj; if (isScalar$1(item)) { obj = item.value; const match = tags.filter(t => t.identify?.(obj)); tagObj = match.find(t => t.format === item.format) ?? match.find(t => !t.format); } else { obj = item; tagObj = tags.find(t => t.nodeClass && obj instanceof t.nodeClass); } if (!tagObj) { const name = obj?.constructor?.name ?? typeof obj; throw new Error(`Tag not resolved for ${name} value`); } return tagObj; } // needs to be called before value stringifier to allow for circular anchor refs function stringifyProps(node, tagObj, { anchors, doc }) { if (!doc.directives) return ''; const props = []; const anchor = (isScalar$1(node) || isCollection$1(node)) && node.anchor; if (anchor && anchorIsValid(anchor)) { anchors.add(anchor); props.push(`&${anchor}`); } const tag = node.tag ? node.tag : tagObj.default ? 
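// Illustrative sketch (hypothetical values): stringifyString() above dispatches on the
// node's type, forcing double quotes for control characters and falling back to
// defaultStringType ('PLAIN' by default) when the requested style is unusable. With
// the createStringifyContext() defaults above (lineWidth: 80, 2-space indent step):
//
//   stringifyString({ value: 'a\u0007b' }, ctx)   // -> "a\ab"  (control char forces double quotes)
//   stringifyString({ value: 'x', type: Scalar.BLOCK_LITERAL }, { ...ctx, inFlow: true })
//                                                 // -> quoted; block scalars are invalid in flow context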
null : tagObj.tag; if (tag) props.push(doc.directives.tagString(tag)); return props.join(' '); } function stringify$2(item, ctx, onComment, onChompKeep) { if (isPair(item)) return item.toString(ctx, onComment, onChompKeep); if (isAlias(item)) { if (ctx.doc.directives) return item.toString(ctx); if (ctx.resolvedAliases?.has(item)) { throw new TypeError(`Cannot stringify circular structure without alias nodes`); } else { if (ctx.resolvedAliases) ctx.resolvedAliases.add(item); else ctx.resolvedAliases = new Set([item]); item = item.resolve(ctx.doc); } } let tagObj = undefined; const node = isNode$1(item) ? item : ctx.doc.createNode(item, { onTagObj: o => (tagObj = o) }); if (!tagObj) tagObj = getTagObject(ctx.doc.schema.tags, node); const props = stringifyProps(node, tagObj, ctx); if (props.length > 0) ctx.indentAtStart = (ctx.indentAtStart ?? 0) + props.length + 1; const str = typeof tagObj.stringify === 'function' ? tagObj.stringify(node, ctx, onComment, onChompKeep) : isScalar$1(node) ? stringifyString(node, ctx, onComment, onChompKeep) : node.toString(ctx, onComment, onChompKeep); if (!props) return str; return isScalar$1(node) || str[0] === '{' || str[0] === '[' ? `${props} ${str}` : `${props}\n${ctx.indent}${str}`; } function stringifyPair({ key, value }, ctx, onComment, onChompKeep) { const { allNullValues, doc, indent, indentStep, options: { commentString, indentSeq, simpleKeys } } = ctx; let keyComment = (isNode$1(key) && key.comment) || null; if (simpleKeys) { if (keyComment) { throw new Error('With simple keys, key nodes cannot have comments'); } if (isCollection$1(key)) { const msg = 'With simple keys, collection cannot be used as a key value'; throw new Error(msg); } } let explicitKey = !simpleKeys && (!key || (keyComment && value == null && !ctx.inFlow) || isCollection$1(key) || (isScalar$1(key) ? key.type === Scalar.BLOCK_FOLDED || key.type === Scalar.BLOCK_LITERAL : typeof key === 'object')); ctx = Object.assign({}, ctx, { allNullValues: false, implicitKey: !explicitKey && (simpleKeys || !allNullValues), indent: indent + indentStep }); let keyCommentDone = false; let chompKeep = false; let str = stringify$2(key, ctx, () => (keyCommentDone = true), () => (chompKeep = true)); if (!explicitKey && !ctx.inFlow && str.length > 1024) { if (simpleKeys) throw new Error('With simple keys, single line scalar must not span more than 1024 characters'); explicitKey = true; } if (ctx.inFlow) { if (allNullValues || value == null) { if (keyCommentDone && onComment) onComment(); return str === '' ? '?' : explicitKey ? `? ${str}` : str; } } else if ((allNullValues && !simpleKeys) || (value == null && explicitKey)) { str = `? ${str}`; if (keyComment && !keyCommentDone) { str += lineComment(str, ctx.indent, commentString(keyComment)); } else if (chompKeep && onChompKeep) onChompKeep(); return str; } if (keyCommentDone) keyComment = null; if (explicitKey) { if (keyComment) str += lineComment(str, ctx.indent, commentString(keyComment)); str = `? 
${str}\n${indent}:`; } else { str = `${str}:`; if (keyComment) str += lineComment(str, ctx.indent, commentString(keyComment)); } let vsb, vcb, valueComment; if (isNode$1(value)) { vsb = !!value.spaceBefore; vcb = value.commentBefore; valueComment = value.comment; } else { vsb = false; vcb = null; valueComment = null; if (value && typeof value === 'object') value = doc.createNode(value); } ctx.implicitKey = false; if (!explicitKey && !keyComment && isScalar$1(value)) ctx.indentAtStart = str.length + 1; chompKeep = false; if (!indentSeq && indentStep.length >= 2 && !ctx.inFlow && !explicitKey && isSeq(value) && !value.flow && !value.tag && !value.anchor) { // If indentSeq === false, consider '- ' as part of indentation where possible ctx.indent = ctx.indent.substring(2); } let valueCommentDone = false; const valueStr = stringify$2(value, ctx, () => (valueCommentDone = true), () => (chompKeep = true)); let ws = ' '; if (keyComment || vsb || vcb) { ws = vsb ? '\n' : ''; if (vcb) { const cs = commentString(vcb); ws += `\n${indentComment(cs, ctx.indent)}`; } if (valueStr === '' && !ctx.inFlow) { if (ws === '\n') ws = '\n\n'; } else { ws += `\n${ctx.indent}`; } } else if (!explicitKey && isCollection$1(value)) { const vs0 = valueStr[0]; const nl0 = valueStr.indexOf('\n'); const hasNewline = nl0 !== -1; const flow = ctx.inFlow ?? value.flow ?? value.items.length === 0; if (hasNewline || !flow) { let hasPropsLine = false; if (hasNewline && (vs0 === '&' || vs0 === '!')) { let sp0 = valueStr.indexOf(' '); if (vs0 === '&' && sp0 !== -1 && sp0 < nl0 && valueStr[sp0 + 1] === '!') { sp0 = valueStr.indexOf(' ', sp0 + 1); } if (sp0 === -1 || nl0 < sp0) hasPropsLine = true; } if (!hasPropsLine) ws = `\n${ctx.indent}`; } } else if (valueStr === '' || valueStr[0] === '\n') { ws = ''; } str += ws + valueStr; if (ctx.inFlow) { if (valueCommentDone && onComment) onComment(); } else if (valueComment && !valueCommentDone) { str += lineComment(str, ctx.indent, commentString(valueComment)); } else if (chompKeep && onChompKeep) { onChompKeep(); } return str; } function warn(logLevel, warning) { if (logLevel === 'debug' || logLevel === 'warn') { // https://github.com/typescript-eslint/typescript-eslint/issues/7478 // eslint-disable-next-line @typescript-eslint/prefer-optional-chain if (typeof process !== 'undefined' && process.emitWarning) process.emitWarning(warning); else console.warn(warning); } } const MERGE_KEY = '<<'; function addPairToJSMap(ctx, map, { key, value }) { if (ctx?.doc.schema.merge && isMergeKey(key)) { value = isAlias(value) ? value.resolve(ctx.doc) : value; if (isSeq(value)) for (const it of value.items) mergeToJSMap(ctx, map, it); else if (Array.isArray(value)) for (const it of value) mergeToJSMap(ctx, map, it); else mergeToJSMap(ctx, map, value); } else { const jsKey = toJS(key, '', ctx); if (map instanceof Map) { map.set(jsKey, toJS(value, jsKey, ctx)); } else if (map instanceof Set) { map.add(jsKey); } else { const stringKey = stringifyKey(key, jsKey, ctx); const jsValue = toJS(value, stringKey, ctx); if (stringKey in map) Object.defineProperty(map, stringKey, { value: jsValue, writable: true, enumerable: true, configurable: true }); else map[stringKey] = jsValue; } } return map; } const isMergeKey = (key) => key === MERGE_KEY || (isScalar$1(key) && key.value === MERGE_KEY && (!key.type || key.type === Scalar.PLAIN)); // If the value associated with a merge key is a single mapping node, each of // its key/value pairs is inserted into the current mapping, unless the key // already exists in it. 
If the value associated with the merge key is a // sequence, then this sequence is expected to contain mapping nodes and each // of these nodes is merged in turn according to its order in the sequence. // Keys in mapping nodes earlier in the sequence override keys specified in // later mapping nodes. -- http://yaml.org/type/merge.html function mergeToJSMap(ctx, map, value) { const source = ctx && isAlias(value) ? value.resolve(ctx.doc) : value; if (!isMap(source)) throw new Error('Merge sources must be maps or map aliases'); const srcMap = source.toJSON(null, ctx, Map); for (const [key, value] of srcMap) { if (map instanceof Map) { if (!map.has(key)) map.set(key, value); } else if (map instanceof Set) { map.add(key); } else if (!Object.prototype.hasOwnProperty.call(map, key)) { Object.defineProperty(map, key, { value, writable: true, enumerable: true, configurable: true }); } } return map; } function stringifyKey(key, jsKey, ctx) { if (jsKey === null) return ''; if (typeof jsKey !== 'object') return String(jsKey); if (isNode$1(key) && ctx?.doc) { const strCtx = createStringifyContext(ctx.doc, {}); strCtx.anchors = new Set(); for (const node of ctx.anchors.keys()) strCtx.anchors.add(node.anchor); strCtx.inFlow = true; strCtx.inStringifyKey = true; const strKey = key.toString(strCtx); if (!ctx.mapKeyWarned) { let jsonStr = JSON.stringify(strKey); if (jsonStr.length > 40) jsonStr = jsonStr.substring(0, 36) + '..."'; warn(ctx.doc.options.logLevel, `Keys with collection values will be stringified due to JS Object restrictions: ${jsonStr}. Set mapAsMap: true to use object keys.`); ctx.mapKeyWarned = true; } return strKey; } return JSON.stringify(jsKey); } function createPair(key, value, ctx) { const k = createNode(key, undefined, ctx); const v = createNode(value, undefined, ctx); return new Pair(k, v); } class Pair { constructor(key, value = null) { Object.defineProperty(this, NODE_TYPE, { value: PAIR }); this.key = key; this.value = value; } clone(schema) { let { key, value } = this; if (isNode$1(key)) key = key.clone(schema); if (isNode$1(value)) value = value.clone(schema); return new Pair(key, value); } toJSON(_, ctx) { const pair = ctx?.mapAsMap ? new Map() : {}; return addPairToJSMap(ctx, pair, this); } toString(ctx, onComment, onChompKeep) { return ctx?.doc ? stringifyPair(this, ctx, onComment, onChompKeep) : JSON.stringify(this); } } function stringifyCollection(collection, ctx, options) { const flow = ctx.inFlow ?? collection.flow; const stringify = flow ? stringifyFlowCollection : stringifyBlockCollection; return stringify(collection, ctx, options); } function stringifyBlockCollection({ comment, items }, ctx, { blockItemPrefix, flowChars, itemIndent, onChompKeep, onComment }) { const { indent, options: { commentString } } = ctx; const itemCtx = Object.assign({}, ctx, { indent: itemIndent, type: null }); let chompKeep = false; // flag for the preceding node's status const lines = []; for (let i = 0; i < items.length; ++i) { const item = items[i]; let comment = null; if (isNode$1(item)) { if (!chompKeep && item.spaceBefore) lines.push(''); addCommentBefore(ctx, lines, item.commentBefore, chompKeep); if (item.comment) comment = item.comment; } else if (isPair(item)) { const ik = isNode$1(item.key) ? 
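// Illustrative sketch of the merge-key handling above (addPairToJSMap / mergeToJSMap),
// with a hypothetical document and a yaml-1.1 schema so that merge is enabled:
//
//   base: &b { x: 1, y: 2 }
//   child: { <<: *b, y: 9 }
//
//   toJS() -> { base: { x: 1, y: 2 }, child: { x: 1, y: 9 } }
//   (explicit keys win over merged ones; with a sequence of maps the earlier
//    sources win, per the yaml.org/type/merge.html note above)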
item.key : null; if (ik) { if (!chompKeep && ik.spaceBefore) lines.push(''); addCommentBefore(ctx, lines, ik.commentBefore, chompKeep); } } chompKeep = false; let str = stringify$2(item, itemCtx, () => (comment = null), () => (chompKeep = true)); if (comment) str += lineComment(str, itemIndent, commentString(comment)); if (chompKeep && comment) chompKeep = false; lines.push(blockItemPrefix + str); } let str; if (lines.length === 0) { str = flowChars.start + flowChars.end; } else { str = lines[0]; for (let i = 1; i < lines.length; ++i) { const line = lines[i]; str += line ? `\n${indent}${line}` : '\n'; } } if (comment) { str += '\n' + indentComment(commentString(comment), indent); if (onComment) onComment(); } else if (chompKeep && onChompKeep) onChompKeep(); return str; } function stringifyFlowCollection({ comment, items }, ctx, { flowChars, itemIndent, onComment }) { const { indent, indentStep, flowCollectionPadding: fcPadding, options: { commentString } } = ctx; itemIndent += indentStep; const itemCtx = Object.assign({}, ctx, { indent: itemIndent, inFlow: true, type: null }); let reqNewline = false; let linesAtValue = 0; const lines = []; for (let i = 0; i < items.length; ++i) { const item = items[i]; let comment = null; if (isNode$1(item)) { if (item.spaceBefore) lines.push(''); addCommentBefore(ctx, lines, item.commentBefore, false); if (item.comment) comment = item.comment; } else if (isPair(item)) { const ik = isNode$1(item.key) ? item.key : null; if (ik) { if (ik.spaceBefore) lines.push(''); addCommentBefore(ctx, lines, ik.commentBefore, false); if (ik.comment) reqNewline = true; } const iv = isNode$1(item.value) ? item.value : null; if (iv) { if (iv.comment) comment = iv.comment; if (iv.commentBefore) reqNewline = true; } else if (item.value == null && ik?.comment) { comment = ik.comment; } } if (comment) reqNewline = true; let str = stringify$2(item, itemCtx, () => (comment = null)); if (i < items.length - 1) str += ','; if (comment) str += lineComment(str, itemIndent, commentString(comment)); if (!reqNewline && (lines.length > linesAtValue || str.includes('\n'))) reqNewline = true; lines.push(str); linesAtValue = lines.length; } let str; const { start, end } = flowChars; if (lines.length === 0) { str = start + end; } else { if (!reqNewline) { const len = lines.reduce((sum, line) => sum + line.length + 2, 2); reqNewline = len > Collection.maxFlowStringSingleLineLength; } if (reqNewline) { str = start; for (const line of lines) str += line ? `\n${indentStep}${indent}${line}` : '\n'; str += `\n${indent}${end}`; } else { str = `${start}${fcPadding}${lines.join(' ')}${fcPadding}${end}`; } } if (comment) { str += lineComment(str, indent, commentString(comment)); if (onComment) onComment(); } return str; } function addCommentBefore({ indent, options: { commentString } }, lines, comment, chompKeep) { if (comment && chompKeep) comment = comment.replace(/^\n+/, ''); if (comment) { const ic = indentComment(commentString(comment), indent); lines.push(ic.trimStart()); // Avoid double indent on first line } } function findPair(items, key) { const k = isScalar$1(key) ? 
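// Illustrative note: findPair() accepts either a raw value or a Scalar-wrapped key,
// so lookups on YAMLMap work with both forms (hypothetical nodes):
//
//   map.get('name')               // matches a Pair whose scalar key has .value === 'name'
//   map.get(new Scalar('name'))   // same pair, matched via the unwrapped value
//   map.get(keyNode)              // also matches by node identity (it.key === key)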
key.value : key; for (const it of items) { if (isPair(it)) { if (it.key === key || it.key === k) return it; if (isScalar$1(it.key) && it.key.value === k) return it; } } return undefined; } class YAMLMap extends Collection { static get tagName() { return 'tag:yaml.org,2002:map'; } constructor(schema) { super(MAP, schema); this.items = []; } /** * A generic collection parsing method that can be extended * to other node classes that inherit from YAMLMap */ static from(schema, obj, ctx) { const { keepUndefined, replacer } = ctx; const map = new this(schema); const add = (key, value) => { if (typeof replacer === 'function') value = replacer.call(obj, key, value); else if (Array.isArray(replacer) && !replacer.includes(key)) return; if (value !== undefined || keepUndefined) map.items.push(createPair(key, value, ctx)); }; if (obj instanceof Map) { for (const [key, value] of obj) add(key, value); } else if (obj && typeof obj === 'object') { for (const key of Object.keys(obj)) add(key, obj[key]); } if (typeof schema.sortMapEntries === 'function') { map.items.sort(schema.sortMapEntries); } return map; } /** * Adds a value to the collection. * * @param overwrite - If not set `true`, using a key that is already in the * collection will throw. Otherwise, overwrites the previous value. */ add(pair, overwrite) { let _pair; if (isPair(pair)) _pair = pair; else if (!pair || typeof pair !== 'object' || !('key' in pair)) { // In TypeScript, this never happens. _pair = new Pair(pair, pair?.value); } else _pair = new Pair(pair.key, pair.value); const prev = findPair(this.items, _pair.key); const sortEntries = this.schema?.sortMapEntries; if (prev) { if (!overwrite) throw new Error(`Key ${_pair.key} already set`); // For scalars, keep the old node & its comments and anchors if (isScalar$1(prev.value) && isScalarValue(_pair.value)) prev.value.value = _pair.value; else prev.value = _pair.value; } else if (sortEntries) { const i = this.items.findIndex(item => sortEntries(_pair, item) < 0); if (i === -1) this.items.push(_pair); else this.items.splice(i, 0, _pair); } else { this.items.push(_pair); } } delete(key) { const it = findPair(this.items, key); if (!it) return false; const del = this.items.splice(this.items.indexOf(it), 1); return del.length > 0; } get(key, keepScalar) { const it = findPair(this.items, key); const node = it?.value; return (!keepScalar && isScalar$1(node) ? node.value : node) ?? undefined; } has(key) { return !!findPair(this.items, key); } set(key, value) { this.add(new Pair(key, value), true); } /** * @param ctx - Conversion context, originally set in Document#toJS() * @param {Class} Type - If set, forces the returned collection type * @returns Instance of Type, Map, or Object */ toJSON(_, ctx, Type) { const map = Type ? new Type() : ctx?.mapAsMap ? 
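// Illustrative usage sketch for YAMLMap here (hypothetical `doc` is a Document from
// this module; createNode() wraps plain objects as YAMLMap):
//
//   const m = doc.createNode({ a: 1 });
//   m.get('a');                      // 1                   (scalar value unwrapped)
//   m.get('a', true);                // Scalar { value: 1 } (keepScalar)
//   m.set('b', 2);                   // add(new Pair('b', 2), true) -> overwrite allowed
//   m.add({ key: 'a', value: 9 });   // throws "Key a already set" (overwrite not requested)
//   m.has('b');                      // true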
new Map() : {}; if (ctx?.onCreate) ctx.onCreate(map); for (const item of this.items) addPairToJSMap(ctx, map, item); return map; } toString(ctx, onComment, onChompKeep) { if (!ctx) return JSON.stringify(this); for (const item of this.items) { if (!isPair(item)) throw new Error(`Map items must all be pairs; found ${JSON.stringify(item)} instead`); } if (!ctx.allNullValues && this.hasAllNullValues(false)) ctx = Object.assign({}, ctx, { allNullValues: true }); return stringifyCollection(this, ctx, { blockItemPrefix: '', flowChars: { start: '{', end: '}' }, itemIndent: ctx.indent || '', onChompKeep, onComment }); } } const map$1 = { collection: 'map', default: true, nodeClass: YAMLMap, tag: 'tag:yaml.org,2002:map', resolve(map, onError) { if (!isMap(map)) onError('Expected a mapping for this tag'); return map; }, createNode: (schema, obj, ctx) => YAMLMap.from(schema, obj, ctx) }; class YAMLSeq extends Collection { static get tagName() { return 'tag:yaml.org,2002:seq'; } constructor(schema) { super(SEQ, schema); this.items = []; } add(value) { this.items.push(value); } /** * Removes a value from the collection. * * `key` must contain a representation of an integer for this to succeed. * It may be wrapped in a `Scalar`. * * @returns `true` if the item was found and removed. */ delete(key) { const idx = asItemIndex(key); if (typeof idx !== 'number') return false; const del = this.items.splice(idx, 1); return del.length > 0; } get(key, keepScalar) { const idx = asItemIndex(key); if (typeof idx !== 'number') return undefined; const it = this.items[idx]; return !keepScalar && isScalar$1(it) ? it.value : it; } /** * Checks if the collection includes a value with the key `key`. * * `key` must contain a representation of an integer for this to succeed. * It may be wrapped in a `Scalar`. */ has(key) { const idx = asItemIndex(key); return typeof idx === 'number' && idx < this.items.length; } /** * Sets a value in this collection. For `!!set`, `value` needs to be a * boolean to add/remove the item from the set. * * If `key` does not contain a representation of an integer, this will throw. * It may be wrapped in a `Scalar`. */ set(key, value) { const idx = asItemIndex(key); if (typeof idx !== 'number') throw new Error(`Expected a valid index, not ${key}.`); const prev = this.items[idx]; if (isScalar$1(prev) && isScalarValue(value)) prev.value = value; else this.items[idx] = value; } toJSON(_, ctx) { const seq = []; if (ctx?.onCreate) ctx.onCreate(seq); let i = 0; for (const item of this.items) seq.push(toJS(item, String(i++), ctx)); return seq; } toString(ctx, onComment, onChompKeep) { if (!ctx) return JSON.stringify(this); return stringifyCollection(this, ctx, { blockItemPrefix: '- ', flowChars: { start: '[', end: ']' }, itemIndent: (ctx.indent || '') + ' ', onChompKeep, onComment }); } static from(schema, obj, ctx) { const { replacer } = ctx; const seq = new this(schema); if (obj && Symbol.iterator in Object(obj)) { let i = 0; for (let it of obj) { if (typeof replacer === 'function') { const key = obj instanceof Set ? it : String(i++); it = replacer.call(obj, key, it); } seq.items.push(createNode(it, undefined, ctx)); } } return seq; } } function asItemIndex(key) { let idx = isScalar$1(key) ? key.value : key; if (idx && typeof idx === 'string') idx = Number(idx); return typeof idx === 'number' && Number.isInteger(idx) && idx >= 0 ? 
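// Illustrative note for YAMLSeq / asItemIndex() around here: sequence indices may be
// numbers, numeric strings, or Scalar-wrapped values (hypothetical `s` is a YAMLSeq):
//
//   s.get(0);        // first item, unwrapped
//   s.get('1');      // numeric string coerced by asItemIndex()
//   s.has(99);       // false unless items.length > 99
//   s.set('x', 1);   // throws "Expected a valid index, not x."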
idx : null; } const seq = { collection: 'seq', default: true, nodeClass: YAMLSeq, tag: 'tag:yaml.org,2002:seq', resolve(seq, onError) { if (!isSeq(seq)) onError('Expected a sequence for this tag'); return seq; }, createNode: (schema, obj, ctx) => YAMLSeq.from(schema, obj, ctx) }; const string = { identify: value => typeof value === 'string', default: true, tag: 'tag:yaml.org,2002:str', resolve: str => str, stringify(item, ctx, onComment, onChompKeep) { ctx = Object.assign({ actualString: true }, ctx); return stringifyString(item, ctx, onComment, onChompKeep); } }; const nullTag = { identify: value => value == null, createNode: () => new Scalar(null), default: true, tag: 'tag:yaml.org,2002:null', test: /^(?:~|[Nn]ull|NULL)?$/, resolve: () => new Scalar(null), stringify: ({ source }, ctx) => typeof source === 'string' && nullTag.test.test(source) ? source : ctx.options.nullStr }; const boolTag = { identify: value => typeof value === 'boolean', default: true, tag: 'tag:yaml.org,2002:bool', test: /^(?:[Tt]rue|TRUE|[Ff]alse|FALSE)$/, resolve: str => new Scalar(str[0] === 't' || str[0] === 'T'), stringify({ source, value }, ctx) { if (source && boolTag.test.test(source)) { const sv = source[0] === 't' || source[0] === 'T'; if (value === sv) return source; } return value ? ctx.options.trueStr : ctx.options.falseStr; } }; function stringifyNumber({ format, minFractionDigits, tag, value }) { if (typeof value === 'bigint') return String(value); const num = typeof value === 'number' ? value : Number(value); if (!isFinite(num)) return isNaN(num) ? '.nan' : num < 0 ? '-.inf' : '.inf'; let n = JSON.stringify(value); if (!format && minFractionDigits && (!tag || tag === 'tag:yaml.org,2002:float') && /^\d/.test(n)) { let i = n.indexOf('.'); if (i < 0) { i = n.length; n += '.'; } let d = minFractionDigits - (n.length - i - 1); while (d-- > 0) n += '0'; } return n; } const floatNaN$1 = { identify: value => typeof value === 'number', default: true, tag: 'tag:yaml.org,2002:float', test: /^(?:[-+]?\.(?:inf|Inf|INF|nan|NaN|NAN))$/, resolve: str => str.slice(-3).toLowerCase() === 'nan' ? NaN : str[0] === '-' ? Number.NEGATIVE_INFINITY : Number.POSITIVE_INFINITY, stringify: stringifyNumber }; const floatExp$1 = { identify: value => typeof value === 'number', default: true, tag: 'tag:yaml.org,2002:float', format: 'EXP', test: /^[-+]?(?:\.[0-9]+|[0-9]+(?:\.[0-9]*)?)[eE][-+]?[0-9]+$/, resolve: str => parseFloat(str), stringify(node) { const num = Number(node.value); return isFinite(num) ? num.toExponential() : stringifyNumber(node); } }; const float$1 = { identify: value => typeof value === 'number', default: true, tag: 'tag:yaml.org,2002:float', test: /^[-+]?(?:\.[0-9]+|[0-9]+\.[0-9]*)$/, resolve(str) { const node = new Scalar(parseFloat(str)); const dot = str.indexOf('.'); if (dot !== -1 && str[str.length - 1] === '0') node.minFractionDigits = str.length - dot - 1; return node; }, stringify: stringifyNumber }; const intIdentify$2 = (value) => typeof value === 'bigint' || Number.isInteger(value); const intResolve$1 = (str, offset, radix, { intAsBigInt }) => (intAsBigInt ? 
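// Illustrative note on the core (YAML 1.2) scalar tags around here — the test
// patterns decide how plain scalars resolve (hypothetical inputs):
//
//   '~'      -> null      (nullTag)
//   'TRUE'   -> true      (boolTag; 'yes'/'on' are only booleans in the yaml-1.1 tags further below)
//   '0o17'   -> 15        (intOct$1)
//   '0x1A'   -> 26        (intHex$1)
//   '1e-3'   -> 0.001     (floatExp$1, format 'EXP')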
BigInt(str) : parseInt(str.substring(offset), radix)); function intStringify$1(node, radix, prefix) { const { value } = node; if (intIdentify$2(value) && value >= 0) return prefix + value.toString(radix); return stringifyNumber(node); } const intOct$1 = { identify: value => intIdentify$2(value) && value >= 0, default: true, tag: 'tag:yaml.org,2002:int', format: 'OCT', test: /^0o[0-7]+$/, resolve: (str, _onError, opt) => intResolve$1(str, 2, 8, opt), stringify: node => intStringify$1(node, 8, '0o') }; const int$1 = { identify: intIdentify$2, default: true, tag: 'tag:yaml.org,2002:int', test: /^[-+]?[0-9]+$/, resolve: (str, _onError, opt) => intResolve$1(str, 0, 10, opt), stringify: stringifyNumber }; const intHex$1 = { identify: value => intIdentify$2(value) && value >= 0, default: true, tag: 'tag:yaml.org,2002:int', format: 'HEX', test: /^0x[0-9a-fA-F]+$/, resolve: (str, _onError, opt) => intResolve$1(str, 2, 16, opt), stringify: node => intStringify$1(node, 16, '0x') }; const schema$2 = [ map$1, seq, string, nullTag, boolTag, intOct$1, int$1, intHex$1, floatNaN$1, floatExp$1, float$1 ]; function intIdentify$1(value) { return typeof value === 'bigint' || Number.isInteger(value); } const stringifyJSON = ({ value }) => JSON.stringify(value); const jsonScalars = [ { identify: value => typeof value === 'string', default: true, tag: 'tag:yaml.org,2002:str', resolve: str => str, stringify: stringifyJSON }, { identify: value => value == null, createNode: () => new Scalar(null), default: true, tag: 'tag:yaml.org,2002:null', test: /^null$/, resolve: () => null, stringify: stringifyJSON }, { identify: value => typeof value === 'boolean', default: true, tag: 'tag:yaml.org,2002:bool', test: /^true|false$/, resolve: str => str === 'true', stringify: stringifyJSON }, { identify: intIdentify$1, default: true, tag: 'tag:yaml.org,2002:int', test: /^-?(?:0|[1-9][0-9]*)$/, resolve: (str, _onError, { intAsBigInt }) => intAsBigInt ? BigInt(str) : parseInt(str, 10), stringify: ({ value }) => intIdentify$1(value) ? 
value.toString() : JSON.stringify(value) }, { identify: value => typeof value === 'number', default: true, tag: 'tag:yaml.org,2002:float', test: /^-?(?:0|[1-9][0-9]*)(?:\.[0-9]*)?(?:[eE][-+]?[0-9]+)?$/, resolve: str => parseFloat(str), stringify: stringifyJSON } ]; const jsonError = { default: true, tag: '', test: /^/, resolve(str, onError) { onError(`Unresolved plain scalar ${JSON.stringify(str)}`); return str; } }; const schema$1 = [map$1, seq].concat(jsonScalars, jsonError); const binary = { identify: value => value instanceof Uint8Array, default: false, tag: 'tag:yaml.org,2002:binary', /** * Returns a Buffer in node and an Uint8Array in browsers * * To use the resulting buffer as an image, you'll want to do something like: * * const blob = new Blob([buffer], { type: 'image/jpeg' }) * document.querySelector('#photo').src = URL.createObjectURL(blob) */ resolve(src, onError) { if (typeof Buffer === 'function') { return Buffer.from(src, 'base64'); } else if (typeof atob === 'function') { // On IE 11, atob() can't handle newlines const str = atob(src.replace(/[\n\r]/g, '')); const buffer = new Uint8Array(str.length); for (let i = 0; i < str.length; ++i) buffer[i] = str.charCodeAt(i); return buffer; } else { onError('This environment does not support reading binary tags; either Buffer or atob is required'); return src; } }, stringify({ comment, type, value }, ctx, onComment, onChompKeep) { const buf = value; // checked earlier by binary.identify() let str; if (typeof Buffer === 'function') { str = buf instanceof Buffer ? buf.toString('base64') : Buffer.from(buf.buffer).toString('base64'); } else if (typeof btoa === 'function') { let s = ''; for (let i = 0; i < buf.length; ++i) s += String.fromCharCode(buf[i]); str = btoa(s); } else { throw new Error('This environment does not support writing binary tags; either Buffer or btoa is required'); } if (!type) type = Scalar.BLOCK_LITERAL; if (type !== Scalar.QUOTE_DOUBLE) { const lineWidth = Math.max(ctx.options.lineWidth - ctx.indent.length, ctx.options.minContentWidth); const n = Math.ceil(str.length / lineWidth); const lines = new Array(n); for (let i = 0, o = 0; i < n; ++i, o += lineWidth) { lines[i] = str.substr(o, lineWidth); } str = lines.join(type === Scalar.BLOCK_LITERAL ? '\n' : ' '); } return stringifyString({ comment, type, value: str }, ctx, onComment, onChompKeep); } }; function resolvePairs(seq, onError) { if (isSeq(seq)) { for (let i = 0; i < seq.items.length; ++i) { let item = seq.items[i]; if (isPair(item)) continue; else if (isMap(item)) { if (item.items.length > 1) onError('Each pair must have its own sequence indicator'); const pair = item.items[0] || new Pair(new Scalar(null)); if (item.commentBefore) pair.key.commentBefore = pair.key.commentBefore ? `${item.commentBefore}\n${pair.key.commentBefore}` : item.commentBefore; if (item.comment) { const cn = pair.value ?? pair.key; cn.comment = cn.comment ? `${item.comment}\n${cn.comment}` : item.comment; } item = pair; } seq.items[i] = isPair(item) ? 
item : new Pair(item); } } else onError('Expected a sequence for this tag'); return seq; } function createPairs(schema, iterable, ctx) { const { replacer } = ctx; const pairs = new YAMLSeq(schema); pairs.tag = 'tag:yaml.org,2002:pairs'; let i = 0; if (iterable && Symbol.iterator in Object(iterable)) for (let it of iterable) { if (typeof replacer === 'function') it = replacer.call(iterable, String(i++), it); let key, value; if (Array.isArray(it)) { if (it.length === 2) { key = it[0]; value = it[1]; } else throw new TypeError(`Expected [key, value] tuple: ${it}`); } else if (it && it instanceof Object) { const keys = Object.keys(it); if (keys.length === 1) { key = keys[0]; value = it[key]; } else { throw new TypeError(`Expected tuple with one key, not ${keys.length} keys`); } } else { key = it; } pairs.items.push(createPair(key, value, ctx)); } return pairs; } const pairs = { collection: 'seq', default: false, tag: 'tag:yaml.org,2002:pairs', resolve: resolvePairs, createNode: createPairs }; class YAMLOMap extends YAMLSeq { constructor() { super(); this.add = YAMLMap.prototype.add.bind(this); this.delete = YAMLMap.prototype.delete.bind(this); this.get = YAMLMap.prototype.get.bind(this); this.has = YAMLMap.prototype.has.bind(this); this.set = YAMLMap.prototype.set.bind(this); this.tag = YAMLOMap.tag; } /** * If `ctx` is given, the return type is actually `Map`, * but TypeScript won't allow widening the signature of a child method. */ toJSON(_, ctx) { if (!ctx) return super.toJSON(_); const map = new Map(); if (ctx?.onCreate) ctx.onCreate(map); for (const pair of this.items) { let key, value; if (isPair(pair)) { key = toJS(pair.key, '', ctx); value = toJS(pair.value, key, ctx); } else { key = toJS(pair, '', ctx); } if (map.has(key)) throw new Error('Ordered maps must not include duplicate keys'); map.set(key, value); } return map; } static from(schema, iterable, ctx) { const pairs = createPairs(schema, iterable, ctx); const omap = new this(); omap.items = pairs.items; return omap; } } YAMLOMap.tag = 'tag:yaml.org,2002:omap'; const omap = { collection: 'seq', identify: value => value instanceof Map, nodeClass: YAMLOMap, default: false, tag: 'tag:yaml.org,2002:omap', resolve(seq, onError) { const pairs = resolvePairs(seq, onError); const seenKeys = []; for (const { key } of pairs.items) { if (isScalar$1(key)) { if (seenKeys.includes(key.value)) { onError(`Ordered maps must not include duplicate keys: ${key.value}`); } else { seenKeys.push(key.value); } } } return Object.assign(new YAMLOMap(), pairs); }, createNode: (schema, iterable, ctx) => YAMLOMap.from(schema, iterable, ctx) }; function boolStringify({ value, source }, ctx) { const boolObj = value ? trueTag : falseTag; if (source && boolObj.test.test(source)) return source; return value ? ctx.options.trueStr : ctx.options.falseStr; } const trueTag = { identify: value => value === true, default: true, tag: 'tag:yaml.org,2002:bool', test: /^(?:Y|y|[Yy]es|YES|[Tt]rue|TRUE|[Oo]n|ON)$/, resolve: () => new Scalar(true), stringify: boolStringify }; const falseTag = { identify: value => value === false, default: true, tag: 'tag:yaml.org,2002:bool', test: /^(?:N|n|[Nn]o|NO|[Ff]alse|FALSE|[Oo]ff|OFF)$/i, resolve: () => new Scalar(false), stringify: boolStringify }; const floatNaN = { identify: value => typeof value === 'number', default: true, tag: 'tag:yaml.org,2002:float', test: /^[-+]?\.(?:inf|Inf|INF|nan|NaN|NAN)$/, resolve: (str) => str.slice(-3).toLowerCase() === 'nan' ? NaN : str[0] === '-' ? 
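// Illustrative note for the !!pairs / !!omap tags above (hypothetical input):
//
//   !!omap [ a: 1, b: 2 ]   -> toJS(): Map { 'a' => 1, 'b' => 2 }
//   !!omap [ a: 1, a: 2 ]   -> error "Ordered maps must not include duplicate keys"
//   !!pairs [ a: 1, a: 2 ]  -> duplicates allowed; items stay an ordered sequence of pairs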
Number.NEGATIVE_INFINITY : Number.POSITIVE_INFINITY, stringify: stringifyNumber }; const floatExp = { identify: value => typeof value === 'number', default: true, tag: 'tag:yaml.org,2002:float', format: 'EXP', test: /^[-+]?(?:[0-9][0-9_]*)?(?:\.[0-9_]*)?[eE][-+]?[0-9]+$/, resolve: (str) => parseFloat(str.replace(/_/g, '')), stringify(node) { const num = Number(node.value); return isFinite(num) ? num.toExponential() : stringifyNumber(node); } }; const float = { identify: value => typeof value === 'number', default: true, tag: 'tag:yaml.org,2002:float', test: /^[-+]?(?:[0-9][0-9_]*)?\.[0-9_]*$/, resolve(str) { const node = new Scalar(parseFloat(str.replace(/_/g, ''))); const dot = str.indexOf('.'); if (dot !== -1) { const f = str.substring(dot + 1).replace(/_/g, ''); if (f[f.length - 1] === '0') node.minFractionDigits = f.length; } return node; }, stringify: stringifyNumber }; const intIdentify = (value) => typeof value === 'bigint' || Number.isInteger(value); function intResolve(str, offset, radix, { intAsBigInt }) { const sign = str[0]; if (sign === '-' || sign === '+') offset += 1; str = str.substring(offset).replace(/_/g, ''); if (intAsBigInt) { switch (radix) { case 2: str = `0b${str}`; break; case 8: str = `0o${str}`; break; case 16: str = `0x${str}`; break; } const n = BigInt(str); return sign === '-' ? BigInt(-1) * n : n; } const n = parseInt(str, radix); return sign === '-' ? -1 * n : n; } function intStringify(node, radix, prefix) { const { value } = node; if (intIdentify(value)) { const str = value.toString(radix); return value < 0 ? '-' + prefix + str.substr(1) : prefix + str; } return stringifyNumber(node); } const intBin = { identify: intIdentify, default: true, tag: 'tag:yaml.org,2002:int', format: 'BIN', test: /^[-+]?0b[0-1_]+$/, resolve: (str, _onError, opt) => intResolve(str, 2, 2, opt), stringify: node => intStringify(node, 2, '0b') }; const intOct = { identify: intIdentify, default: true, tag: 'tag:yaml.org,2002:int', format: 'OCT', test: /^[-+]?0[0-7_]+$/, resolve: (str, _onError, opt) => intResolve(str, 1, 8, opt), stringify: node => intStringify(node, 8, '0') }; const int = { identify: intIdentify, default: true, tag: 'tag:yaml.org,2002:int', test: /^[-+]?[0-9][0-9_]*$/, resolve: (str, _onError, opt) => intResolve(str, 0, 10, opt), stringify: stringifyNumber }; const intHex = { identify: intIdentify, default: true, tag: 'tag:yaml.org,2002:int', format: 'HEX', test: /^[-+]?0x[0-9a-fA-F_]+$/, resolve: (str, _onError, opt) => intResolve(str, 2, 16, opt), stringify: node => intStringify(node, 16, '0x') }; class YAMLSet extends YAMLMap { constructor(schema) { super(schema); this.tag = YAMLSet.tag; } add(key) { let pair; if (isPair(key)) pair = key; else if (key && typeof key === 'object' && 'key' in key && 'value' in key && key.value === null) pair = new Pair(key.key, null); else pair = new Pair(key, null); const prev = findPair(this.items, pair.key); if (!prev) this.items.push(pair); } /** * If `keepPair` is `true`, returns the Pair matching `key`. * Otherwise, returns the value of that Pair's key. */ get(key, keepPair) { const pair = findPair(this.items, key); return !keepPair && isPair(pair) ? isScalar$1(pair.key) ? 
pair.key.value : pair.key : pair; } set(key, value) { if (typeof value !== 'boolean') throw new Error(`Expected boolean value for set(key, value) in a YAML set, not ${typeof value}`); const prev = findPair(this.items, key); if (prev && !value) { this.items.splice(this.items.indexOf(prev), 1); } else if (!prev && value) { this.items.push(new Pair(key)); } } toJSON(_, ctx) { return super.toJSON(_, ctx, Set); } toString(ctx, onComment, onChompKeep) { if (!ctx) return JSON.stringify(this); if (this.hasAllNullValues(true)) return super.toString(Object.assign({}, ctx, { allNullValues: true }), onComment, onChompKeep); else throw new Error('Set items must all have null values'); } static from(schema, iterable, ctx) { const { replacer } = ctx; const set = new this(schema); if (iterable && Symbol.iterator in Object(iterable)) for (let value of iterable) { if (typeof replacer === 'function') value = replacer.call(iterable, value, value); set.items.push(createPair(value, null, ctx)); } return set; } } YAMLSet.tag = 'tag:yaml.org,2002:set'; const set = { collection: 'map', identify: value => value instanceof Set, nodeClass: YAMLSet, default: false, tag: 'tag:yaml.org,2002:set', createNode: (schema, iterable, ctx) => YAMLSet.from(schema, iterable, ctx), resolve(map, onError) { if (isMap(map)) { if (map.hasAllNullValues(true)) return Object.assign(new YAMLSet(), map); else onError('Set items must all have null values'); } else onError('Expected a mapping for this tag'); return map; } }; /** Internal types handle bigint as number, because TS can't figure it out. */ function parseSexagesimal(str, asBigInt) { const sign = str[0]; const parts = sign === '-' || sign === '+' ? str.substring(1) : str; const num = (n) => asBigInt ? BigInt(n) : Number(n); const res = parts .replace(/_/g, '') .split(':') .reduce((res, p) => res * num(60) + num(p), num(0)); return (sign === '-' ? num(-1) * res : res); } /** * hhhh:mm:ss.sss * * Internal types handle bigint as number, because TS can't figure it out. */ function stringifySexagesimal(node) { let { value } = node; let num = (n) => n; if (typeof value === 'bigint') num = n => BigInt(n); else if (isNaN(value) || !isFinite(value)) return stringifyNumber(node); let sign = ''; if (value < 0) { sign = '-'; value *= num(-1); } const _60 = num(60); const parts = [value % _60]; // seconds, including ms if (value < 60) { parts.unshift(0); // at least one : is required } else { value = (value - parts[0]) / _60; parts.unshift(value % _60); // minutes if (value >= 60) { value = (value - parts[0]) / _60; parts.unshift(value); // hours } } return (sign + parts .map(n => String(n).padStart(2, '0')) .join(':') .replace(/000000\d*$/, '') // % 60 may introduce error ); } const intTime = { identify: value => typeof value === 'bigint' || Number.isInteger(value), default: true, tag: 'tag:yaml.org,2002:int', format: 'TIME', test: /^[-+]?[0-9][0-9_]*(?::[0-5]?[0-9])+$/, resolve: (str, _onError, { intAsBigInt }) => parseSexagesimal(str, intAsBigInt), stringify: stringifySexagesimal }; const floatTime = { identify: value => typeof value === 'number', default: true, tag: 'tag:yaml.org,2002:float', format: 'TIME', test: /^[-+]?[0-9][0-9_]*(?::[0-5]?[0-9])+\.[0-9_]*$/, resolve: str => parseSexagesimal(str, false), stringify: stringifySexagesimal }; const timestamp = { identify: value => value instanceof Date, default: true, tag: 'tag:yaml.org,2002:timestamp', // If the time zone is omitted, the timestamp is assumed to be specified in UTC. 
The time part // may be omitted altogether, resulting in a date format. In such a case, the time part is // assumed to be 00:00:00Z (start of day, UTC). test: RegExp('^([0-9]{4})-([0-9]{1,2})-([0-9]{1,2})' + // YYYY-Mm-Dd '(?:' + // time is optional '(?:t|T|[ \\t]+)' + // t | T | whitespace '([0-9]{1,2}):([0-9]{1,2}):([0-9]{1,2}(\\.[0-9]+)?)' + // Hh:Mm:Ss(.ss)? '(?:[ \\t]*(Z|[-+][012]?[0-9](?::[0-9]{2})?))?' + // Z | +5 | -03:30 ')?$'), resolve(str) { const match = str.match(timestamp.test); if (!match) throw new Error('!!timestamp expects a date, starting with yyyy-mm-dd'); const [, year, month, day, hour, minute, second] = match.map(Number); const millisec = match[7] ? Number((match[7] + '00').substr(1, 3)) : 0; let date = Date.UTC(year, month - 1, day, hour || 0, minute || 0, second || 0, millisec); const tz = match[8]; if (tz && tz !== 'Z') { let d = parseSexagesimal(tz, false); if (Math.abs(d) < 30) d *= 60; date -= 60000 * d; } return new Date(date); }, stringify: ({ value }) => value.toISOString().replace(/((T00:00)?:00)?\.000Z$/, '') }; const schema = [ map$1, seq, string, nullTag, trueTag, falseTag, intBin, intOct, int, intHex, floatNaN, floatExp, float, binary, omap, pairs, set, intTime, floatTime, timestamp ]; const schemas = new Map([ ['core', schema$2], ['failsafe', [map$1, seq, string]], ['json', schema$1], ['yaml11', schema], ['yaml-1.1', schema] ]); const tagsByName = { binary, bool: boolTag, float: float$1, floatExp: floatExp$1, floatNaN: floatNaN$1, floatTime, int: int$1, intHex: intHex$1, intOct: intOct$1, intTime, map: map$1, null: nullTag, omap, pairs, seq, set, timestamp }; const coreKnownTags = { 'tag:yaml.org,2002:binary': binary, 'tag:yaml.org,2002:omap': omap, 'tag:yaml.org,2002:pairs': pairs, 'tag:yaml.org,2002:set': set, 'tag:yaml.org,2002:timestamp': timestamp }; function getTags(customTags, schemaName) { let tags = schemas.get(schemaName); if (!tags) { if (Array.isArray(customTags)) tags = []; else { const keys = Array.from(schemas.keys()) .filter(key => key !== 'yaml11') .map(key => JSON.stringify(key)) .join(', '); throw new Error(`Unknown schema "${schemaName}"; use one of ${keys} or define customTags array`); } } if (Array.isArray(customTags)) { for (const tag of customTags) tags = tags.concat(tag); } else if (typeof customTags === 'function') { tags = customTags(tags.slice()); } return tags.map(tag => { if (typeof tag !== 'string') return tag; const tagObj = tagsByName[tag]; if (tagObj) return tagObj; const keys = Object.keys(tagsByName) .map(key => JSON.stringify(key)) .join(', '); throw new Error(`Unknown custom tag "${tag}"; use one of ${keys}`); }); } const sortMapEntriesByKey = (a, b) => a.key < b.key ? -1 : a.key > b.key ? 1 : 0; class Schema { constructor({ compat, customTags, merge, resolveKnownTags, schema, sortMapEntries, toStringDefaults }) { this.compat = Array.isArray(compat) ? getTags(compat, 'compat') : compat ? getTags(null, compat) : null; this.merge = !!merge; this.name = (typeof schema === 'string' && schema) || 'core'; this.knownTags = resolveKnownTags ? coreKnownTags : {}; this.tags = getTags(customTags, this.name); this.toStringOptions = toStringDefaults ?? null; Object.defineProperty(this, MAP, { value: map$1 }); Object.defineProperty(this, SCALAR$1, { value: string }); Object.defineProperty(this, SEQ, { value: seq }); // Used by createMap() this.sortMapEntries = typeof sortMapEntries === 'function' ? sortMapEntries : sortMapEntries === true ? 
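// Illustrative sketch for Schema / getTags() above (hypothetical options):
//
//   new Schema({ schema: 'core' })                                        // map, seq, str + 1.2 scalars
//   new Schema({ schema: 'core', customTags: ['timestamp', 'binary'] })   // string names resolved via tagsByName
//   new Schema({ schema: 'core', customTags: tags => tags.concat(intTime, floatTime) })
//   new Schema({ schema: 'oops' })   // throws: Unknown schema "oops"; use one of "core", "failsafe", "json", "yaml-1.1" ...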
sortMapEntriesByKey : null; } clone() { const copy = Object.create(Schema.prototype, Object.getOwnPropertyDescriptors(this)); copy.tags = this.tags.slice(); return copy; } } function stringifyDocument(doc, options) { const lines = []; let hasDirectives = options.directives === true; if (options.directives !== false && doc.directives) { const dir = doc.directives.toString(doc); if (dir) { lines.push(dir); hasDirectives = true; } else if (doc.directives.docStart) hasDirectives = true; } if (hasDirectives) lines.push('---'); const ctx = createStringifyContext(doc, options); const { commentString } = ctx.options; if (doc.commentBefore) { if (lines.length !== 1) lines.unshift(''); const cs = commentString(doc.commentBefore); lines.unshift(indentComment(cs, '')); } let chompKeep = false; let contentComment = null; if (doc.contents) { if (isNode$1(doc.contents)) { if (doc.contents.spaceBefore && hasDirectives) lines.push(''); if (doc.contents.commentBefore) { const cs = commentString(doc.contents.commentBefore); lines.push(indentComment(cs, '')); } // top-level block scalars need to be indented if followed by a comment ctx.forceBlockIndent = !!doc.comment; contentComment = doc.contents.comment; } const onChompKeep = contentComment ? undefined : () => (chompKeep = true); let body = stringify$2(doc.contents, ctx, () => (contentComment = null), onChompKeep); if (contentComment) body += lineComment(body, '', commentString(contentComment)); if ((body[0] === '|' || body[0] === '>') && lines[lines.length - 1] === '---') { // Top-level block scalars with a preceding doc marker ought to use the // same line for their header. lines[lines.length - 1] = `--- ${body}`; } else lines.push(body); } else { lines.push(stringify$2(doc.contents, ctx)); } if (doc.directives?.docEnd) { if (doc.comment) { const cs = commentString(doc.comment); if (cs.includes('\n')) { lines.push('...'); lines.push(indentComment(cs, '')); } else { lines.push(`... ${cs}`); } } else { lines.push('...'); } } else { let dc = doc.comment; if (dc && chompKeep) dc = dc.replace(/^\n+/, ''); if (dc) { if ((!chompKeep || contentComment) && lines[lines.length - 1] !== '') lines.push(''); lines.push(indentComment(commentString(dc), '')); } } return lines.join('\n') + '\n'; } class Document { constructor(value, replacer, options) { /** A comment before this Document */ this.commentBefore = null; /** A comment immediately after this Document */ this.comment = null; /** Errors encountered during parsing. */ this.errors = []; /** Warnings encountered during parsing. */ this.warnings = []; Object.defineProperty(this, NODE_TYPE, { value: DOC }); let _replacer = null; if (typeof replacer === 'function' || Array.isArray(replacer)) { _replacer = replacer; } else if (options === undefined && replacer) { options = replacer; replacer = undefined; } const opt = Object.assign({ intAsBigInt: false, keepSourceTokens: false, logLevel: 'warn', prettyErrors: true, strict: true, uniqueKeys: true, version: '1.2' }, options); this.options = opt; let { version } = opt; if (options?._directives) { this.directives = options._directives.atDocument(); if (this.directives.yaml.explicit) version = this.directives.yaml.version; } else this.directives = new Directives({ version }); this.setSchema(version, options); // @ts-expect-error We can't really know that this matches Contents. this.contents = value === undefined ? null : this.createNode(value, _replacer, options); } /** * Create a deep copy of this Document and its contents. 
* * Custom Node values that inherit from `Object` still refer to their original instances. */ clone() { const copy = Object.create(Document.prototype, { [NODE_TYPE]: { value: DOC } }); copy.commentBefore = this.commentBefore; copy.comment = this.comment; copy.errors = this.errors.slice(); copy.warnings = this.warnings.slice(); copy.options = Object.assign({}, this.options); if (this.directives) copy.directives = this.directives.clone(); copy.schema = this.schema.clone(); // @ts-expect-error We can't really know that this matches Contents. copy.contents = isNode$1(this.contents) ? this.contents.clone(copy.schema) : this.contents; if (this.range) copy.range = this.range.slice(); return copy; } /** Adds a value to the document. */ add(value) { if (assertCollection(this.contents)) this.contents.add(value); } /** Adds a value to the document. */ addIn(path, value) { if (assertCollection(this.contents)) this.contents.addIn(path, value); } /** * Create a new `Alias` node, ensuring that the target `node` has the required anchor. * * If `node` already has an anchor, `name` is ignored. * Otherwise, the `node.anchor` value will be set to `name`, * or if an anchor with that name is already present in the document, * `name` will be used as a prefix for a new unique anchor. * If `name` is undefined, the generated anchor will use 'a' as a prefix. */ createAlias(node, name) { if (!node.anchor) { const prev = anchorNames(this); node.anchor = // eslint-disable-next-line @typescript-eslint/prefer-nullish-coalescing !name || prev.has(name) ? findNewAnchor(name || 'a', prev) : name; } return new Alias(node.anchor); } createNode(value, replacer, options) { let _replacer = undefined; if (typeof replacer === 'function') { value = replacer.call({ '': value }, '', value); _replacer = replacer; } else if (Array.isArray(replacer)) { const keyToStr = (v) => typeof v === 'number' || v instanceof String || v instanceof Number; const asStr = replacer.filter(keyToStr).map(String); if (asStr.length > 0) replacer = replacer.concat(asStr); _replacer = replacer; } else if (options === undefined && replacer) { options = replacer; replacer = undefined; } const { aliasDuplicateObjects, anchorPrefix, flow, keepUndefined, onTagObj, tag } = options ?? {}; const { onAnchor, setAnchors, sourceObjects } = createNodeAnchors(this, // eslint-disable-next-line @typescript-eslint/prefer-nullish-coalescing anchorPrefix || 'a'); const ctx = { aliasDuplicateObjects: aliasDuplicateObjects ?? true, keepUndefined: keepUndefined ?? false, onAnchor, onTagObj, replacer: _replacer, schema: this.schema, sourceObjects }; const node = createNode(value, tag, ctx); if (flow && isCollection$1(node)) node.flow = true; setAnchors(); return node; } /** * Convert a key and a value into a `Pair` using the current schema, * recursively wrapping all values as `Scalar` or `Collection` nodes. */ createPair(key, value, options = {}) { const k = this.createNode(key, null, options); const v = this.createNode(value, null, options); return new Pair(k, v); } /** * Removes a value from the document. * @returns `true` if the item was found and removed. */ delete(key) { return assertCollection(this.contents) ? this.contents.delete(key) : false; } /** * Removes a value from the document. * @returns `true` if the item was found and removed. */ deleteIn(path) { if (isEmptyPath(path)) { if (this.contents == null) return false; // @ts-expect-error Presumed impossible if Strict extends false this.contents = null; return true; } return assertCollection(this.contents) ? 
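// Illustrative usage sketch for the Document anchor/alias helpers around here
// (hypothetical values; output shown roughly):
//
//   const doc = new Document({ base: { x: 1 } });
//   const node = doc.getIn(['base'], true);              // the YAMLMap node for { x: 1 }
//   doc.setIn(['child'], doc.createAlias(node, 'b'));    // sets node.anchor = 'b'
//   doc.toString();
//   // base: &b
//   //   x: 1
//   // child: *b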
this.contents.deleteIn(path) : false; } /** * Returns item at `key`, or `undefined` if not found. By default unwraps * scalar values from their surrounding node; to disable set `keepScalar` to * `true` (collections are always returned intact). */ get(key, keepScalar) { return isCollection$1(this.contents) ? this.contents.get(key, keepScalar) : undefined; } /** * Returns item at `path`, or `undefined` if not found. By default unwraps * scalar values from their surrounding node; to disable set `keepScalar` to * `true` (collections are always returned intact). */ getIn(path, keepScalar) { if (isEmptyPath(path)) return !keepScalar && isScalar$1(this.contents) ? this.contents.value : this.contents; return isCollection$1(this.contents) ? this.contents.getIn(path, keepScalar) : undefined; } /** * Checks if the document includes a value with the key `key`. */ has(key) { return isCollection$1(this.contents) ? this.contents.has(key) : false; } /** * Checks if the document includes a value at `path`. */ hasIn(path) { if (isEmptyPath(path)) return this.contents !== undefined; return isCollection$1(this.contents) ? this.contents.hasIn(path) : false; } /** * Sets a value in this document. For `!!set`, `value` needs to be a * boolean to add/remove the item from the set. */ set(key, value) { if (this.contents == null) { // @ts-expect-error We can't really know that this matches Contents. this.contents = collectionFromPath(this.schema, [key], value); } else if (assertCollection(this.contents)) { this.contents.set(key, value); } } /** * Sets a value in this document. For `!!set`, `value` needs to be a * boolean to add/remove the item from the set. */ setIn(path, value) { if (isEmptyPath(path)) { // @ts-expect-error We can't really know that this matches Contents. this.contents = value; } else if (this.contents == null) { // @ts-expect-error We can't really know that this matches Contents. this.contents = collectionFromPath(this.schema, Array.from(path), value); } else if (assertCollection(this.contents)) { this.contents.setIn(path, value); } } /** * Change the YAML version and schema used by the document. * A `null` version disables support for directives, explicit tags, anchors, and aliases. * It also requires the `schema` option to be given as a `Schema` instance value. * * Overrides all previously set schema options. 
*/ setSchema(version, options = {}) { if (typeof version === 'number') version = String(version); let opt; switch (version) { case '1.1': if (this.directives) this.directives.yaml.version = '1.1'; else this.directives = new Directives({ version: '1.1' }); opt = { merge: true, resolveKnownTags: false, schema: 'yaml-1.1' }; break; case '1.2': case 'next': if (this.directives) this.directives.yaml.version = version; else this.directives = new Directives({ version }); opt = { merge: false, resolveKnownTags: true, schema: 'core' }; break; case null: if (this.directives) delete this.directives; opt = null; break; default: { const sv = JSON.stringify(version); throw new Error(`Expected '1.1', '1.2' or null as first argument, but found: ${sv}`); } } // Not using `instanceof Schema` to allow for duck typing if (options.schema instanceof Object) this.schema = options.schema; else if (opt) this.schema = new Schema(Object.assign(opt, options)); else throw new Error(`With a null YAML version, the { schema: Schema } option is required`); } // json & jsonArg are only used from toJSON() toJS({ json, jsonArg, mapAsMap, maxAliasCount, onAnchor, reviver } = {}) { const ctx = { anchors: new Map(), doc: this, keep: !json, mapAsMap: mapAsMap === true, mapKeyWarned: false, maxAliasCount: typeof maxAliasCount === 'number' ? maxAliasCount : 100 }; const res = toJS(this.contents, jsonArg ?? '', ctx); if (typeof onAnchor === 'function') for (const { count, res } of ctx.anchors.values()) onAnchor(res, count); return typeof reviver === 'function' ? applyReviver(reviver, { '': res }, '', res) : res; } /** * A JSON representation of the document `contents`. * * @param jsonArg Used by `JSON.stringify` to indicate the array index or * property name. */ toJSON(jsonArg, onAnchor) { return this.toJS({ json: true, jsonArg, mapAsMap: false, onAnchor }); } /** A YAML representation of the document. 
*/ toString(options = {}) { if (this.errors.length > 0) throw new Error('Document with errors cannot be stringified'); if ('indent' in options && (!Number.isInteger(options.indent) || Number(options.indent) <= 0)) { const s = JSON.stringify(options.indent); throw new Error(`"indent" option must be a positive integer, not ${s}`); } return stringifyDocument(this, options); } } function assertCollection(contents) { if (isCollection$1(contents)) return true; throw new Error('Expected a YAML collection as document contents'); } class YAMLError extends Error { constructor(name, pos, code, message) { super(); this.name = name; this.code = code; this.message = message; this.pos = pos; } } class YAMLParseError extends YAMLError { constructor(pos, code, message) { super('YAMLParseError', pos, code, message); } } class YAMLWarning extends YAMLError { constructor(pos, code, message) { super('YAMLWarning', pos, code, message); } } const prettifyError = (src, lc) => (error) => { if (error.pos[0] === -1) return; error.linePos = error.pos.map(pos => lc.linePos(pos)); const { line, col } = error.linePos[0]; error.message += ` at line ${line}, column ${col}`; let ci = col - 1; let lineStr = src .substring(lc.lineStarts[line - 1], lc.lineStarts[line]) .replace(/[\n\r]+$/, ''); // Trim to max 80 chars, keeping col position near the middle if (ci >= 60 && lineStr.length > 80) { const trimStart = Math.min(ci - 39, lineStr.length - 79); lineStr = '…' + lineStr.substring(trimStart); ci -= trimStart - 1; } if (lineStr.length > 80) lineStr = lineStr.substring(0, 79) + '…'; // Include previous line in context if pointing at line start if (line > 1 && /^ *$/.test(lineStr.substring(0, ci))) { // Regexp won't match if start is trimmed let prev = src.substring(lc.lineStarts[line - 2], lc.lineStarts[line - 1]); if (prev.length > 80) prev = prev.substring(0, 79) + '…\n'; lineStr = prev + lineStr; } if (/[^ ]/.test(lineStr)) { let count = 1; const end = error.linePos[1]; if (end && end.line === line && end.col > col) { count = Math.max(1, Math.min(end.col - col, 80 - ci)); } const pointer = ' '.repeat(ci) + '^'.repeat(count); error.message += `:\n\n${lineStr}\n${pointer}\n`; } }; function resolveProps(tokens, { flow, indicator, next, offset, onError, startOnNewline }) { let spaceBefore = false; let atNewline = startOnNewline; let hasSpace = startOnNewline; let comment = ''; let commentSep = ''; let hasNewline = false; let hasNewlineAfterProp = false; let reqSpace = false; let anchor = null; let tag = null; let comma = null; let found = null; let start = null; for (const token of tokens) { if (reqSpace) { if (token.type !== 'space' && token.type !== 'newline' && token.type !== 'comma') onError(token.offset, 'MISSING_CHAR', 'Tags and anchors must be separated from the next token by white space'); reqSpace = false; } switch (token.type) { case 'space': // At the doc level, tabs at line start may be parsed // as leading white space rather than indentation. // In a flow collection, only the parser handles indent. 
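// Illustrative note on prettifyError() above: parse errors get the line/column
// appended to the message plus a trimmed source excerpt with a caret pointer.
// Roughly (hypothetical document and positions):
//
//   YAMLParseError: Map keys must be unique at line 3, column 1:
//
//   a: 2
//   ^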
if (!flow && atNewline && indicator !== 'doc-start' && token.source[0] === '\t') onError(token, 'TAB_AS_INDENT', 'Tabs are not allowed as indentation'); hasSpace = true; break; case 'comment': { if (!hasSpace) onError(token, 'MISSING_CHAR', 'Comments must be separated from other tokens by white space characters'); const cb = token.source.substring(1) || ' '; if (!comment) comment = cb; else comment += commentSep + cb; commentSep = ''; atNewline = false; break; } case 'newline': if (atNewline) { if (comment) comment += token.source; else spaceBefore = true; } else commentSep += token.source; atNewline = true; hasNewline = true; if (anchor || tag) hasNewlineAfterProp = true; hasSpace = true; break; case 'anchor': if (anchor) onError(token, 'MULTIPLE_ANCHORS', 'A node can have at most one anchor'); if (token.source.endsWith(':')) onError(token.offset + token.source.length - 1, 'BAD_ALIAS', 'Anchor ending in : is ambiguous', true); anchor = token; if (start === null) start = token.offset; atNewline = false; hasSpace = false; reqSpace = true; break; case 'tag': { if (tag) onError(token, 'MULTIPLE_TAGS', 'A node can have at most one tag'); tag = token; if (start === null) start = token.offset; atNewline = false; hasSpace = false; reqSpace = true; break; } case indicator: // Could here handle preceding comments differently if (anchor || tag) onError(token, 'BAD_PROP_ORDER', `Anchors and tags must be after the ${token.source} indicator`); if (found) onError(token, 'UNEXPECTED_TOKEN', `Unexpected ${token.source} in ${flow ?? 'collection'}`); found = token; atNewline = false; hasSpace = false; break; case 'comma': if (flow) { if (comma) onError(token, 'UNEXPECTED_TOKEN', `Unexpected , in ${flow}`); comma = token; atNewline = false; hasSpace = false; break; } // else fallthrough default: onError(token, 'UNEXPECTED_TOKEN', `Unexpected ${token.type} token`); atNewline = false; hasSpace = false; } } const last = tokens[tokens.length - 1]; const end = last ? last.offset + last.source.length : offset; if (reqSpace && next && next.type !== 'space' && next.type !== 'newline' && next.type !== 'comma' && (next.type !== 'scalar' || next.source !== '')) onError(next.offset, 'MISSING_CHAR', 'Tags and anchors must be separated from the next token by white space'); return { comma, found, spaceBefore, comment, hasNewline, hasNewlineAfterProp, anchor, tag, end, start: start ?? end }; } function containsNewline(key) { if (!key) return null; switch (key.type) { case 'alias': case 'scalar': case 'double-quoted-scalar': case 'single-quoted-scalar': if (key.source.includes('\n')) return true; if (key.end) for (const st of key.end) if (st.type === 'newline') return true; return false; case 'flow-collection': for (const it of key.items) { for (const st of it.start) if (st.type === 'newline') return true; if (it.sep) for (const st of it.sep) if (st.type === 'newline') return true; if (containsNewline(it.key) || containsNewline(it.value)) return true; } return false; default: return true; } } function flowIndentCheck(indent, fc, onError) { if (fc?.type === 'flow-collection') { const end = fc.end[0]; if (end.indent === indent && (end.source === ']' || end.source === '}') && containsNewline(fc)) { const msg = 'Flow end indicator should be more indented than parent'; onError(end, 'BAD_INDENT', msg, true); } } } function mapIncludes(ctx, items, search) { const { uniqueKeys } = ctx.options; if (uniqueKeys === false) return false; const isEqual = typeof uniqueKeys === 'function' ? 
uniqueKeys : (a, b) => a === b || (isScalar$1(a) && isScalar$1(b) && a.value === b.value && !(a.value === '<<' && ctx.schema.merge)); return items.some(pair => isEqual(pair.key, search)); } const startColMsg = 'All mapping items must start at the same column'; function resolveBlockMap({ composeNode, composeEmptyNode }, ctx, bm, onError, tag) { const NodeClass = tag?.nodeClass ?? YAMLMap; const map = new NodeClass(ctx.schema); if (ctx.atRoot) ctx.atRoot = false; let offset = bm.offset; let commentEnd = null; for (const collItem of bm.items) { const { start, key, sep, value } = collItem; // key properties const keyProps = resolveProps(start, { indicator: 'explicit-key-ind', next: key ?? sep?.[0], offset, onError, startOnNewline: true }); const implicitKey = !keyProps.found; if (implicitKey) { if (key) { if (key.type === 'block-seq') onError(offset, 'BLOCK_AS_IMPLICIT_KEY', 'A block sequence may not be used as an implicit map key'); else if ('indent' in key && key.indent !== bm.indent) onError(offset, 'BAD_INDENT', startColMsg); } if (!keyProps.anchor && !keyProps.tag && !sep) { commentEnd = keyProps.end; if (keyProps.comment) { if (map.comment) map.comment += '\n' + keyProps.comment; else map.comment = keyProps.comment; } continue; } if (keyProps.hasNewlineAfterProp || containsNewline(key)) { onError(key ?? start[start.length - 1], 'MULTILINE_IMPLICIT_KEY', 'Implicit keys need to be on a single line'); } } else if (keyProps.found?.indent !== bm.indent) { onError(offset, 'BAD_INDENT', startColMsg); } // key value const keyStart = keyProps.end; const keyNode = key ? composeNode(ctx, key, keyProps, onError) : composeEmptyNode(ctx, keyStart, start, null, keyProps, onError); if (ctx.schema.compat) flowIndentCheck(bm.indent, key, onError); if (mapIncludes(ctx, map.items, keyNode)) onError(keyStart, 'DUPLICATE_KEY', 'Map keys must be unique'); // value properties const valueProps = resolveProps(sep ?? [], { indicator: 'map-value-ind', next: value, offset: keyNode.range[2], onError, startOnNewline: !key || key.type === 'block-scalar' }); offset = valueProps.end; if (valueProps.found) { if (implicitKey) { if (value?.type === 'block-map' && !valueProps.hasNewline) onError(offset, 'BLOCK_AS_IMPLICIT_KEY', 'Nested mappings are not allowed in compact mappings'); if (ctx.options.strict && keyProps.start < valueProps.found.offset - 1024) onError(keyNode.range, 'KEY_OVER_1024_CHARS', 'The : indicator must be at most 1024 chars after the start of an implicit block mapping key'); } // value value const valueNode = value ? composeNode(ctx, value, valueProps, onError) : composeEmptyNode(ctx, offset, sep, null, valueProps, onError); if (ctx.schema.compat) flowIndentCheck(bm.indent, value, onError); offset = valueNode.range[2]; const pair = new Pair(keyNode, valueNode); if (ctx.options.keepSourceTokens) pair.srcToken = collItem; map.items.push(pair); } else { // key with no value if (implicitKey) onError(keyNode.range, 'MISSING_CHAR', 'Implicit map keys need to be followed by map values'); if (valueProps.comment) { if (keyNode.comment) keyNode.comment += '\n' + valueProps.comment; else keyNode.comment = valueProps.comment; } const pair = new Pair(keyNode); if (ctx.options.keepSourceTokens) pair.srcToken = collItem; map.items.push(pair); } } if (commentEnd && commentEnd < offset) onError(commentEnd, 'IMPOSSIBLE', 'Map comment with trailing content'); map.range = [bm.offset, offset, commentEnd ?? 
offset]; return map; } function resolveBlockSeq({ composeNode, composeEmptyNode }, ctx, bs, onError, tag) { const NodeClass = tag?.nodeClass ?? YAMLSeq; const seq = new NodeClass(ctx.schema); if (ctx.atRoot) ctx.atRoot = false; let offset = bs.offset; let commentEnd = null; for (const { start, value } of bs.items) { const props = resolveProps(start, { indicator: 'seq-item-ind', next: value, offset, onError, startOnNewline: true }); if (!props.found) { if (props.anchor || props.tag || value) { if (value && value.type === 'block-seq') onError(props.end, 'BAD_INDENT', 'All sequence items must start at the same column'); else onError(offset, 'MISSING_CHAR', 'Sequence item without - indicator'); } else { commentEnd = props.end; if (props.comment) seq.comment = props.comment; continue; } } const node = value ? composeNode(ctx, value, props, onError) : composeEmptyNode(ctx, props.end, start, null, props, onError); if (ctx.schema.compat) flowIndentCheck(bs.indent, value, onError); offset = node.range[2]; seq.items.push(node); } seq.range = [bs.offset, offset, commentEnd ?? offset]; return seq; } function resolveEnd(end, offset, reqSpace, onError) { let comment = ''; if (end) { let hasSpace = false; let sep = ''; for (const token of end) { const { source, type } = token; switch (type) { case 'space': hasSpace = true; break; case 'comment': { if (reqSpace && !hasSpace) onError(token, 'MISSING_CHAR', 'Comments must be separated from other tokens by white space characters'); const cb = source.substring(1) || ' '; if (!comment) comment = cb; else comment += sep + cb; sep = ''; break; } case 'newline': if (comment) sep += source; hasSpace = true; break; default: onError(token, 'UNEXPECTED_TOKEN', `Unexpected ${type} at node end`); } offset += source.length; } } return { comment, offset }; } const blockMsg = 'Block collections are not allowed within flow collections'; const isBlock$1 = (token) => token && (token.type === 'block-map' || token.type === 'block-seq'); function resolveFlowCollection({ composeNode, composeEmptyNode }, ctx, fc, onError, tag) { const isMap = fc.start.source === '{'; const fcName = isMap ? 'flow map' : 'flow sequence'; const NodeClass = (tag?.nodeClass ?? (isMap ? YAMLMap : YAMLSeq)); const coll = new NodeClass(ctx.schema); coll.flow = true; const atRoot = ctx.atRoot; if (atRoot) ctx.atRoot = false; let offset = fc.offset + fc.start.source.length; for (let i = 0; i < fc.items.length; ++i) { const collItem = fc.items[i]; const { start, key, sep, value } = collItem; const props = resolveProps(start, { flow: fcName, indicator: 'explicit-key-ind', next: key ?? 
sep?.[0], offset, onError, startOnNewline: false }); if (!props.found) { if (!props.anchor && !props.tag && !sep && !value) { if (i === 0 && props.comma) onError(props.comma, 'UNEXPECTED_TOKEN', `Unexpected , in ${fcName}`); else if (i < fc.items.length - 1) onError(props.start, 'UNEXPECTED_TOKEN', `Unexpected empty item in ${fcName}`); if (props.comment) { if (coll.comment) coll.comment += '\n' + props.comment; else coll.comment = props.comment; } offset = props.end; continue; } if (!isMap && ctx.options.strict && containsNewline(key)) onError(key, // checked by containsNewline() 'MULTILINE_IMPLICIT_KEY', 'Implicit keys of flow sequence pairs need to be on a single line'); } if (i === 0) { if (props.comma) onError(props.comma, 'UNEXPECTED_TOKEN', `Unexpected , in ${fcName}`); } else { if (!props.comma) onError(props.start, 'MISSING_CHAR', `Missing , between ${fcName} items`); if (props.comment) { let prevItemComment = ''; loop: for (const st of start) { switch (st.type) { case 'comma': case 'space': break; case 'comment': prevItemComment = st.source.substring(1); break loop; default: break loop; } } if (prevItemComment) { let prev = coll.items[coll.items.length - 1]; if (isPair(prev)) prev = prev.value ?? prev.key; if (prev.comment) prev.comment += '\n' + prevItemComment; else prev.comment = prevItemComment; props.comment = props.comment.substring(prevItemComment.length + 1); } } } if (!isMap && !sep && !props.found) { // item is a value in a seq // → key & sep are empty, start does not include ? or : const valueNode = value ? composeNode(ctx, value, props, onError) : composeEmptyNode(ctx, props.end, sep, null, props, onError); coll.items.push(valueNode); offset = valueNode.range[2]; if (isBlock$1(value)) onError(valueNode.range, 'BLOCK_IN_FLOW', blockMsg); } else { // item is a key+value pair // key value const keyStart = props.end; const keyNode = key ? composeNode(ctx, key, props, onError) : composeEmptyNode(ctx, keyStart, start, null, props, onError); if (isBlock$1(key)) onError(keyNode.range, 'BLOCK_IN_FLOW', blockMsg); // value properties const valueProps = resolveProps(sep ?? [], { flow: fcName, indicator: 'map-value-ind', next: value, offset: keyNode.range[2], onError, startOnNewline: false }); if (valueProps.found) { if (!isMap && !props.found && ctx.options.strict) { if (sep) for (const st of sep) { if (st === valueProps.found) break; if (st.type === 'newline') { onError(st, 'MULTILINE_IMPLICIT_KEY', 'Implicit keys of flow sequence pairs need to be on a single line'); break; } } if (props.start < valueProps.found.offset - 1024) onError(valueProps.found, 'KEY_OVER_1024_CHARS', 'The : indicator must be at most 1024 chars after the start of an implicit flow sequence key'); } } else if (value) { if ('source' in value && value.source && value.source[0] === ':') onError(value, 'MISSING_CHAR', `Missing space after : in ${fcName}`); else onError(valueProps.start, 'MISSING_CHAR', `Missing , or : between ${fcName} items`); } // value value const valueNode = value ? composeNode(ctx, value, valueProps, onError) : valueProps.found ? 
composeEmptyNode(ctx, valueProps.end, sep, null, valueProps, onError) : null; if (valueNode) { if (isBlock$1(value)) onError(valueNode.range, 'BLOCK_IN_FLOW', blockMsg); } else if (valueProps.comment) { if (keyNode.comment) keyNode.comment += '\n' + valueProps.comment; else keyNode.comment = valueProps.comment; } const pair = new Pair(keyNode, valueNode); if (ctx.options.keepSourceTokens) pair.srcToken = collItem; if (isMap) { const map = coll; if (mapIncludes(ctx, map.items, keyNode)) onError(keyStart, 'DUPLICATE_KEY', 'Map keys must be unique'); map.items.push(pair); } else { const map = new YAMLMap(ctx.schema); map.flow = true; map.items.push(pair); coll.items.push(map); } offset = valueNode ? valueNode.range[2] : valueProps.end; } } const expectedEnd = isMap ? '}' : ']'; const [ce, ...ee] = fc.end; let cePos = offset; if (ce && ce.source === expectedEnd) cePos = ce.offset + ce.source.length; else { const name = fcName[0].toUpperCase() + fcName.substring(1); const msg = atRoot ? `${name} must end with a ${expectedEnd}` : `${name} in block collection must be sufficiently indented and end with a ${expectedEnd}`; onError(offset, atRoot ? 'MISSING_CHAR' : 'BAD_INDENT', msg); if (ce && ce.source.length !== 1) ee.unshift(ce); } if (ee.length > 0) { const end = resolveEnd(ee, cePos, ctx.options.strict, onError); if (end.comment) { if (coll.comment) coll.comment += '\n' + end.comment; else coll.comment = end.comment; } coll.range = [fc.offset, cePos, end.offset]; } else { coll.range = [fc.offset, cePos, cePos]; } return coll; } function resolveCollection(CN, ctx, token, onError, tagName, tag) { const coll = token.type === 'block-map' ? resolveBlockMap(CN, ctx, token, onError, tag) : token.type === 'block-seq' ? resolveBlockSeq(CN, ctx, token, onError, tag) : resolveFlowCollection(CN, ctx, token, onError, tag); const Coll = coll.constructor; // If we got a tagName matching the class, or the tag name is '!', // then use the tagName from the node class used to create it. if (tagName === '!' || tagName === Coll.tagName) { coll.tag = Coll.tagName; return coll; } if (tagName) coll.tag = tagName; return coll; } function composeCollection(CN, ctx, token, tagToken, onError) { const tagName = !tagToken ? null : ctx.directives.tagName(tagToken.source, msg => onError(tagToken, 'TAG_RESOLVE_FAILED', msg)); const expType = token.type === 'block-map' ? 'map' : token.type === 'block-seq' ? 'seq' : token.start.source === '{' ? 'map' : 'seq'; // shortcut: check if it's a generic YAMLMap or YAMLSeq // before jumping into the custom tag logic. if (!tagToken || !tagName || tagName === '!' || (tagName === YAMLMap.tagName && expType === 'map') || (tagName === YAMLSeq.tagName && expType === 'seq') || !expType) { return resolveCollection(CN, ctx, token, onError, tagName); } let tag = ctx.schema.tags.find(t => t.tag === tagName && t.collection === expType); if (!tag) { const kt = ctx.schema.knownTags[tagName]; if (kt && kt.collection === expType) { ctx.schema.tags.push(Object.assign({}, kt, { default: false })); tag = kt; } else { if (kt?.collection) { onError(tagToken, 'BAD_COLLECTION_TYPE', `${kt.tag} used for ${expType} collection, but expects ${kt.collection}`, true); } else { onError(tagToken, 'TAG_RESOLVE_FAILED', `Unresolved tag: ${tagName}`, true); } return resolveCollection(CN, ctx, token, onError, tagName); } } const coll = resolveCollection(CN, ctx, token, onError, tagName, tag); const res = tag.resolve?.(coll, msg => onError(tagToken, 'TAG_RESOLVE_FAILED', msg), ctx.options) ?? 
coll; const node = isNode$1(res) ? res : new Scalar(res); node.range = coll.range; node.tag = tagName; if (tag?.format) node.format = tag.format; return node; } function resolveBlockScalar(scalar, strict, onError) { const start = scalar.offset; const header = parseBlockScalarHeader(scalar, strict, onError); if (!header) return { value: '', type: null, comment: '', range: [start, start, start] }; const type = header.mode === '>' ? Scalar.BLOCK_FOLDED : Scalar.BLOCK_LITERAL; const lines = scalar.source ? splitLines(scalar.source) : []; // determine the end of content & start of chomping let chompStart = lines.length; for (let i = lines.length - 1; i >= 0; --i) { const content = lines[i][1]; if (content === '' || content === '\r') chompStart = i; else break; } // shortcut for empty contents if (chompStart === 0) { const value = header.chomp === '+' && lines.length > 0 ? '\n'.repeat(Math.max(1, lines.length - 1)) : ''; let end = start + header.length; if (scalar.source) end += scalar.source.length; return { value, type, comment: header.comment, range: [start, end, end] }; } // find the indentation level to trim from start let trimIndent = scalar.indent + header.indent; let offset = scalar.offset + header.length; let contentStart = 0; for (let i = 0; i < chompStart; ++i) { const [indent, content] = lines[i]; if (content === '' || content === '\r') { if (header.indent === 0 && indent.length > trimIndent) trimIndent = indent.length; } else { if (indent.length < trimIndent) { const message = 'Block scalars with more-indented leading empty lines must use an explicit indentation indicator'; onError(offset + indent.length, 'MISSING_CHAR', message); } if (header.indent === 0) trimIndent = indent.length; contentStart = i; break; } offset += indent.length + content.length + 1; } // include trailing more-indented empty lines in content for (let i = lines.length - 1; i >= chompStart; --i) { if (lines[i][0].length > trimIndent) chompStart = i + 1; } let value = ''; let sep = ''; let prevMoreIndented = false; // leading whitespace is kept intact for (let i = 0; i < contentStart; ++i) value += lines[i][0].slice(trimIndent) + '\n'; for (let i = contentStart; i < chompStart; ++i) { let [indent, content] = lines[i]; offset += indent.length + content.length + 1; const crlf = content[content.length - 1] === '\r'; if (crlf) content = content.slice(0, -1); /* istanbul ignore if already caught in lexer */ if (content && indent.length < trimIndent) { const src = header.indent ? 'explicit indentation indicator' : 'first line'; const message = `Block scalar lines must not be less indented than their ${src}`; onError(offset - content.length - (crlf ? 
2 : 1), 'BAD_INDENT', message); indent = ''; } if (type === Scalar.BLOCK_LITERAL) { value += sep + indent.slice(trimIndent) + content; sep = '\n'; } else if (indent.length > trimIndent || content[0] === '\t') { // more-indented content within a folded block if (sep === ' ') sep = '\n'; else if (!prevMoreIndented && sep === '\n') sep = '\n\n'; value += sep + indent.slice(trimIndent) + content; sep = '\n'; prevMoreIndented = true; } else if (content === '') { // empty line if (sep === '\n') value += '\n'; else sep = '\n'; } else { value += sep + content; sep = ' '; prevMoreIndented = false; } } switch (header.chomp) { case '-': break; case '+': for (let i = chompStart; i < lines.length; ++i) value += '\n' + lines[i][0].slice(trimIndent); if (value[value.length - 1] !== '\n') value += '\n'; break; default: value += '\n'; } const end = start + header.length + scalar.source.length; return { value, type, comment: header.comment, range: [start, end, end] }; } function parseBlockScalarHeader({ offset, props }, strict, onError) { /* istanbul ignore if should not happen */ if (props[0].type !== 'block-scalar-header') { onError(props[0], 'IMPOSSIBLE', 'Block scalar header not found'); return null; } const { source } = props[0]; const mode = source[0]; let indent = 0; let chomp = ''; let error = -1; for (let i = 1; i < source.length; ++i) { const ch = source[i]; if (!chomp && (ch === '-' || ch === '+')) chomp = ch; else { const n = Number(ch); if (!indent && n) indent = n; else if (error === -1) error = offset + i; } } if (error !== -1) onError(error, 'UNEXPECTED_TOKEN', `Block scalar header includes extra characters: ${source}`); let hasSpace = false; let comment = ''; let length = source.length; for (let i = 1; i < props.length; ++i) { const token = props[i]; switch (token.type) { case 'space': hasSpace = true; // fallthrough case 'newline': length += token.source.length; break; case 'comment': if (strict && !hasSpace) { const message = 'Comments must be separated from other tokens by white space characters'; onError(token, 'MISSING_CHAR', message); } length += token.source.length; comment = token.source.substring(1); break; case 'error': onError(token, 'UNEXPECTED_TOKEN', token.message); length += token.source.length; break; /* istanbul ignore next should not happen */ default: { const message = `Unexpected token in block scalar header: ${token.type}`; onError(token, 'UNEXPECTED_TOKEN', message); const ts = token.source; if (ts && typeof ts === 'string') length += ts.length; } } } return { mode, indent, chomp, comment, length }; } /** @returns Array of lines split up as `[indent, content]` */ function splitLines(source) { const split = source.split(/\n( *)/); const first = split[0]; const m = first.match(/^( *)/); const line0 = m?.[1] ? 
[m[1], first.slice(m[1].length)] : ['', first]; const lines = [line0]; for (let i = 1; i < split.length; i += 2) lines.push([split[i], split[i + 1]]); return lines; } function resolveFlowScalar(scalar, strict, onError) { const { offset, type, source, end } = scalar; let _type; let value; const _onError = (rel, code, msg) => onError(offset + rel, code, msg); switch (type) { case 'scalar': _type = Scalar.PLAIN; value = plainValue(source, _onError); break; case 'single-quoted-scalar': _type = Scalar.QUOTE_SINGLE; value = singleQuotedValue(source, _onError); break; case 'double-quoted-scalar': _type = Scalar.QUOTE_DOUBLE; value = doubleQuotedValue(source, _onError); break; /* istanbul ignore next should not happen */ default: onError(scalar, 'UNEXPECTED_TOKEN', `Expected a flow scalar value, but found: ${type}`); return { value: '', type: null, comment: '', range: [offset, offset + source.length, offset + source.length] }; } const valueEnd = offset + source.length; const re = resolveEnd(end, valueEnd, strict, onError); return { value, type: _type, comment: re.comment, range: [offset, valueEnd, re.offset] }; } function plainValue(source, onError) { let badChar = ''; switch (source[0]) { /* istanbul ignore next should not happen */ case '\t': badChar = 'a tab character'; break; case ',': badChar = 'flow indicator character ,'; break; case '%': badChar = 'directive indicator character %'; break; case '|': case '>': { badChar = `block scalar indicator ${source[0]}`; break; } case '@': case '`': { badChar = `reserved character ${source[0]}`; break; } } if (badChar) onError(0, 'BAD_SCALAR_START', `Plain value cannot start with ${badChar}`); return foldLines(source); } function singleQuotedValue(source, onError) { if (source[source.length - 1] !== "'" || source.length === 1) onError(source.length, 'MISSING_CHAR', "Missing closing 'quote"); return foldLines(source.slice(1, -1)).replace(/''/g, "'"); } function foldLines(source) { /** * The negative lookbehind here and in the `re` RegExp is to * prevent causing a polynomial search time in certain cases. * * The try-catch is for Safari, which doesn't support this yet: * https://caniuse.com/js-regexp-lookbehind */ let first, line; try { first = new RegExp('(.*?)(?<![ \t])[ \t]*\r?\n', 'sy'); line = new RegExp('[ \t]*(.*?)(?:(?<![ \t])[ \t]*)?\r?\n', 'sy'); } catch { first = /(.*?)[ \t]*\r?\n/sy; line = /[ \t]*(.*?)[ \t]*\r?\n/sy; } let match = first.exec(source); if (!match) return source; let res = match[1]; let sep = ' '; let pos = first.lastIndex; line.lastIndex = pos; while ((match = line.exec(source))) { if (match[1] === '') { if (sep === '\n') res += sep; else sep = '\n'; } else { res += sep + match[1]; sep = ' '; } pos = line.lastIndex; } const endLine = /[ \t]*(.*)/sy; endLine.lastIndex = pos; match = endLine.exec(source); return res + sep + (match?.[1] ?? ''); } function doubleQuotedValue(source, onError) { let res = ''; for (let i = 1; i < source.length - 1; ++i) { const ch = source[i]; if (ch === '\r' && source[i + 1] === '\n') continue; if (ch === '\n') { const { fold, offset } = foldNewline(source, i); res += fold; i = offset; } else if (ch === '\\') { let next = source[++i]; const cc = escapeCodes[next]; if (cc) res += cc; else if (next === '\n') { /* skip escaped newlines, but still trim the following line */ next = source[i + 1]; while (next === ' ' || next === '\t') next = source[++i + 1]; } else if (next === '\r' && source[i + 1] === '\n') { /* skip escaped CRLF newlines, but still trim the following line */ next = source[++i + 1]; while (next === ' ' || next === '\t') next = source[++i + 1]; } else if (next === 'x' || next === 'u' || next === 'U') { const length = { x: 2, u: 4, U: 8 }[next]; res += parseCharCode(source, i + 1, length, onError); i += length; } else { const raw = source.substr(i - 1, 2); onError(i - 1, 'BAD_DQ_ESCAPE', `Invalid escape sequence ${raw}`); res += raw; } } else if (ch === ' ' || ch === '\t') { /* trim trailing whitespace */ const wsStart = i; let next = source[i + 1]; while (next === ' ' || next === '\t') next = source[++i + 1]; if (next !== '\n' && !(next === '\r' && source[i + 2] === '\n')) res += i > wsStart ? source.slice(wsStart, i + 1) : ch; } else { res += ch; } } if (source[source.length - 1] !== '"' || source.length === 1) onError(source.length, 'MISSING_CHAR', 'Missing closing "quote'); return res; } /** * Fold a single newline into a space, multiple newlines to N - 1 newlines. * Presumes `source[offset] === '\n'` */ function foldNewline(source, offset) { let fold = ''; let ch = source[offset + 1]; while (ch === ' ' || ch === '\t' || ch === '\n' || ch === '\r') { if (ch === '\r' && source[offset + 2] !== '\n') break; if (ch === '\n') fold += '\n'; offset += 1; ch = source[offset + 1]; } if (!fold) fold = ' '; return { fold, offset }; } const escapeCodes = { '0': '\0', a: '\x07', b: '\b', e: '\x1b', f: '\f', n: '\n', r: '\r', t: '\t', v: '\v', N: '\u0085', _: '\u00a0', L: '\u2028', P: '\u2029', ' ': ' ', '"': '"', '/': '/', '\\': '\\', '\t': '\t' }; function parseCharCode(source, offset, length, onError) { const cc = source.substr(offset, length); const ok = cc.length === length && /^[0-9a-fA-F]+$/.test(cc); const code = ok ?
parseInt(cc, 16) : NaN; if (isNaN(code)) { const raw = source.substr(offset - 2, length + 2); onError(offset - 2, 'BAD_DQ_ESCAPE', `Invalid escape sequence ${raw}`); return raw; } return String.fromCodePoint(code); } function composeScalar(ctx, token, tagToken, onError) { const { value, type, comment, range } = token.type === 'block-scalar' ? resolveBlockScalar(token, ctx.options.strict, onError) : resolveFlowScalar(token, ctx.options.strict, onError); const tagName = tagToken ? ctx.directives.tagName(tagToken.source, msg => onError(tagToken, 'TAG_RESOLVE_FAILED', msg)) : null; const tag = tagToken && tagName ? findScalarTagByName(ctx.schema, value, tagName, tagToken, onError) : token.type === 'scalar' ? findScalarTagByTest(ctx, value, token, onError) : ctx.schema[SCALAR$1]; let scalar; try { const res = tag.resolve(value, msg => onError(tagToken ?? token, 'TAG_RESOLVE_FAILED', msg), ctx.options); scalar = isScalar$1(res) ? res : new Scalar(res); } catch (error) { const msg = error instanceof Error ? error.message : String(error); onError(tagToken ?? token, 'TAG_RESOLVE_FAILED', msg); scalar = new Scalar(value); } scalar.range = range; scalar.source = value; if (type) scalar.type = type; if (tagName) scalar.tag = tagName; if (tag.format) scalar.format = tag.format; if (comment) scalar.comment = comment; return scalar; } function findScalarTagByName(schema, value, tagName, tagToken, onError) { if (tagName === '!') return schema[SCALAR$1]; // non-specific tag const matchWithTest = []; for (const tag of schema.tags) { if (!tag.collection && tag.tag === tagName) { if (tag.default && tag.test) matchWithTest.push(tag); else return tag; } } for (const tag of matchWithTest) if (tag.test?.test(value)) return tag; const kt = schema.knownTags[tagName]; if (kt && !kt.collection) { // Ensure that the known tag is available for stringifying, // but does not get used by default. schema.tags.push(Object.assign({}, kt, { default: false, test: undefined })); return kt; } onError(tagToken, 'TAG_RESOLVE_FAILED', `Unresolved tag: ${tagName}`, tagName !== 'tag:yaml.org,2002:str'); return schema[SCALAR$1]; } function findScalarTagByTest({ directives, schema }, value, token, onError) { const tag = schema.tags.find(tag => tag.default && tag.test?.test(value)) || schema[SCALAR$1]; if (schema.compat) { const compat = schema.compat.find(tag => tag.default && tag.test?.test(value)) ?? schema[SCALAR$1]; if (tag.tag !== compat.tag) { const ts = directives.tagString(tag.tag); const cs = directives.tagString(compat.tag); const msg = `Value may be parsed as either ${ts} or ${cs}`; onError(token, 'TAG_RESOLVE_FAILED', msg, true); } } return tag; } function emptyScalarPosition(offset, before, pos) { if (before) { if (pos === null) pos = before.length; for (let i = pos - 1; i >= 0; --i) { let st = before[i]; switch (st.type) { case 'space': case 'comment': case 'newline': offset -= st.source.length; continue; } // Technically, an empty scalar is immediately after the last non-empty // node, but it's more useful to place it after any whitespace. 
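/*
 * Illustrative note (not part of the upstream yaml source): findScalarTagByName
 * and findScalarTagByTest above decide which schema tag a scalar resolves to.
 * Roughly, with the default core schema:
 *
 *   - a plain `true` matches the boolean tag's test and resolves to `true`
 *   - a quoted `"true"` skips the tag tests and stays the string 'true'
 *   - an explicit `!!str true` is looked up by name as tag:yaml.org,2002:str
 *   - an unknown explicit tag reports TAG_RESOLVE_FAILED and falls back to the
 *     plain string scalar
 *
 * This is a reading of the code above, not a normative description.
 */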
st = before[++i]; while (st?.type === 'space') { offset += st.source.length; st = before[++i]; } break; } } return offset; } const CN = { composeNode, composeEmptyNode }; function composeNode(ctx, token, props, onError) { const { spaceBefore, comment, anchor, tag } = props; let node; let isSrcToken = true; switch (token.type) { case 'alias': node = composeAlias(ctx, token, onError); if (anchor || tag) onError(token, 'ALIAS_PROPS', 'An alias node must not specify any properties'); break; case 'scalar': case 'single-quoted-scalar': case 'double-quoted-scalar': case 'block-scalar': node = composeScalar(ctx, token, tag, onError); if (anchor) node.anchor = anchor.source.substring(1); break; case 'block-map': case 'block-seq': case 'flow-collection': node = composeCollection(CN, ctx, token, tag, onError); if (anchor) node.anchor = anchor.source.substring(1); break; default: { const message = token.type === 'error' ? token.message : `Unsupported token (type: ${token.type})`; onError(token, 'UNEXPECTED_TOKEN', message); node = composeEmptyNode(ctx, token.offset, undefined, null, props, onError); isSrcToken = false; } } if (anchor && node.anchor === '') onError(anchor, 'BAD_ALIAS', 'Anchor cannot be an empty string'); if (spaceBefore) node.spaceBefore = true; if (comment) { if (token.type === 'scalar' && token.source === '') node.comment = comment; else node.commentBefore = comment; } // @ts-expect-error Type checking misses meaning of isSrcToken if (ctx.options.keepSourceTokens && isSrcToken) node.srcToken = token; return node; } function composeEmptyNode(ctx, offset, before, pos, { spaceBefore, comment, anchor, tag, end }, onError) { const token = { type: 'scalar', offset: emptyScalarPosition(offset, before, pos), indent: -1, source: '' }; const node = composeScalar(ctx, token, tag, onError); if (anchor) { node.anchor = anchor.source.substring(1); if (node.anchor === '') onError(anchor, 'BAD_ALIAS', 'Anchor cannot be an empty string'); } if (spaceBefore) node.spaceBefore = true; if (comment) { node.comment = comment; node.range[2] = end; } return node; } function composeAlias({ options }, { offset, source, end }, onError) { const alias = new Alias(source.substring(1)); if (alias.source === '') onError(offset, 'BAD_ALIAS', 'Alias cannot be an empty string'); if (alias.source.endsWith(':')) onError(offset + source.length - 1, 'BAD_ALIAS', 'Alias ending in : is ambiguous', true); const valueEnd = offset + source.length; const re = resolveEnd(end, valueEnd, options.strict, onError); alias.range = [offset, valueEnd, re.offset]; if (re.comment) alias.comment = re.comment; return alias; } function composeDoc(options, directives, { offset, start, value, end }, onError) { const opts = Object.assign({ _directives: directives }, options); const doc = new Document(undefined, opts); const ctx = { atRoot: true, directives: doc.directives, options: doc.options, schema: doc.schema }; const props = resolveProps(start, { indicator: 'doc-start', next: value ?? end?.[0], offset, onError, startOnNewline: true }); if (props.found) { doc.directives.docStart = true; if (value && (value.type === 'block-map' || value.type === 'block-seq') && !props.hasNewline) onError(props.end, 'MISSING_CHAR', 'Block collection cannot start on same line with directives-end marker'); } // @ts-expect-error If Contents is set, let's trust the user doc.contents = value ? 
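/*
 * Illustrative note (not part of the upstream yaml source): composeAlias above
 * turns an `*name` token into an Alias node, dropping the leading `*`. For a
 * document such as
 *
 *   base: &b { x: 1 }
 *   copy: *b
 *
 * the value of `copy` becomes an Alias with source 'b'; the anchored value is
 * only substituted when the document is converted, e.g. via doc.toJS(). An
 * empty alias name, or one ending in ':', is reported as a BAD_ALIAS error.
 */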
composeNode(ctx, value, props, onError) : composeEmptyNode(ctx, props.end, start, null, props, onError); const contentEnd = doc.contents.range[2]; const re = resolveEnd(end, contentEnd, false, onError); if (re.comment) doc.comment = re.comment; doc.range = [offset, contentEnd, re.offset]; return doc; } function getErrorPos(src) { if (typeof src === 'number') return [src, src + 1]; if (Array.isArray(src)) return src.length === 2 ? src : [src[0], src[1]]; const { offset, source } = src; return [offset, offset + (typeof source === 'string' ? source.length : 1)]; } function parsePrelude(prelude) { let comment = ''; let atComment = false; let afterEmptyLine = false; for (let i = 0; i < prelude.length; ++i) { const source = prelude[i]; switch (source[0]) { case '#': comment += (comment === '' ? '' : afterEmptyLine ? '\n\n' : '\n') + (source.substring(1) || ' '); atComment = true; afterEmptyLine = false; break; case '%': if (prelude[i + 1]?.[0] !== '#') i += 1; atComment = false; break; default: // This may be wrong after doc-end, but in that case it doesn't matter if (!atComment) afterEmptyLine = true; atComment = false; } } return { comment, afterEmptyLine }; } /** * Compose a stream of CST nodes into a stream of YAML Documents. * * ```ts * import { Composer, Parser } from 'yaml' * * const src: string = ... * const tokens = new Parser().parse(src) * const docs = new Composer().compose(tokens) * ``` */ class Composer { constructor(options = {}) { this.doc = null; this.atDirectives = false; this.prelude = []; this.errors = []; this.warnings = []; this.onError = (source, code, message, warning) => { const pos = getErrorPos(source); if (warning) this.warnings.push(new YAMLWarning(pos, code, message)); else this.errors.push(new YAMLParseError(pos, code, message)); }; // eslint-disable-next-line @typescript-eslint/prefer-nullish-coalescing this.directives = new Directives({ version: options.version || '1.2' }); this.options = options; } decorate(doc, afterDoc) { const { comment, afterEmptyLine } = parsePrelude(this.prelude); //console.log({ dc: doc.comment, prelude, comment }) if (comment) { const dc = doc.contents; if (afterDoc) { doc.comment = doc.comment ? `${doc.comment}\n${comment}` : comment; } else if (afterEmptyLine || doc.directives.docStart || !dc) { doc.commentBefore = comment; } else if (isCollection$1(dc) && !dc.flow && dc.items.length > 0) { let it = dc.items[0]; if (isPair(it)) it = it.key; const cb = it.commentBefore; it.commentBefore = cb ? `${comment}\n${cb}` : comment; } else { const cb = dc.commentBefore; dc.commentBefore = cb ? `${comment}\n${cb}` : comment; } } if (afterDoc) { Array.prototype.push.apply(doc.errors, this.errors); Array.prototype.push.apply(doc.warnings, this.warnings); } else { doc.errors = this.errors; doc.warnings = this.warnings; } this.prelude = []; this.errors = []; this.warnings = []; } /** * Current stream status information. * * Mostly useful at the end of input for an empty stream. */ streamInfo() { return { comment: parsePrelude(this.prelude).comment, directives: this.directives, errors: this.errors, warnings: this.warnings }; } /** * Compose tokens into documents. * * @param forceDoc - If the stream contains no document, still emit a final document including any comments and directives that would be applied to a subsequent document. * @param endOffset - Should be set if `forceDoc` is also set, to set the document range end and to indicate errors correctly. 
*/ *compose(tokens, forceDoc = false, endOffset = -1) { for (const token of tokens) yield* this.next(token); yield* this.end(forceDoc, endOffset); } /** Advance the composer by one CST token. */ *next(token) { switch (token.type) { case 'directive': this.directives.add(token.source, (offset, message, warning) => { const pos = getErrorPos(token); pos[0] += offset; this.onError(pos, 'BAD_DIRECTIVE', message, warning); }); this.prelude.push(token.source); this.atDirectives = true; break; case 'document': { const doc = composeDoc(this.options, this.directives, token, this.onError); if (this.atDirectives && !doc.directives.docStart) this.onError(token, 'MISSING_CHAR', 'Missing directives-end/doc-start indicator line'); this.decorate(doc, false); if (this.doc) yield this.doc; this.doc = doc; this.atDirectives = false; break; } case 'byte-order-mark': case 'space': break; case 'comment': case 'newline': this.prelude.push(token.source); break; case 'error': { const msg = token.source ? `${token.message}: ${JSON.stringify(token.source)}` : token.message; const error = new YAMLParseError(getErrorPos(token), 'UNEXPECTED_TOKEN', msg); if (this.atDirectives || !this.doc) this.errors.push(error); else this.doc.errors.push(error); break; } case 'doc-end': { if (!this.doc) { const msg = 'Unexpected doc-end without preceding document'; this.errors.push(new YAMLParseError(getErrorPos(token), 'UNEXPECTED_TOKEN', msg)); break; } this.doc.directives.docEnd = true; const end = resolveEnd(token.end, token.offset + token.source.length, this.doc.options.strict, this.onError); this.decorate(this.doc, true); if (end.comment) { const dc = this.doc.comment; this.doc.comment = dc ? `${dc}\n${end.comment}` : end.comment; } this.doc.range[2] = end.offset; break; } default: this.errors.push(new YAMLParseError(getErrorPos(token), 'UNEXPECTED_TOKEN', `Unsupported token ${token.type}`)); } } /** * Call at end of input to yield any remaining document. * * @param forceDoc - If the stream contains no document, still emit a final document including any comments and directives that would be applied to a subsequent document. * @param endOffset - Should be set if `forceDoc` is also set, to set the document range end and to indicate errors correctly. */ *end(forceDoc = false, endOffset = -1) { if (this.doc) { this.decorate(this.doc, true); yield this.doc; this.doc = null; } else if (forceDoc) { const opts = Object.assign({ _directives: this.directives }, this.options); const doc = new Document(undefined, opts); if (this.atDirectives) this.onError(endOffset, 'MISSING_CHAR', 'Missing directives-end indicator line'); doc.range = [0, endOffset, endOffset]; this.decorate(doc, false); yield doc; } } } function resolveAsScalar(token, strict = true, onError) { if (token) { const _onError = (pos, code, message) => { const offset = typeof pos === 'number' ? pos : Array.isArray(pos) ? pos[0] : pos.offset; if (onError) onError(offset, code, message); else throw new YAMLParseError([offset, offset + 1], code, message); }; switch (token.type) { case 'scalar': case 'single-quoted-scalar': case 'double-quoted-scalar': return resolveFlowScalar(token, strict, _onError); case 'block-scalar': return resolveBlockScalar(token, strict, _onError); } } return null; } /** * Create a new scalar token with `value` * * Values that represent an actual string but may be parsed as a different type should use a `type` other than `'PLAIN'`, * as this function does not support any schema operations and won't check for such conflicts. 
* * @param value The string representation of the value, which will have its content properly indented. * @param context.end Comments and whitespace after the end of the value, or after the block scalar header. If undefined, a newline will be added. * @param context.implicitKey Being within an implicit key may affect the resolved type of the token's value. * @param context.indent The indent level of the token. * @param context.inFlow Is this scalar within a flow collection? This may affect the resolved type of the token's value. * @param context.offset The offset position of the token. * @param context.type The preferred type of the scalar token. If undefined, the previous type of the `token` will be used, defaulting to `'PLAIN'`. */ function createScalarToken(value, context) { const { implicitKey = false, indent, inFlow = false, offset = -1, type = 'PLAIN' } = context; const source = stringifyString({ type, value }, { implicitKey, indent: indent > 0 ? ' '.repeat(indent) : '', inFlow, options: { blockQuote: true, lineWidth: -1 } }); const end = context.end ?? [ { type: 'newline', offset: -1, indent, source: '\n' } ]; switch (source[0]) { case '|': case '>': { const he = source.indexOf('\n'); const head = source.substring(0, he); const body = source.substring(he + 1) + '\n'; const props = [ { type: 'block-scalar-header', offset, indent, source: head } ]; if (!addEndtoBlockProps(props, end)) props.push({ type: 'newline', offset: -1, indent, source: '\n' }); return { type: 'block-scalar', offset, indent, props, source: body }; } case '"': return { type: 'double-quoted-scalar', offset, indent, source, end }; case "'": return { type: 'single-quoted-scalar', offset, indent, source, end }; default: return { type: 'scalar', offset, indent, source, end }; } } /** * Set the value of `token` to the given string `value`, overwriting any previous contents and type that it may have. * * Best efforts are made to retain any comments previously associated with the `token`, * though all contents within a collection's `items` will be overwritten. * * Values that represent an actual string but may be parsed as a different type should use a `type` other than `'PLAIN'`, * as this function does not support any schema operations and won't check for such conflicts. * * @param token Any token. If it does not include an `indent` value, the value will be stringified as if it were an implicit key. * @param value The string representation of the value, which will have its content properly indented. * @param context.afterKey In most cases, values after a key should have an additional level of indentation. * @param context.implicitKey Being within an implicit key may affect the resolved type of the token's value. * @param context.inFlow Being within a flow collection may affect the resolved type of the token's value. * @param context.type The preferred type of the scalar token. If undefined, the previous type of the `token` will be used, defaulting to `'PLAIN'`. */ function setScalarValue(token, value, context = {}) { let { afterKey = false, implicitKey = false, inFlow = false, type } = context; let indent = 'indent' in token ? 
token.indent : null; if (afterKey && typeof indent === 'number') indent += 2; if (!type) switch (token.type) { case 'single-quoted-scalar': type = 'QUOTE_SINGLE'; break; case 'double-quoted-scalar': type = 'QUOTE_DOUBLE'; break; case 'block-scalar': { const header = token.props[0]; if (header.type !== 'block-scalar-header') throw new Error('Invalid block scalar header'); type = header.source[0] === '>' ? 'BLOCK_FOLDED' : 'BLOCK_LITERAL'; break; } default: type = 'PLAIN'; } const source = stringifyString({ type, value }, { implicitKey: implicitKey || indent === null, indent: indent !== null && indent > 0 ? ' '.repeat(indent) : '', inFlow, options: { blockQuote: true, lineWidth: -1 } }); switch (source[0]) { case '|': case '>': setBlockScalarValue(token, source); break; case '"': setFlowScalarValue(token, source, 'double-quoted-scalar'); break; case "'": setFlowScalarValue(token, source, 'single-quoted-scalar'); break; default: setFlowScalarValue(token, source, 'scalar'); } } function setBlockScalarValue(token, source) { const he = source.indexOf('\n'); const head = source.substring(0, he); const body = source.substring(he + 1) + '\n'; if (token.type === 'block-scalar') { const header = token.props[0]; if (header.type !== 'block-scalar-header') throw new Error('Invalid block scalar header'); header.source = head; token.source = body; } else { const { offset } = token; const indent = 'indent' in token ? token.indent : -1; const props = [ { type: 'block-scalar-header', offset, indent, source: head } ]; if (!addEndtoBlockProps(props, 'end' in token ? token.end : undefined)) props.push({ type: 'newline', offset: -1, indent, source: '\n' }); for (const key of Object.keys(token)) if (key !== 'type' && key !== 'offset') delete token[key]; Object.assign(token, { type: 'block-scalar', indent, props, source: body }); } } /** @returns `true` if last token is a newline */ function addEndtoBlockProps(props, end) { if (end) for (const st of end) switch (st.type) { case 'space': case 'comment': props.push(st); break; case 'newline': props.push(st); return true; } return false; } function setFlowScalarValue(token, source, type) { switch (token.type) { case 'scalar': case 'double-quoted-scalar': case 'single-quoted-scalar': token.type = type; token.source = source; break; case 'block-scalar': { const end = token.props.slice(1); let oa = source.length; if (token.props[0].type === 'block-scalar-header') oa -= token.props[0].source.length; for (const tok of end) tok.offset += oa; delete token.props; Object.assign(token, { type, source, end }); break; } case 'block-map': case 'block-seq': { const offset = token.offset + source.length; const nl = { type: 'newline', offset, indent: token.indent, source: '\n' }; delete token.items; Object.assign(token, { type, source, end: [nl] }); break; } default: { const indent = 'indent' in token ? token.indent : -1; const end = 'end' in token && Array.isArray(token.end) ? token.end.filter(st => st.type === 'space' || st.type === 'comment' || st.type === 'newline') : []; for (const key of Object.keys(token)) if (key !== 'type' && key !== 'offset') delete token[key]; Object.assign(token, { type, indent, source, end }); } } } /** * Stringify a CST document, token, or collection item * * Fair warning: This applies no validation whatsoever, and * simply concatenates the sources in their logical order. */ const stringify$1 = (cst) => 'type' in cst ? 
stringifyToken(cst) : stringifyItem(cst); function stringifyToken(token) { switch (token.type) { case 'block-scalar': { let res = ''; for (const tok of token.props) res += stringifyToken(tok); return res + token.source; } case 'block-map': case 'block-seq': { let res = ''; for (const item of token.items) res += stringifyItem(item); return res; } case 'flow-collection': { let res = token.start.source; for (const item of token.items) res += stringifyItem(item); for (const st of token.end) res += st.source; return res; } case 'document': { let res = stringifyItem(token); if (token.end) for (const st of token.end) res += st.source; return res; } default: { let res = token.source; if ('end' in token && token.end) for (const st of token.end) res += st.source; return res; } } } function stringifyItem({ start, key, sep, value }) { let res = ''; for (const st of start) res += st.source; if (key) res += stringifyToken(key); if (sep) for (const st of sep) res += st.source; if (value) res += stringifyToken(value); return res; } const BREAK = Symbol('break visit'); const SKIP = Symbol('skip children'); const REMOVE = Symbol('remove item'); /** * Apply a visitor to a CST document or item. * * Walks through the tree (depth-first) starting from the root, calling a * `visitor` function with two arguments when entering each item: * - `item`: The current item, which included the following members: * - `start: SourceToken[]` – Source tokens before the key or value, * possibly including its anchor or tag. * - `key?: Token | null` – Set for pair values. May then be `null`, if * the key before the `:` separator is empty. * - `sep?: SourceToken[]` – Source tokens between the key and the value, * which should include the `:` map value indicator if `value` is set. * - `value?: Token` – The value of a sequence item, or of a map pair. * - `path`: The steps from the root to the current node, as an array of * `['key' | 'value', number]` tuples. * * The return value of the visitor may be used to control the traversal: * - `undefined` (default): Do nothing and continue * - `visit.SKIP`: Do not visit the children of this token, continue with * next sibling * - `visit.BREAK`: Terminate traversal completely * - `visit.REMOVE`: Remove the current item, then continue with the next one * - `number`: Set the index of the next step. This is useful especially if * the index of the current token has changed. * - `function`: Define the next visitor for this item. After the original * visitor is called on item entry, next visitors are called after handling * a non-empty `key` and when exiting the item. */ function visit(cst, visitor) { if ('type' in cst && cst.type === 'document') cst = { start: cst.start, value: cst.value }; _visit(Object.freeze([]), cst, visitor); } // Without the `as symbol` casts, TS declares these in the `visit` // namespace using `var`, but then complains about that because // `unique symbol` must be `const`. /** Terminate visit traversal completely */ visit.BREAK = BREAK; /** Do not visit the children of the current item */ visit.SKIP = SKIP; /** Remove the current item */ visit.REMOVE = REMOVE; /** Find the item at `path` from `cst` as the root */ visit.itemAtPath = (cst, path) => { let item = cst; for (const [field, index] of path) { const tok = item?.[field]; if (tok && 'items' in tok) { item = tok.items[index]; } else return undefined; } return item; }; /** * Get the immediate parent collection of the item at `path` from `cst` as the root. 
* * Throws an error if the collection is not found, which should never happen if the item itself exists. */ visit.parentCollection = (cst, path) => { const parent = visit.itemAtPath(cst, path.slice(0, -1)); const field = path[path.length - 1][0]; const coll = parent?.[field]; if (coll && 'items' in coll) return coll; throw new Error('Parent collection not found'); }; function _visit(path, item, visitor) { let ctrl = visitor(item, path); if (typeof ctrl === 'symbol') return ctrl; for (const field of ['key', 'value']) { const token = item[field]; if (token && 'items' in token) { for (let i = 0; i < token.items.length; ++i) { const ci = _visit(Object.freeze(path.concat([[field, i]])), token.items[i], visitor); if (typeof ci === 'number') i = ci - 1; else if (ci === BREAK) return BREAK; else if (ci === REMOVE) { token.items.splice(i, 1); i -= 1; } } if (typeof ctrl === 'function' && field === 'key') ctrl = ctrl(item, path); } } return typeof ctrl === 'function' ? ctrl(item, path) : ctrl; } /** The byte order mark */ const BOM = '\u{FEFF}'; /** Start of doc-mode */ const DOCUMENT = '\x02'; // C0: Start of Text
/** Unexpected end of flow-mode */ const FLOW_END = '\x18'; // C0: Cancel
/** Next token is a scalar value */ const SCALAR = '\x1f'; // C0: Unit Separator
/** @returns `true` if `token` is a flow or block collection */ const isCollection = (token) => !!token && 'items' in token; /** @returns `true` if `token` is a flow or block scalar; not an alias */ const isScalar = (token) => !!token && (token.type === 'scalar' || token.type === 'single-quoted-scalar' || token.type === 'double-quoted-scalar' || token.type === 'block-scalar'); /* istanbul ignore next */ /** Get a printable representation of a lexer token */ function prettyToken(token) { switch (token) { case BOM: return '<BOM>'; case DOCUMENT: return '<DOC>'; case FLOW_END: return '<FLOW_END>'; case SCALAR: return '<SCALAR>'; default: return JSON.stringify(token); } } /** Identify the type of a lexer token. May return `null` for unknown tokens. */ function tokenType(source) { switch (source) { case BOM: return 'byte-order-mark'; case DOCUMENT: return 'doc-mode'; case FLOW_END: return 'flow-error-end'; case SCALAR: return 'scalar'; case '---': return 'doc-start'; case '...': return 'doc-end'; case '': case '\n': case '\r\n': return 'newline'; case '-': return 'seq-item-ind'; case '?': return 'explicit-key-ind'; case ':': return 'map-value-ind'; case '{': return 'flow-map-start'; case '}': return 'flow-map-end'; case '[': return 'flow-seq-start'; case ']': return 'flow-seq-end'; case ',': return 'comma'; } switch (source[0]) { case ' ': case '\t': return 'space'; case '#': return 'comment'; case '%': return 'directive-line'; case '*': return 'alias'; case '&': return 'anchor'; case '!': return 'tag'; case "'": return 'single-quoted-scalar'; case '"': return 'double-quoted-scalar'; case '|': case '>': return 'block-scalar-header'; } return null; } var cst = { __proto__: null, BOM: BOM, DOCUMENT: DOCUMENT, FLOW_END: FLOW_END, SCALAR: SCALAR, createScalarToken: createScalarToken, isCollection: isCollection, isScalar: isScalar, prettyToken: prettyToken, resolveAsScalar: resolveAsScalar, setScalarValue: setScalarValue, stringify: stringify$1, tokenType: tokenType, visit: visit }; /* START -> stream stream directive -> line-end -> stream indent + line-end -> stream [else] -> line-start line-end comment -> line-end newline -> .
input-end -> END line-start doc-start -> doc doc-end -> stream [else] -> indent -> block-start block-start seq-item-start -> block-start explicit-key-start -> block-start map-value-start -> block-start [else] -> doc doc line-end -> line-start spaces -> doc anchor -> doc tag -> doc flow-start -> flow -> doc flow-end -> error -> doc seq-item-start -> error -> doc explicit-key-start -> error -> doc map-value-start -> doc alias -> doc quote-start -> quoted-scalar -> doc block-scalar-header -> line-end -> block-scalar(min) -> line-start [else] -> plain-scalar(false, min) -> doc flow line-end -> flow spaces -> flow anchor -> flow tag -> flow flow-start -> flow -> flow flow-end -> . seq-item-start -> error -> flow explicit-key-start -> flow map-value-start -> flow alias -> flow quote-start -> quoted-scalar -> flow comma -> flow [else] -> plain-scalar(true, 0) -> flow quoted-scalar quote-end -> . [else] -> quoted-scalar block-scalar(min) newline + peek(indent < min) -> . [else] -> block-scalar(min) plain-scalar(is-flow, min) scalar-end(is-flow) -> . peek(newline + (indent < min)) -> . [else] -> plain-scalar(min) */ function isEmpty(ch) { switch (ch) { case undefined: case ' ': case '\n': case '\r': case '\t': return true; default: return false; } } const hexDigits = '0123456789ABCDEFabcdef'.split(''); const tagChars = "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz-#;/?:@&=+$_.!~*'()".split(''); const invalidFlowScalarChars = ',[]{}'.split(''); const invalidAnchorChars = ' ,[]{}\n\r\t'.split(''); const isNotAnchorChar = (ch) => !ch || invalidAnchorChars.includes(ch); /** * Splits an input string into lexical tokens, i.e. smaller strings that are * easily identifiable by `tokens.tokenType()`. * * Lexing starts always in a "stream" context. Incomplete input may be buffered * until a complete token can be emitted. * * In addition to slices of the original input, the following control characters * may also be emitted: * * - `\x02` (Start of Text): A document starts with the next token * - `\x18` (Cancel): Unexpected end of flow-mode (indicates an error) * - `\x1f` (Unit Separator): Next token is a scalar value * - `\u{FEFF}` (Byte order mark): Emitted separately outside documents */ class Lexer { constructor() { /** * Flag indicating whether the end of the current buffer marks the end of * all input */ this.atEnd = false; /** * Explicit indent set in block scalar header, as an offset from the current * minimum indent, so e.g. set to 1 from a header `|2+`. Set to -1 if not * explicitly set. */ this.blockScalarIndent = -1; /** * Block scalars that include a + (keep) chomping indicator in their header * include trailing empty lines, which are otherwise excluded from the * scalar's contents. */ this.blockScalarKeep = false; /** Current input */ this.buffer = ''; /** * Flag noting whether the map value indicator : can immediately follow this * node within a flow context. */ this.flowKey = false; /** Count of surrounding flow collection levels. */ this.flowLevel = 0; /** * Minimum level of indentation required for next lines to be parsed as a * part of the current scalar value. */ this.indentNext = 0; /** Indentation level of the current line. */ this.indentValue = 0; /** Position of the next \n character. */ this.lineEndPos = null; /** Stores the state of the lexer if reaching the end of incpomplete input */ this.next = null; /** A pointer to `buffer`; the current position of the lexer. */ this.pos = 0; } /** * Generate YAML tokens from the `source` string. 
If `incomplete`, * a part of the last line may be left as a buffer for the next call. * * @returns A generator of lexical tokens */ *lex(source, incomplete = false) { if (source) { this.buffer = this.buffer ? this.buffer + source : source; this.lineEndPos = null; } this.atEnd = !incomplete; let next = this.next ?? 'stream'; while (next && (incomplete || this.hasChars(1))) next = yield* this.parseNext(next); } atLineEnd() { let i = this.pos; let ch = this.buffer[i]; while (ch === ' ' || ch === '\t') ch = this.buffer[++i]; if (!ch || ch === '#' || ch === '\n') return true; if (ch === '\r') return this.buffer[i + 1] === '\n'; return false; } charAt(n) { return this.buffer[this.pos + n]; } continueScalar(offset) { let ch = this.buffer[offset]; if (this.indentNext > 0) { let indent = 0; while (ch === ' ') ch = this.buffer[++indent + offset]; if (ch === '\r') { const next = this.buffer[indent + offset + 1]; if (next === '\n' || (!next && !this.atEnd)) return offset + indent + 1; } return ch === '\n' || indent >= this.indentNext || (!ch && !this.atEnd) ? offset + indent : -1; } if (ch === '-' || ch === '.') { const dt = this.buffer.substr(offset, 3); if ((dt === '---' || dt === '...') && isEmpty(this.buffer[offset + 3])) return -1; } return offset; } getLine() { let end = this.lineEndPos; if (typeof end !== 'number' || (end !== -1 && end < this.pos)) { end = this.buffer.indexOf('\n', this.pos); this.lineEndPos = end; } if (end === -1) return this.atEnd ? this.buffer.substring(this.pos) : null; if (this.buffer[end - 1] === '\r') end -= 1; return this.buffer.substring(this.pos, end); } hasChars(n) { return this.pos + n <= this.buffer.length; } setNext(state) { this.buffer = this.buffer.substring(this.pos); this.pos = 0; this.lineEndPos = null; this.next = state; return null; } peek(n) { return this.buffer.substr(this.pos, n); } *parseNext(next) { switch (next) { case 'stream': return yield* this.parseStream(); case 'line-start': return yield* this.parseLineStart(); case 'block-start': return yield* this.parseBlockStart(); case 'doc': return yield* this.parseDocument(); case 'flow': return yield* this.parseFlowCollection(); case 'quoted-scalar': return yield* this.parseQuotedScalar(); case 'block-scalar': return yield* this.parseBlockScalar(); case 'plain-scalar': return yield* this.parsePlainScalar(); } } *parseStream() { let line = this.getLine(); if (line === null) return this.setNext('stream'); if (line[0] === BOM) { yield* this.pushCount(1); line = line.substring(1); } if (line[0] === '%') { let dirEnd = line.length; const cs = line.indexOf('#'); if (cs !== -1) { const ch = line[cs - 1]; if (ch === ' ' || ch === '\t') dirEnd = cs - 1; } while (true) { const ch = line[dirEnd - 1]; if (ch === ' ' || ch === '\t') dirEnd -= 1; else break; } const n = (yield* this.pushCount(dirEnd)) + (yield* this.pushSpaces(true)); yield* this.pushCount(line.length - n); // possible comment this.pushNewline(); return 'stream'; } if (this.atLineEnd()) { const sp = yield* this.pushSpaces(true); yield* this.pushCount(line.length - sp); yield* this.pushNewline(); return 'stream'; } yield DOCUMENT; return yield* this.parseLineStart(); } *parseLineStart() { const ch = this.charAt(0); if (!ch && !this.atEnd) return this.setNext('line-start'); if (ch === '-' || ch === '.') { if (!this.atEnd && !this.hasChars(4)) return this.setNext('line-start'); const s = this.peek(3); if (s === '---' && isEmpty(this.charAt(3))) { yield* this.pushCount(3); this.indentValue = 0; this.indentNext = 0; return 'doc'; } else if (s === '...' 
&& isEmpty(this.charAt(3))) { yield* this.pushCount(3); return 'stream'; } } this.indentValue = yield* this.pushSpaces(false); if (this.indentNext > this.indentValue && !isEmpty(this.charAt(1))) this.indentNext = this.indentValue; return yield* this.parseBlockStart(); } *parseBlockStart() { const [ch0, ch1] = this.peek(2); if (!ch1 && !this.atEnd) return this.setNext('block-start'); if ((ch0 === '-' || ch0 === '?' || ch0 === ':') && isEmpty(ch1)) { const n = (yield* this.pushCount(1)) + (yield* this.pushSpaces(true)); this.indentNext = this.indentValue + 1; this.indentValue += n; return yield* this.parseBlockStart(); } return 'doc'; } *parseDocument() { yield* this.pushSpaces(true); const line = this.getLine(); if (line === null) return this.setNext('doc'); let n = yield* this.pushIndicators(); switch (line[n]) { case '#': yield* this.pushCount(line.length - n); // fallthrough case undefined: yield* this.pushNewline(); return yield* this.parseLineStart(); case '{': case '[': yield* this.pushCount(1); this.flowKey = false; this.flowLevel = 1; return 'flow'; case '}': case ']': // this is an error yield* this.pushCount(1); return 'doc'; case '*': yield* this.pushUntil(isNotAnchorChar); return 'doc'; case '"': case "'": return yield* this.parseQuotedScalar(); case '|': case '>': n += yield* this.parseBlockScalarHeader(); n += yield* this.pushSpaces(true); yield* this.pushCount(line.length - n); yield* this.pushNewline(); return yield* this.parseBlockScalar(); default: return yield* this.parsePlainScalar(); } } *parseFlowCollection() { let nl, sp; let indent = -1; do { nl = yield* this.pushNewline(); if (nl > 0) { sp = yield* this.pushSpaces(false); this.indentValue = indent = sp; } else { sp = 0; } sp += yield* this.pushSpaces(true); } while (nl + sp > 0); const line = this.getLine(); if (line === null) return this.setNext('flow'); if ((indent !== -1 && indent < this.indentNext && line[0] !== '#') || (indent === 0 && (line.startsWith('---') || line.startsWith('...')) && isEmpty(line[3]))) { // Allowing for the terminal ] or } at the same (rather than greater) // indent level as the initial [ or { is technically invalid, but // failing here would be surprising to users. const atFlowEndMarker = indent === this.indentNext - 1 && this.flowLevel === 1 && (line[0] === ']' || line[0] === '}'); if (!atFlowEndMarker) { // this is an error this.flowLevel = 0; yield FLOW_END; return yield* this.parseLineStart(); } } let n = 0; while (line[n] === ',') { n += yield* this.pushCount(1); n += yield* this.pushSpaces(true); this.flowKey = false; } n += yield* this.pushIndicators(); switch (line[n]) { case undefined: return 'flow'; case '#': yield* this.pushCount(line.length - n); return 'flow'; case '{': case '[': yield* this.pushCount(1); this.flowKey = false; this.flowLevel += 1; return 'flow'; case '}': case ']': yield* this.pushCount(1); this.flowKey = true; this.flowLevel -= 1; return this.flowLevel ? 
'flow' : 'doc'; case '*': yield* this.pushUntil(isNotAnchorChar); return 'flow'; case '"': case "'": this.flowKey = true; return yield* this.parseQuotedScalar(); case ':': { const next = this.charAt(1); if (this.flowKey || isEmpty(next) || next === ',') { this.flowKey = false; yield* this.pushCount(1); yield* this.pushSpaces(true); return 'flow'; } } // fallthrough default: this.flowKey = false; return yield* this.parsePlainScalar(); } } *parseQuotedScalar() { const quote = this.charAt(0); let end = this.buffer.indexOf(quote, this.pos + 1); if (quote === "'") { while (end !== -1 && this.buffer[end + 1] === "'") end = this.buffer.indexOf("'", end + 2); } else { // double-quote while (end !== -1) { let n = 0; while (this.buffer[end - 1 - n] === '\\') n += 1; if (n % 2 === 0) break; end = this.buffer.indexOf('"', end + 1); } } // Only looking for newlines within the quotes const qb = this.buffer.substring(0, end); let nl = qb.indexOf('\n', this.pos); if (nl !== -1) { while (nl !== -1) { const cs = this.continueScalar(nl + 1); if (cs === -1) break; nl = qb.indexOf('\n', cs); } if (nl !== -1) { // this is an error caused by an unexpected unindent end = nl - (qb[nl - 1] === '\r' ? 2 : 1); } } if (end === -1) { if (!this.atEnd) return this.setNext('quoted-scalar'); end = this.buffer.length; } yield* this.pushToIndex(end + 1, false); return this.flowLevel ? 'flow' : 'doc'; } *parseBlockScalarHeader() { this.blockScalarIndent = -1; this.blockScalarKeep = false; let i = this.pos; while (true) { const ch = this.buffer[++i]; if (ch === '+') this.blockScalarKeep = true; else if (ch > '0' && ch <= '9') this.blockScalarIndent = Number(ch) - 1; else if (ch !== '-') break; } return yield* this.pushUntil(ch => isEmpty(ch) || ch === '#'); } *parseBlockScalar() { let nl = this.pos - 1; // may be -1 if this.pos === 0 let indent = 0; let ch; loop: for (let i = this.pos; (ch = this.buffer[i]); ++i) { switch (ch) { case ' ': indent += 1; break; case '\n': nl = i; indent = 0; break; case '\r': { const next = this.buffer[i + 1]; if (!next && !this.atEnd) return this.setNext('block-scalar'); if (next === '\n') break; } // fallthrough default: break loop; } } if (!ch && !this.atEnd) return this.setNext('block-scalar'); if (indent >= this.indentNext) { if (this.blockScalarIndent === -1) this.indentNext = indent; else this.indentNext += this.blockScalarIndent; do { const cs = this.continueScalar(nl + 1); if (cs === -1) break; nl = this.buffer.indexOf('\n', cs); } while (nl !== -1); if (nl === -1) { if (!this.atEnd) return this.setNext('block-scalar'); nl = this.buffer.length; } } if (!this.blockScalarKeep) { do { let i = nl - 1; let ch = this.buffer[i]; if (ch === '\r') ch = this.buffer[--i]; const lastChar = i; // Drop the line if last char not more indented while (ch === ' ' || ch === '\t') ch = this.buffer[--i]; if (ch === '\n' && i >= this.pos && i + 1 + indent > lastChar) nl = i; else break; } while (true); } yield SCALAR; yield* this.pushToIndex(nl + 1, true); return yield* this.parseLineStart(); } *parsePlainScalar() { const inFlow = this.flowLevel > 0; let end = this.pos - 1; let i = this.pos - 1; let ch; while ((ch = this.buffer[++i])) { if (ch === ':') { const next = this.buffer[i + 1]; if (isEmpty(next) || (inFlow && next === ',')) break; end = i; } else if (isEmpty(ch)) { let next = this.buffer[i + 1]; if (ch === '\r') { if (next === '\n') { i += 1; ch = '\n'; next = this.buffer[i + 1]; } else end = i; } if (next === '#' || (inFlow && invalidFlowScalarChars.includes(next))) break; if (ch === '\n') { const 
cs = this.continueScalar(i + 1); if (cs === -1) break; i = Math.max(i, cs - 2); // to advance, but still account for ' #' } } else { if (inFlow && invalidFlowScalarChars.includes(ch)) break; end = i; } } if (!ch && !this.atEnd) return this.setNext('plain-scalar'); yield SCALAR; yield* this.pushToIndex(end + 1, true); return inFlow ? 'flow' : 'doc'; } *pushCount(n) { if (n > 0) { yield this.buffer.substr(this.pos, n); this.pos += n; return n; } return 0; } *pushToIndex(i, allowEmpty) { const s = this.buffer.slice(this.pos, i); if (s) { yield s; this.pos += s.length; return s.length; } else if (allowEmpty) yield ''; return 0; } *pushIndicators() { switch (this.charAt(0)) { case '!': return ((yield* this.pushTag()) + (yield* this.pushSpaces(true)) + (yield* this.pushIndicators())); case '&': return ((yield* this.pushUntil(isNotAnchorChar)) + (yield* this.pushSpaces(true)) + (yield* this.pushIndicators())); case '-': // this is an error case '?': // this is an error outside flow collections case ':': { const inFlow = this.flowLevel > 0; const ch1 = this.charAt(1); if (isEmpty(ch1) || (inFlow && invalidFlowScalarChars.includes(ch1))) { if (!inFlow) this.indentNext = this.indentValue + 1; else if (this.flowKey) this.flowKey = false; return ((yield* this.pushCount(1)) + (yield* this.pushSpaces(true)) + (yield* this.pushIndicators())); } } } return 0; } *pushTag() { if (this.charAt(1) === '<') { let i = this.pos + 2; let ch = this.buffer[i]; while (!isEmpty(ch) && ch !== '>') ch = this.buffer[++i]; return yield* this.pushToIndex(ch === '>' ? i + 1 : i, false); } else { let i = this.pos + 1; let ch = this.buffer[i]; while (ch) { if (tagChars.includes(ch)) ch = this.buffer[++i]; else if (ch === '%' && hexDigits.includes(this.buffer[i + 1]) && hexDigits.includes(this.buffer[i + 2])) { ch = this.buffer[(i += 3)]; } else break; } return yield* this.pushToIndex(i, false); } } *pushNewline() { const ch = this.buffer[this.pos]; if (ch === '\n') return yield* this.pushCount(1); else if (ch === '\r' && this.charAt(1) === '\n') return yield* this.pushCount(2); else return 0; } *pushSpaces(allowTabs) { let i = this.pos - 1; let ch; do { ch = this.buffer[++i]; } while (ch === ' ' || (allowTabs && ch === '\t')); const n = i - this.pos; if (n > 0) { yield this.buffer.substr(this.pos, n); this.pos = i; } return n; } *pushUntil(test) { let i = this.pos; let ch = this.buffer[i]; while (!test(ch)) ch = this.buffer[++i]; return yield* this.pushToIndex(i, false); } } /** * Tracks newlines during parsing in order to provide an efficient API for * determining the one-indexed `{ line, col }` position for any offset * within the input. */ class LineCounter { constructor() { this.lineStarts = []; /** * Should be called in ascending order. Otherwise, call * `lineCounter.lineStarts.sort()` before calling `linePos()`. */ this.addNewLine = (offset) => this.lineStarts.push(offset); /** * Performs a binary search and returns the 1-indexed { line, col } * position of `offset`. If `line === 0`, `addNewLine` has never been * called or `offset` is before the first known newline. 
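 *
 * A small worked example (illustrative only; offsets assume the input
 * `'key:\nval'`, so the second line starts at offset 5):
 *
 * ```ts
 * const lc = new LineCounter()
 * lc.addNewLine(0)  // start of input
 * lc.addNewLine(5)  // offset just after the first '\n'
 * lc.linePos(6)     // { line: 2, col: 2 }
 * ```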
*/ this.linePos = (offset) => { let low = 0; let high = this.lineStarts.length; while (low < high) { const mid = (low + high) >> 1; // Math.floor((low + high) / 2) if (this.lineStarts[mid] < offset) low = mid + 1; else high = mid; } if (this.lineStarts[low] === offset) return { line: low + 1, col: 1 }; if (low === 0) return { line: 0, col: offset }; const start = this.lineStarts[low - 1]; return { line: low, col: offset - start + 1 }; }; } } function includesToken(list, type) { for (let i = 0; i < list.length; ++i) if (list[i].type === type) return true; return false; } function findNonEmptyIndex(list) { for (let i = 0; i < list.length; ++i) { switch (list[i].type) { case 'space': case 'comment': case 'newline': break; default: return i; } } return -1; } function isFlowToken(token) { switch (token?.type) { case 'alias': case 'scalar': case 'single-quoted-scalar': case 'double-quoted-scalar': case 'flow-collection': return true; default: return false; } } function getPrevProps(parent) { switch (parent.type) { case 'document': return parent.start; case 'block-map': { const it = parent.items[parent.items.length - 1]; return it.sep ?? it.start; } case 'block-seq': return parent.items[parent.items.length - 1].start; /* istanbul ignore next should not happen */ default: return []; } } /** Note: May modify input array */ function getFirstKeyStartProps(prev) { if (prev.length === 0) return []; let i = prev.length; loop: while (--i >= 0) { switch (prev[i].type) { case 'doc-start': case 'explicit-key-ind': case 'map-value-ind': case 'seq-item-ind': case 'newline': break loop; } } while (prev[++i]?.type === 'space') { /* loop */ } return prev.splice(i, prev.length); } function fixFlowSeqItems(fc) { if (fc.start.type === 'flow-seq-start') { for (const it of fc.items) { if (it.sep && !it.value && !includesToken(it.start, 'explicit-key-ind') && !includesToken(it.sep, 'map-value-ind')) { if (it.key) it.value = it.key; delete it.key; if (isFlowToken(it.value)) { if (it.value.end) Array.prototype.push.apply(it.value.end, it.sep); else it.value.end = it.sep; } else Array.prototype.push.apply(it.start, it.sep); delete it.sep; } } } } /** * A YAML concrete syntax tree (CST) parser * * ```ts * const src: string = ... * for (const token of new Parser().parse(src)) { * // token: Token * } * ``` * * To use the parser with a user-provided lexer: * * ```ts * function* parse(source: string, lexer: Lexer) { * const parser = new Parser() * for (const lexeme of lexer.lex(source)) * yield* parser.next(lexeme) * yield* parser.end() * } * * const src: string = ... * const lexer = new Lexer() * for (const token of parse(src, lexer)) { * // token: Token * } * ``` */ let Parser$1 = class Parser { /** * @param onNewLine - If defined, called separately with the start position of * each new line (in `parse()`, including the start of input). 
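 *
 * A typical pairing with `LineCounter` (sketch; mirrors how `parseDocument`
 * wires the two together further below):
 *
 * ```ts
 * const lineCounter = new LineCounter()
 * const parser = new Parser(lineCounter.addNewLine)
 * for (const token of parser.parse('a: 1\nb: 2\n'))
 *   console.log(token.type, lineCounter.linePos(token.offset))
 * ```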
*/ constructor(onNewLine) { /** If true, space and sequence indicators count as indentation */ this.atNewLine = true; /** If true, next token is a scalar value */ this.atScalar = false; /** Current indentation level */ this.indent = 0; /** Current offset since the start of parsing */ this.offset = 0; /** On the same line with a block map key */ this.onKeyLine = false; /** Top indicates the node that's currently being built */ this.stack = []; /** The source of the current token, set in parse() */ this.source = ''; /** The type of the current token, set in parse() */ this.type = ''; // Must be defined after `next()` this.lexer = new Lexer(); this.onNewLine = onNewLine; } /** * Parse `source` as a YAML stream. * If `incomplete`, a part of the last line may be left as a buffer for the next call. * * Errors are not thrown, but yielded as `{ type: 'error', message }` tokens. * * @returns A generator of tokens representing each directive, document, and other structure. */ *parse(source, incomplete = false) { if (this.onNewLine && this.offset === 0) this.onNewLine(0); for (const lexeme of this.lexer.lex(source, incomplete)) yield* this.next(lexeme); if (!incomplete) yield* this.end(); } /** * Advance the parser by the `source` of one lexical token. */ *next(source) { this.source = source; if (this.atScalar) { this.atScalar = false; yield* this.step(); this.offset += source.length; return; } const type = tokenType(source); if (!type) { const message = `Not a YAML token: ${source}`; yield* this.pop({ type: 'error', offset: this.offset, message, source }); this.offset += source.length; } else if (type === 'scalar') { this.atNewLine = false; this.atScalar = true; this.type = 'scalar'; } else { this.type = type; yield* this.step(); switch (type) { case 'newline': this.atNewLine = true; this.indent = 0; if (this.onNewLine) this.onNewLine(this.offset + source.length); break; case 'space': if (this.atNewLine && source[0] === ' ') this.indent += source.length; break; case 'explicit-key-ind': case 'map-value-ind': case 'seq-item-ind': if (this.atNewLine) this.indent += source.length; break; case 'doc-mode': case 'flow-error-end': return; default: this.atNewLine = false; } this.offset += source.length; } } /** Call at end of input to push out any remaining constructions */ *end() { while (this.stack.length > 0) yield* this.pop(); } get sourceToken() { const st = { type: this.type, offset: this.offset, indent: this.indent, source: this.source }; return st; } *step() { const top = this.peek(1); if (this.type === 'doc-end' && (!top || top.type !== 'doc-end')) { while (this.stack.length > 0) yield* this.pop(); this.stack.push({ type: 'doc-end', offset: this.offset, source: this.source }); return; } if (!top) return yield* this.stream(); switch (top.type) { case 'document': return yield* this.document(top); case 'alias': case 'scalar': case 'single-quoted-scalar': case 'double-quoted-scalar': return yield* this.scalar(top); case 'block-scalar': return yield* this.blockScalar(top); case 'block-map': return yield* this.blockMap(top); case 'block-seq': return yield* this.blockSequence(top); case 'flow-collection': return yield* this.flowCollection(top); case 'doc-end': return yield* this.documentEnd(top); } /* istanbul ignore next should not happen */ yield* this.pop(); } peek(n) { return this.stack[this.stack.length - n]; } *pop(error) { const token = error ?? 
this.stack.pop(); /* istanbul ignore if should not happen */ if (!token) { const message = 'Tried to pop an empty stack'; yield { type: 'error', offset: this.offset, source: '', message }; } else if (this.stack.length === 0) { yield token; } else { const top = this.peek(1); if (token.type === 'block-scalar') { // Block scalars use their parent rather than header indent token.indent = 'indent' in top ? top.indent : 0; } else if (token.type === 'flow-collection' && top.type === 'document') { // Ignore all indent for top-level flow collections token.indent = 0; } if (token.type === 'flow-collection') fixFlowSeqItems(token); switch (top.type) { case 'document': top.value = token; break; case 'block-scalar': top.props.push(token); // error break; case 'block-map': { const it = top.items[top.items.length - 1]; if (it.value) { top.items.push({ start: [], key: token, sep: [] }); this.onKeyLine = true; return; } else if (it.sep) { it.value = token; } else { Object.assign(it, { key: token, sep: [] }); this.onKeyLine = !includesToken(it.start, 'explicit-key-ind'); return; } break; } case 'block-seq': { const it = top.items[top.items.length - 1]; if (it.value) top.items.push({ start: [], value: token }); else it.value = token; break; } case 'flow-collection': { const it = top.items[top.items.length - 1]; if (!it || it.value) top.items.push({ start: [], key: token, sep: [] }); else if (it.sep) it.value = token; else Object.assign(it, { key: token, sep: [] }); return; } /* istanbul ignore next should not happen */ default: yield* this.pop(); yield* this.pop(token); } if ((top.type === 'document' || top.type === 'block-map' || top.type === 'block-seq') && (token.type === 'block-map' || token.type === 'block-seq')) { const last = token.items[token.items.length - 1]; if (last && !last.sep && !last.value && last.start.length > 0 && findNonEmptyIndex(last.start) === -1 && (token.indent === 0 || last.start.every(st => st.type !== 'comment' || st.indent < token.indent))) { if (top.type === 'document') top.end = last.start; else top.items.push({ start: last.start }); token.items.splice(-1, 1); } } } } *stream() { switch (this.type) { case 'directive-line': yield { type: 'directive', offset: this.offset, source: this.source }; return; case 'byte-order-mark': case 'space': case 'comment': case 'newline': yield this.sourceToken; return; case 'doc-mode': case 'doc-start': { const doc = { type: 'document', offset: this.offset, start: [] }; if (this.type === 'doc-start') doc.start.push(this.sourceToken); this.stack.push(doc); return; } } yield { type: 'error', offset: this.offset, message: `Unexpected ${this.type} token in YAML stream`, source: this.source }; } *document(doc) { if (doc.value) return yield* this.lineEnd(doc); switch (this.type) { case 'doc-start': { if (findNonEmptyIndex(doc.start) !== -1) { yield* this.pop(); yield* this.step(); } else doc.start.push(this.sourceToken); return; } case 'anchor': case 'tag': case 'space': case 'comment': case 'newline': doc.start.push(this.sourceToken); return; } const bv = this.startBlockValue(doc); if (bv) this.stack.push(bv); else { yield { type: 'error', offset: this.offset, message: `Unexpected ${this.type} token in YAML document`, source: this.source }; } } *scalar(scalar) { if (this.type === 'map-value-ind') { const prev = getPrevProps(this.peek(2)); const start = getFirstKeyStartProps(prev); let sep; if (scalar.end) { sep = scalar.end; sep.push(this.sourceToken); delete scalar.end; } else sep = [this.sourceToken]; const map = { type: 'block-map', offset: 
scalar.offset, indent: scalar.indent, items: [{ start, key: scalar, sep }] }; this.onKeyLine = true; this.stack[this.stack.length - 1] = map; } else yield* this.lineEnd(scalar); } *blockScalar(scalar) { switch (this.type) { case 'space': case 'comment': case 'newline': scalar.props.push(this.sourceToken); return; case 'scalar': scalar.source = this.source; // block-scalar source includes trailing newline this.atNewLine = true; this.indent = 0; if (this.onNewLine) { let nl = this.source.indexOf('\n') + 1; while (nl !== 0) { this.onNewLine(this.offset + nl); nl = this.source.indexOf('\n', nl) + 1; } } yield* this.pop(); break; /* istanbul ignore next should not happen */ default: yield* this.pop(); yield* this.step(); } } *blockMap(map) { const it = map.items[map.items.length - 1]; // it.sep is true-ish if pair already has key or : separator switch (this.type) { case 'newline': this.onKeyLine = false; if (it.value) { const end = 'end' in it.value ? it.value.end : undefined; const last = Array.isArray(end) ? end[end.length - 1] : undefined; if (last?.type === 'comment') end?.push(this.sourceToken); else map.items.push({ start: [this.sourceToken] }); } else if (it.sep) { it.sep.push(this.sourceToken); } else { it.start.push(this.sourceToken); } return; case 'space': case 'comment': if (it.value) { map.items.push({ start: [this.sourceToken] }); } else if (it.sep) { it.sep.push(this.sourceToken); } else { if (this.atIndentedComment(it.start, map.indent)) { const prev = map.items[map.items.length - 2]; const end = prev?.value?.end; if (Array.isArray(end)) { Array.prototype.push.apply(end, it.start); end.push(this.sourceToken); map.items.pop(); return; } } it.start.push(this.sourceToken); } return; } if (this.indent >= map.indent) { const atNextItem = !this.onKeyLine && this.indent === map.indent && it.sep; // For empty nodes, assign newline-separated not indented empty tokens to following node let start = []; if (atNextItem && it.sep && !it.value) { const nl = []; for (let i = 0; i < it.sep.length; ++i) { const st = it.sep[i]; switch (st.type) { case 'newline': nl.push(i); break; case 'space': break; case 'comment': if (st.indent > map.indent) nl.length = 0; break; default: nl.length = 0; } } if (nl.length >= 2) start = it.sep.splice(nl[1]); } switch (this.type) { case 'anchor': case 'tag': if (atNextItem || it.value) { start.push(this.sourceToken); map.items.push({ start }); this.onKeyLine = true; } else if (it.sep) { it.sep.push(this.sourceToken); } else { it.start.push(this.sourceToken); } return; case 'explicit-key-ind': if (!it.sep && !includesToken(it.start, 'explicit-key-ind')) { it.start.push(this.sourceToken); } else if (atNextItem || it.value) { start.push(this.sourceToken); map.items.push({ start }); } else { this.stack.push({ type: 'block-map', offset: this.offset, indent: this.indent, items: [{ start: [this.sourceToken] }] }); } this.onKeyLine = true; return; case 'map-value-ind': if (includesToken(it.start, 'explicit-key-ind')) { if (!it.sep) { if (includesToken(it.start, 'newline')) { Object.assign(it, { key: null, sep: [this.sourceToken] }); } else { const start = getFirstKeyStartProps(it.start); this.stack.push({ type: 'block-map', offset: this.offset, indent: this.indent, items: [{ start, key: null, sep: [this.sourceToken] }] }); } } else if (it.value) { map.items.push({ start: [], key: null, sep: [this.sourceToken] }); } else if (includesToken(it.sep, 'map-value-ind')) { this.stack.push({ type: 'block-map', offset: this.offset, indent: this.indent, items: [{ start, key: null, 
sep: [this.sourceToken] }] }); } else if (isFlowToken(it.key) && !includesToken(it.sep, 'newline')) { const start = getFirstKeyStartProps(it.start); const key = it.key; const sep = it.sep; sep.push(this.sourceToken); // @ts-expect-error type guard is wrong here delete it.key, delete it.sep; this.stack.push({ type: 'block-map', offset: this.offset, indent: this.indent, items: [{ start, key, sep }] }); } else if (start.length > 0) { // Not actually at next item it.sep = it.sep.concat(start, this.sourceToken); } else { it.sep.push(this.sourceToken); } } else { if (!it.sep) { Object.assign(it, { key: null, sep: [this.sourceToken] }); } else if (it.value || atNextItem) { map.items.push({ start, key: null, sep: [this.sourceToken] }); } else if (includesToken(it.sep, 'map-value-ind')) { this.stack.push({ type: 'block-map', offset: this.offset, indent: this.indent, items: [{ start: [], key: null, sep: [this.sourceToken] }] }); } else { it.sep.push(this.sourceToken); } } this.onKeyLine = true; return; case 'alias': case 'scalar': case 'single-quoted-scalar': case 'double-quoted-scalar': { const fs = this.flowScalar(this.type); if (atNextItem || it.value) { map.items.push({ start, key: fs, sep: [] }); this.onKeyLine = true; } else if (it.sep) { this.stack.push(fs); } else { Object.assign(it, { key: fs, sep: [] }); this.onKeyLine = true; } return; } default: { const bv = this.startBlockValue(map); if (bv) { if (atNextItem && bv.type !== 'block-seq' && includesToken(it.start, 'explicit-key-ind')) { map.items.push({ start }); } this.stack.push(bv); return; } } } } yield* this.pop(); yield* this.step(); } *blockSequence(seq) { const it = seq.items[seq.items.length - 1]; switch (this.type) { case 'newline': if (it.value) { const end = 'end' in it.value ? it.value.end : undefined; const last = Array.isArray(end) ? 
end[end.length - 1] : undefined; if (last?.type === 'comment') end?.push(this.sourceToken); else seq.items.push({ start: [this.sourceToken] }); } else it.start.push(this.sourceToken); return; case 'space': case 'comment': if (it.value) seq.items.push({ start: [this.sourceToken] }); else { if (this.atIndentedComment(it.start, seq.indent)) { const prev = seq.items[seq.items.length - 2]; const end = prev?.value?.end; if (Array.isArray(end)) { Array.prototype.push.apply(end, it.start); end.push(this.sourceToken); seq.items.pop(); return; } } it.start.push(this.sourceToken); } return; case 'anchor': case 'tag': if (it.value || this.indent <= seq.indent) break; it.start.push(this.sourceToken); return; case 'seq-item-ind': if (this.indent !== seq.indent) break; if (it.value || includesToken(it.start, 'seq-item-ind')) seq.items.push({ start: [this.sourceToken] }); else it.start.push(this.sourceToken); return; } if (this.indent > seq.indent) { const bv = this.startBlockValue(seq); if (bv) { this.stack.push(bv); return; } } yield* this.pop(); yield* this.step(); } *flowCollection(fc) { const it = fc.items[fc.items.length - 1]; if (this.type === 'flow-error-end') { let top; do { yield* this.pop(); top = this.peek(1); } while (top && top.type === 'flow-collection'); } else if (fc.end.length === 0) { switch (this.type) { case 'comma': case 'explicit-key-ind': if (!it || it.sep) fc.items.push({ start: [this.sourceToken] }); else it.start.push(this.sourceToken); return; case 'map-value-ind': if (!it || it.value) fc.items.push({ start: [], key: null, sep: [this.sourceToken] }); else if (it.sep) it.sep.push(this.sourceToken); else Object.assign(it, { key: null, sep: [this.sourceToken] }); return; case 'space': case 'comment': case 'newline': case 'anchor': case 'tag': if (!it || it.value) fc.items.push({ start: [this.sourceToken] }); else if (it.sep) it.sep.push(this.sourceToken); else it.start.push(this.sourceToken); return; case 'alias': case 'scalar': case 'single-quoted-scalar': case 'double-quoted-scalar': { const fs = this.flowScalar(this.type); if (!it || it.value) fc.items.push({ start: [], key: fs, sep: [] }); else if (it.sep) this.stack.push(fs); else Object.assign(it, { key: fs, sep: [] }); return; } case 'flow-map-end': case 'flow-seq-end': fc.end.push(this.sourceToken); return; } const bv = this.startBlockValue(fc); /* istanbul ignore else should not happen */ if (bv) this.stack.push(bv); else { yield* this.pop(); yield* this.step(); } } else { const parent = this.peek(2); if (parent.type === 'block-map' && ((this.type === 'map-value-ind' && parent.indent === fc.indent) || (this.type === 'newline' && !parent.items[parent.items.length - 1].sep))) { yield* this.pop(); yield* this.step(); } else if (this.type === 'map-value-ind' && parent.type !== 'flow-collection') { const prev = getPrevProps(parent); const start = getFirstKeyStartProps(prev); fixFlowSeqItems(fc); const sep = fc.end.splice(1, fc.end.length); sep.push(this.sourceToken); const map = { type: 'block-map', offset: fc.offset, indent: fc.indent, items: [{ start, key: fc, sep }] }; this.onKeyLine = true; this.stack[this.stack.length - 1] = map; } else { yield* this.lineEnd(fc); } } } flowScalar(type) { if (this.onNewLine) { let nl = this.source.indexOf('\n') + 1; while (nl !== 0) { this.onNewLine(this.offset + nl); nl = this.source.indexOf('\n', nl) + 1; } } return { type, offset: this.offset, indent: this.indent, source: this.source }; } startBlockValue(parent) { switch (this.type) { case 'alias': case 'scalar': case 
'single-quoted-scalar': case 'double-quoted-scalar': return this.flowScalar(this.type); case 'block-scalar-header': return { type: 'block-scalar', offset: this.offset, indent: this.indent, props: [this.sourceToken], source: '' }; case 'flow-map-start': case 'flow-seq-start': return { type: 'flow-collection', offset: this.offset, indent: this.indent, start: this.sourceToken, items: [], end: [] }; case 'seq-item-ind': return { type: 'block-seq', offset: this.offset, indent: this.indent, items: [{ start: [this.sourceToken] }] }; case 'explicit-key-ind': { this.onKeyLine = true; const prev = getPrevProps(parent); const start = getFirstKeyStartProps(prev); start.push(this.sourceToken); return { type: 'block-map', offset: this.offset, indent: this.indent, items: [{ start }] }; } case 'map-value-ind': { this.onKeyLine = true; const prev = getPrevProps(parent); const start = getFirstKeyStartProps(prev); return { type: 'block-map', offset: this.offset, indent: this.indent, items: [{ start, key: null, sep: [this.sourceToken] }] }; } } return null; } atIndentedComment(start, indent) { if (this.type !== 'comment') return false; if (this.indent <= indent) return false; return start.every(st => st.type === 'newline' || st.type === 'space'); } *documentEnd(docEnd) { if (this.type !== 'doc-mode') { if (docEnd.end) docEnd.end.push(this.sourceToken); else docEnd.end = [this.sourceToken]; if (this.type === 'newline') yield* this.pop(); } } *lineEnd(token) { switch (this.type) { case 'comma': case 'doc-start': case 'doc-end': case 'flow-seq-end': case 'flow-map-end': case 'map-value-ind': yield* this.pop(); yield* this.step(); break; case 'newline': this.onKeyLine = false; // fallthrough case 'space': case 'comment': default: // all other values are errors if (token.end) token.end.push(this.sourceToken); else token.end = [this.sourceToken]; if (this.type === 'newline') yield* this.pop(); } } }; function parseOptions(options) { const prettyErrors = options.prettyErrors !== false; const lineCounter = options.lineCounter || (prettyErrors && new LineCounter()) || null; return { lineCounter, prettyErrors }; } /** * Parse the input as a stream of YAML documents. * * Documents should be separated from each other by `...` or `---` marker lines. * * @returns If an empty `docs` array is returned, it will be of type * EmptyStream and contain additional stream information. In * TypeScript, you should use `'empty' in docs` as a type guard for it. 
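 *
 * For example (illustrative sketch):
 *
 * ```ts
 * const docs = parseAllDocuments('a: 1\n---\nb: 2\n')
 * if (!('empty' in docs)) docs.map(doc => doc.toJS())  // [ { a: 1 }, { b: 2 } ]
 * ```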
*/ function parseAllDocuments(source, options = {}) { const { lineCounter, prettyErrors } = parseOptions(options); const parser = new Parser$1(lineCounter?.addNewLine); const composer = new Composer(options); const docs = Array.from(composer.compose(parser.parse(source))); if (prettyErrors && lineCounter) for (const doc of docs) { doc.errors.forEach(prettifyError(source, lineCounter)); doc.warnings.forEach(prettifyError(source, lineCounter)); } if (docs.length > 0) return docs; return Object.assign([], { empty: true }, composer.streamInfo()); } /** Parse an input string into a single YAML.Document */ function parseDocument(source, options = {}) { const { lineCounter, prettyErrors } = parseOptions(options); const parser = new Parser$1(lineCounter?.addNewLine); const composer = new Composer(options); // `doc` is always set by compose.end(true) at the very latest let doc = null; for (const _doc of composer.compose(parser.parse(source), true, source.length)) { if (!doc) doc = _doc; else if (doc.options.logLevel !== 'silent') { doc.errors.push(new YAMLParseError(_doc.range.slice(0, 2), 'MULTIPLE_DOCS', 'Source contains multiple documents; please use YAML.parseAllDocuments()')); break; } } if (prettyErrors && lineCounter) { doc.errors.forEach(prettifyError(source, lineCounter)); doc.warnings.forEach(prettifyError(source, lineCounter)); } return doc; } function parse$b(src, reviver, options) { let _reviver = undefined; if (typeof reviver === 'function') { _reviver = reviver; } else if (options === undefined && reviver && typeof reviver === 'object') { options = reviver; } const doc = parseDocument(src, options); if (!doc) return null; doc.warnings.forEach(warning => warn(doc.options.logLevel, warning)); if (doc.errors.length > 0) { if (doc.options.logLevel !== 'silent') throw doc.errors[0]; else doc.errors = []; } return doc.toJS(Object.assign({ reviver: _reviver }, options)); } function stringify(value, replacer, options) { let _replacer = null; if (typeof replacer === 'function' || Array.isArray(replacer)) { _replacer = replacer; } else if (options === undefined && replacer) { options = replacer; } if (typeof options === 'string') options = options.length; if (typeof options === 'number') { const indent = Math.round(options); options = indent < 1 ? undefined : indent > 8 ? { indent: 8 } : { indent }; } if (value === undefined) { const { keepUndefined } = options ?? replacer ?? 
{}; if (!keepUndefined) return undefined; } return new Document(value, _replacer, options).toString(options); } var YAML = { __proto__: null, Alias: Alias, CST: cst, Composer: Composer, Document: Document, Lexer: Lexer, LineCounter: LineCounter, Pair: Pair, Parser: Parser$1, Scalar: Scalar, Schema: Schema, YAMLError: YAMLError, YAMLMap: YAMLMap, YAMLParseError: YAMLParseError, YAMLSeq: YAMLSeq, YAMLWarning: YAMLWarning, isAlias: isAlias, isCollection: isCollection$1, isDocument: isDocument, isMap: isMap, isNode: isNode$1, isPair: isPair, isScalar: isScalar$1, isSeq: isSeq, parse: parse$b, parseAllDocuments: parseAllDocuments, parseDocument: parseDocument, stringify: stringify, visit: visit$1, visitAsync: visitAsync }; // `export * as default from ...` fails on Webpack v4 // https://github.com/eemeli/yaml/issues/228 var browser$2 = { __proto__: null, Alias: Alias, CST: cst, Composer: Composer, Document: Document, Lexer: Lexer, LineCounter: LineCounter, Pair: Pair, Parser: Parser$1, Scalar: Scalar, Schema: Schema, YAMLError: YAMLError, YAMLMap: YAMLMap, YAMLParseError: YAMLParseError, YAMLSeq: YAMLSeq, YAMLWarning: YAMLWarning, default: YAML, isAlias: isAlias, isCollection: isCollection$1, isDocument: isDocument, isMap: isMap, isNode: isNode$1, isPair: isPair, isScalar: isScalar$1, isSeq: isSeq, parse: parse$b, parseAllDocuments: parseAllDocuments, parseDocument: parseDocument, stringify: stringify, visit: visit$1, visitAsync: visitAsync }; var require$$3 = /*@__PURE__*/getAugmentedNamespace(browser$2); // eslint-disable-next-line n/no-deprecated-api const { createRequire, createRequireFromPath } = require$$0$8; function req$2 (name, rootFile) { const create = createRequire || createRequireFromPath; const require = create(rootFile); return require(name) } var req_1 = req$2; const req$1 = req_1; /** * Load Options * * @private * @method options * * @param {Object} config PostCSS Config * * @return {Object} options PostCSS Options */ const options = (config, file) => { if (config.parser && typeof config.parser === 'string') { try { config.parser = req$1(config.parser, file); } catch (err) { throw new Error(`Loading PostCSS Parser failed: ${err.message}\n\n(@${file})`) } } if (config.syntax && typeof config.syntax === 'string') { try { config.syntax = req$1(config.syntax, file); } catch (err) { throw new Error(`Loading PostCSS Syntax failed: ${err.message}\n\n(@${file})`) } } if (config.stringifier && typeof config.stringifier === 'string') { try { config.stringifier = req$1(config.stringifier, file); } catch (err) { throw new Error(`Loading PostCSS Stringifier failed: ${err.message}\n\n(@${file})`) } } if (config.plugins) { delete config.plugins; } return config }; var options_1 = options; const req = req_1; /** * Plugin Loader * * @private * @method load * * @param {String} plugin PostCSS Plugin Name * @param {Object} options PostCSS Plugin Options * * @return {Function} PostCSS Plugin */ const load = (plugin, options, file) => { try { if ( options === null || options === undefined || Object.keys(options).length === 0 ) { return req(plugin, file) } else { return req(plugin, file)(options) } } catch (err) { throw new Error(`Loading PostCSS Plugin failed: ${err.message}\n\n(@${file})`) } }; /** * Load Plugins * * @private * @method plugins * * @param {Object} config PostCSS Config Plugins * * @return {Array} plugins PostCSS Plugins */ const plugins = (config, file) => { let plugins = []; if (Array.isArray(config.plugins)) { plugins = config.plugins.filter(Boolean); } else { plugins = 
Object.keys(config.plugins) .filter((plugin) => { return config.plugins[plugin] !== false ? plugin : '' }) .map((plugin) => { return load(plugin, config.plugins[plugin], file) }); } if (plugins.length && plugins.length > 0) { plugins.forEach((plugin, i) => { if (plugin.default) { plugin = plugin.default; } if (plugin.postcss === true) { plugin = plugin(); } else if (plugin.postcss) { plugin = plugin.postcss; } if ( // eslint-disable-next-line !( (typeof plugin === 'object' && Array.isArray(plugin.plugins)) || (typeof plugin === 'object' && plugin.postcssPlugin) || (typeof plugin === 'function') ) ) { throw new TypeError(`Invalid PostCSS Plugin found at: plugins[${i}]\n\n(@${file})`) } }); } return plugins }; var plugins_1 = plugins; const resolve = require$$0$4.resolve; const url$4 = require$$0$9; const config$1 = dist; const yaml = require$$3; const loadOptions = options_1; const loadPlugins = plugins_1; /* istanbul ignore next */ const interopRequireDefault = (obj) => obj && obj.__esModule ? obj : { default: obj }; /** * Process the result from cosmiconfig * * @param {Object} ctx Config Context * @param {Object} result Cosmiconfig result * * @return {Object} PostCSS Config */ const processResult = (ctx, result) => { const file = result.filepath || ''; let config = interopRequireDefault(result.config).default || {}; if (typeof config === 'function') { config = config(ctx); } else { config = Object.assign({}, config, ctx); } if (!config.plugins) { config.plugins = []; } return { plugins: loadPlugins(config, file), options: loadOptions(config, file), file } }; /** * Builds the Config Context * * @param {Object} ctx Config Context * * @return {Object} Config Context */ const createContext = (ctx) => { /** * @type {Object} * * @prop {String} cwd=process.cwd() Config search start location * @prop {String} env=process.env.NODE_ENV Config Enviroment, will be set to `development` by `postcss-load-config` if `process.env.NODE_ENV` is `undefined` */ ctx = Object.assign({ cwd: process.cwd(), env: process.env.NODE_ENV }, ctx); if (!ctx.env) { process.env.NODE_ENV = 'development'; } return ctx }; const importDefault = async filepath => { const module = await import(url$4.pathToFileURL(filepath).href); return module.default }; const addTypeScriptLoader = (options = {}, loader) => { const moduleName = 'postcss'; return { ...options, searchPlaces: [ ...(options.searchPlaces || []), 'package.json', `.${moduleName}rc`, `.${moduleName}rc.json`, `.${moduleName}rc.yaml`, `.${moduleName}rc.yml`, `.${moduleName}rc.ts`, `.${moduleName}rc.cts`, `.${moduleName}rc.js`, `.${moduleName}rc.cjs`, `.${moduleName}rc.mjs`, `${moduleName}.config.ts`, `${moduleName}.config.cts`, `${moduleName}.config.js`, `${moduleName}.config.cjs`, `${moduleName}.config.mjs` ], loaders: { ...options.loaders, '.yaml': (filepath, content) => yaml.parse(content), '.yml': (filepath, content) => yaml.parse(content), '.js': importDefault, '.cjs': importDefault, '.mjs': importDefault, '.ts': loader, '.cts': loader } } }; const withTypeScriptLoader = (rcFunc) => { return (ctx, path, options) => { return rcFunc(ctx, path, addTypeScriptLoader(options, (configFile) => { let registerer = { enabled () {} }; try { // Register TypeScript compiler instance registerer = __require('ts-node').register({ // transpile to cjs even if compilerOptions.module in tsconfig is not Node16/NodeNext. 
moduleTypes: { '**/*.cts': 'cjs' } }); return __require(configFile) } catch (err) { if (err.code === 'MODULE_NOT_FOUND') { throw new Error( `'ts-node' is required for the TypeScript configuration files. Make sure it is installed\nError: ${err.message}` ) } throw err } finally { registerer.enabled(false); } })) } }; /** * Load Config * * @method rc * * @param {Object} ctx Config Context * @param {String} path Config Path * @param {Object} options Config Options * * @return {Promise} config PostCSS Config */ const rc = withTypeScriptLoader((ctx, path, options) => { /** * @type {Object} The full Config Context */ ctx = createContext(ctx); /** * @type {String} `process.cwd()` */ path = path ? resolve(path) : process.cwd(); return config$1.lilconfig('postcss', options) .search(path) .then((result) => { if (!result) { throw new Error(`No PostCSS Config found in: ${path}`) } return processResult(ctx, result) }) }); /** * Autoload Config for PostCSS * * @author Michael Ciniawsky @michael-ciniawsky * @license MIT * * @module postcss-load-config * @version 2.1.0 * * @requires comsiconfig * @requires ./options * @requires ./plugins */ var src$1 = rc; var postcssrc = /*@__PURE__*/getDefaultExportFromCjs(src$1); // Copyright 2014, 2015, 2016, 2017, 2018, 2019, 2020, 2021, 2022, 2023 Simon Lydell // License: MIT. var Identifier, JSXIdentifier, JSXPunctuator, JSXString, JSXText, KeywordsWithExpressionAfter, KeywordsWithNoLineTerminatorAfter, LineTerminatorSequence, MultiLineComment, Newline, NumericLiteral, Punctuator, RegularExpressionLiteral, SingleLineComment, StringLiteral, Template, TokensNotPrecedingObjectLiteral, TokensPrecedingExpression, WhiteSpace; RegularExpressionLiteral = /\/(?![*\/])(?:\[(?:[^\]\\\n\r\u2028\u2029]+|\\.)*\]|[^\/\\\n\r\u2028\u2029]+|\\.)*(\/[$_\u200C\u200D\p{ID_Continue}]*|\\)?/yu; Punctuator = /--|\+\+|=>|\.{3}|\??\.(?!\d)|(?:&&|\|\||\?\?|[+\-%&|^]|\*{1,2}|<{1,2}|>{1,3}|!=?|={1,2}|\/(?![\/*]))=?|[?~,:;[\](){}]/y; Identifier = /(\x23?)(?=[$_\p{ID_Start}\\])(?:[$_\u200C\u200D\p{ID_Continue}]+|\\u[\da-fA-F]{4}|\\u\{[\da-fA-F]+\})+/yu; StringLiteral = /(['"])(?:[^'"\\\n\r]+|(?!\1)['"]|\\(?:\r\n|[^]))*(\1)?/y; NumericLiteral = /(?:0[xX][\da-fA-F](?:_?[\da-fA-F])*|0[oO][0-7](?:_?[0-7])*|0[bB][01](?:_?[01])*)n?|0n|[1-9](?:_?\d)*n|(?:(?:0(?!\d)|0\d*[89]\d*|[1-9](?:_?\d)*)(?:\.(?:\d(?:_?\d)*)?)?|\.\d(?:_?\d)*)(?:[eE][+-]?\d(?:_?\d)*)?|0[0-7]+/y; Template = /[`}](?:[^`\\$]+|\\[^]|\$(?!\{))*(`|\$\{)?/y; WhiteSpace = /[\t\v\f\ufeff\p{Zs}]+/yu; LineTerminatorSequence = /\r?\n|[\r\u2028\u2029]/y; MultiLineComment = /\/\*(?:[^*]+|\*(?!\/))*(\*\/)?/y; SingleLineComment = /\/\/.*/y; JSXPunctuator = /[<>.:={}]|\/(?![\/*])/y; JSXIdentifier = /[$_\p{ID_Start}][$_\u200C\u200D\p{ID_Continue}-]*/yu; JSXString = /(['"])(?:[^'"]+|(?!\1)['"])*(\1)?/y; JSXText = /[^<>{}]+/y; TokensPrecedingExpression = /^(?:[\/+-]|\.{3}|\?(?:InterpolationIn(?:JSX|Template)|NoLineTerminatorHere|NonExpressionParenEnd|UnaryIncDec))?$|[{}([,;<>=*%&|^!~?:]$/; TokensNotPrecedingObjectLiteral = /^(?:=>|[;\]){}]|else|\?(?:NoLineTerminatorHere|NonExpressionParenEnd))?$/; KeywordsWithExpressionAfter = /^(?:await|case|default|delete|do|else|instanceof|new|return|throw|typeof|void|yield)$/; KeywordsWithNoLineTerminatorAfter = /^(?:return|throw|yield)$/; Newline = RegExp(LineTerminatorSequence.source); var jsTokens_1 = function*(input, {jsx = false} = {}) { var braces, firstCodePoint, isExpression, lastIndex, lastSignificantToken, length, match, mode, nextLastIndex, nextLastSignificantToken, parenNesting, postfixIncDec, 
punctuator, stack; ({length} = input); lastIndex = 0; lastSignificantToken = ""; stack = [ {tag: "JS"} ]; braces = []; parenNesting = 0; postfixIncDec = false; while (lastIndex < length) { mode = stack[stack.length - 1]; switch (mode.tag) { case "JS": case "JSNonExpressionParen": case "InterpolationInTemplate": case "InterpolationInJSX": if (input[lastIndex] === "/" && (TokensPrecedingExpression.test(lastSignificantToken) || KeywordsWithExpressionAfter.test(lastSignificantToken))) { RegularExpressionLiteral.lastIndex = lastIndex; if (match = RegularExpressionLiteral.exec(input)) { lastIndex = RegularExpressionLiteral.lastIndex; lastSignificantToken = match[0]; postfixIncDec = true; yield ({ type: "RegularExpressionLiteral", value: match[0], closed: match[1] !== void 0 && match[1] !== "\\" }); continue; } } Punctuator.lastIndex = lastIndex; if (match = Punctuator.exec(input)) { punctuator = match[0]; nextLastIndex = Punctuator.lastIndex; nextLastSignificantToken = punctuator; switch (punctuator) { case "(": if (lastSignificantToken === "?NonExpressionParenKeyword") { stack.push({ tag: "JSNonExpressionParen", nesting: parenNesting }); } parenNesting++; postfixIncDec = false; break; case ")": parenNesting--; postfixIncDec = true; if (mode.tag === "JSNonExpressionParen" && parenNesting === mode.nesting) { stack.pop(); nextLastSignificantToken = "?NonExpressionParenEnd"; postfixIncDec = false; } break; case "{": Punctuator.lastIndex = 0; isExpression = !TokensNotPrecedingObjectLiteral.test(lastSignificantToken) && (TokensPrecedingExpression.test(lastSignificantToken) || KeywordsWithExpressionAfter.test(lastSignificantToken)); braces.push(isExpression); postfixIncDec = false; break; case "}": switch (mode.tag) { case "InterpolationInTemplate": if (braces.length === mode.nesting) { Template.lastIndex = lastIndex; match = Template.exec(input); lastIndex = Template.lastIndex; lastSignificantToken = match[0]; if (match[1] === "${") { lastSignificantToken = "?InterpolationInTemplate"; postfixIncDec = false; yield ({ type: "TemplateMiddle", value: match[0] }); } else { stack.pop(); postfixIncDec = true; yield ({ type: "TemplateTail", value: match[0], closed: match[1] === "`" }); } continue; } break; case "InterpolationInJSX": if (braces.length === mode.nesting) { stack.pop(); lastIndex += 1; lastSignificantToken = "}"; yield ({ type: "JSXPunctuator", value: "}" }); continue; } } postfixIncDec = braces.pop(); nextLastSignificantToken = postfixIncDec ? "?ExpressionBraceEnd" : "}"; break; case "]": postfixIncDec = true; break; case "++": case "--": nextLastSignificantToken = postfixIncDec ? "?PostfixIncDec" : "?UnaryIncDec"; break; case "<": if (jsx && (TokensPrecedingExpression.test(lastSignificantToken) || KeywordsWithExpressionAfter.test(lastSignificantToken))) { stack.push({tag: "JSXTag"}); lastIndex += 1; lastSignificantToken = "<"; yield ({ type: "JSXPunctuator", value: punctuator }); continue; } postfixIncDec = false; break; default: postfixIncDec = false; } lastIndex = nextLastIndex; lastSignificantToken = nextLastSignificantToken; yield ({ type: "Punctuator", value: punctuator }); continue; } Identifier.lastIndex = lastIndex; if (match = Identifier.exec(input)) { lastIndex = Identifier.lastIndex; nextLastSignificantToken = match[0]; switch (match[0]) { case "for": case "if": case "while": case "with": if (lastSignificantToken !== "." 
&& lastSignificantToken !== "?.") { nextLastSignificantToken = "?NonExpressionParenKeyword"; } } lastSignificantToken = nextLastSignificantToken; postfixIncDec = !KeywordsWithExpressionAfter.test(match[0]); yield ({ type: match[1] === "#" ? "PrivateIdentifier" : "IdentifierName", value: match[0] }); continue; } StringLiteral.lastIndex = lastIndex; if (match = StringLiteral.exec(input)) { lastIndex = StringLiteral.lastIndex; lastSignificantToken = match[0]; postfixIncDec = true; yield ({ type: "StringLiteral", value: match[0], closed: match[2] !== void 0 }); continue; } NumericLiteral.lastIndex = lastIndex; if (match = NumericLiteral.exec(input)) { lastIndex = NumericLiteral.lastIndex; lastSignificantToken = match[0]; postfixIncDec = true; yield ({ type: "NumericLiteral", value: match[0] }); continue; } Template.lastIndex = lastIndex; if (match = Template.exec(input)) { lastIndex = Template.lastIndex; lastSignificantToken = match[0]; if (match[1] === "${") { lastSignificantToken = "?InterpolationInTemplate"; stack.push({ tag: "InterpolationInTemplate", nesting: braces.length }); postfixIncDec = false; yield ({ type: "TemplateHead", value: match[0] }); } else { postfixIncDec = true; yield ({ type: "NoSubstitutionTemplate", value: match[0], closed: match[1] === "`" }); } continue; } break; case "JSXTag": case "JSXTagEnd": JSXPunctuator.lastIndex = lastIndex; if (match = JSXPunctuator.exec(input)) { lastIndex = JSXPunctuator.lastIndex; nextLastSignificantToken = match[0]; switch (match[0]) { case "<": stack.push({tag: "JSXTag"}); break; case ">": stack.pop(); if (lastSignificantToken === "/" || mode.tag === "JSXTagEnd") { nextLastSignificantToken = "?JSX"; postfixIncDec = true; } else { stack.push({tag: "JSXChildren"}); } break; case "{": stack.push({ tag: "InterpolationInJSX", nesting: braces.length }); nextLastSignificantToken = "?InterpolationInJSX"; postfixIncDec = false; break; case "/": if (lastSignificantToken === "<") { stack.pop(); if (stack[stack.length - 1].tag === "JSXChildren") { stack.pop(); } stack.push({tag: "JSXTagEnd"}); } } lastSignificantToken = nextLastSignificantToken; yield ({ type: "JSXPunctuator", value: match[0] }); continue; } JSXIdentifier.lastIndex = lastIndex; if (match = JSXIdentifier.exec(input)) { lastIndex = JSXIdentifier.lastIndex; lastSignificantToken = match[0]; yield ({ type: "JSXIdentifier", value: match[0] }); continue; } JSXString.lastIndex = lastIndex; if (match = JSXString.exec(input)) { lastIndex = JSXString.lastIndex; lastSignificantToken = match[0]; yield ({ type: "JSXString", value: match[0], closed: match[2] !== void 0 }); continue; } break; case "JSXChildren": JSXText.lastIndex = lastIndex; if (match = JSXText.exec(input)) { lastIndex = JSXText.lastIndex; lastSignificantToken = match[0]; yield ({ type: "JSXText", value: match[0] }); continue; } switch (input[lastIndex]) { case "<": stack.push({tag: "JSXTag"}); lastIndex++; lastSignificantToken = "<"; yield ({ type: "JSXPunctuator", value: "<" }); continue; case "{": stack.push({ tag: "InterpolationInJSX", nesting: braces.length }); lastIndex++; lastSignificantToken = "?InterpolationInJSX"; postfixIncDec = false; yield ({ type: "JSXPunctuator", value: "{" }); continue; } } WhiteSpace.lastIndex = lastIndex; if (match = WhiteSpace.exec(input)) { lastIndex = WhiteSpace.lastIndex; yield ({ type: "WhiteSpace", value: match[0] }); continue; } LineTerminatorSequence.lastIndex = lastIndex; if (match = LineTerminatorSequence.exec(input)) { lastIndex = LineTerminatorSequence.lastIndex; postfixIncDec = 
false; if (KeywordsWithNoLineTerminatorAfter.test(lastSignificantToken)) { lastSignificantToken = "?NoLineTerminatorHere"; } yield ({ type: "LineTerminatorSequence", value: match[0] }); continue; } MultiLineComment.lastIndex = lastIndex; if (match = MultiLineComment.exec(input)) { lastIndex = MultiLineComment.lastIndex; if (Newline.test(match[0])) { postfixIncDec = false; if (KeywordsWithNoLineTerminatorAfter.test(lastSignificantToken)) { lastSignificantToken = "?NoLineTerminatorHere"; } } yield ({ type: "MultiLineComment", value: match[0], closed: match[1] !== void 0 }); continue; } SingleLineComment.lastIndex = lastIndex; if (match = SingleLineComment.exec(input)) { lastIndex = SingleLineComment.lastIndex; postfixIncDec = false; yield ({ type: "SingleLineComment", value: match[0] }); continue; } firstCodePoint = String.fromCodePoint(input.codePointAt(lastIndex)); lastIndex += firstCodePoint.length; lastSignificantToken = firstCodePoint; postfixIncDec = false; yield ({ type: mode.tag.startsWith("JSX") ? "JSXInvalid" : "Invalid", value: firstCodePoint }); } return void 0; }; var jsTokens = /*@__PURE__*/getDefaultExportFromCjs(jsTokens_1); function stripLiteralJsTokens(code, options) { const FILL = options?.fillChar ?? " "; const FILL_COMMENT = " "; let result = ""; const filter = options?.filter ?? (() => true); const tokens = []; for (const token of jsTokens(code, { jsx: false })) { tokens.push(token); if (token.type === "SingleLineComment") { result += FILL_COMMENT.repeat(token.value.length); continue; } if (token.type === "MultiLineComment") { result += token.value.replace(/[^\n]/g, FILL_COMMENT); continue; } if (token.type === "StringLiteral") { const body = token.value.slice(1, -1); if (filter(body)) { result += token.value[0] + FILL.repeat(body.length) + token.value[token.value.length - 1]; continue; } } if (token.type === "NoSubstitutionTemplate") { const body = token.value.slice(1, -1); if (filter(body)) { result += `\`${body.replace(/[^\n]/g, FILL)}\``; continue; } } if (token.type === "RegularExpressionLiteral") { const body = token.value; if (filter(body)) { result += body.replace(/\/(.*)\/(\w?)$/g, (_, $1, $2) => `/${FILL.repeat($1.length)}/${$2}`); continue; } } if (token.type === "TemplateHead") { const body = token.value.slice(1, -2); if (filter(body)) { result += `\`${body.replace(/[^\n]/g, FILL)}\${`; continue; } } if (token.type === "TemplateTail") { const body = token.value.slice(0, -2); if (filter(body)) { result += `}${body.replace(/[^\n]/g, FILL)}\``; continue; } } if (token.type === "TemplateMiddle") { const body = token.value.slice(1, -2); if (filter(body)) { result += `}${body.replace(/[^\n]/g, FILL)}\${`; continue; } } result += token.value; } return { result, tokens }; } function stripLiteral(code, options) { return stripLiteralDetailed(code, options).result; } function stripLiteralDetailed(code, options) { return stripLiteralJsTokens(code, options); } var main$1 = {exports: {}}; var name = "dotenv"; var version$2 = "16.4.5"; var description = "Loads environment variables from .env file"; var main = "lib/main.js"; var types$2 = "lib/main.d.ts"; var exports = { ".": { types: "./lib/main.d.ts", require: "./lib/main.js", "default": "./lib/main.js" }, "./config": "./config.js", "./config.js": "./config.js", "./lib/env-options": "./lib/env-options.js", "./lib/env-options.js": "./lib/env-options.js", "./lib/cli-options": "./lib/cli-options.js", "./lib/cli-options.js": "./lib/cli-options.js", "./package.json": "./package.json" }; var scripts = { "dts-check": "tsc 
--project tests/types/tsconfig.json", lint: "standard", "lint-readme": "standard-markdown", pretest: "npm run lint && npm run dts-check", test: "tap tests/*.js --100 -Rspec", "test:coverage": "tap --coverage-report=lcov", prerelease: "npm test", release: "standard-version" }; var repository = { type: "git", url: "git://github.com/motdotla/dotenv.git" }; var funding = "https://dotenvx.com"; var keywords$2 = [ "dotenv", "env", ".env", "environment", "variables", "config", "settings" ]; var readmeFilename = "README.md"; var license = "BSD-2-Clause"; var devDependencies = { "@definitelytyped/dtslint": "^0.0.133", "@types/node": "^18.11.3", decache: "^4.6.1", sinon: "^14.0.1", standard: "^17.0.0", "standard-markdown": "^7.1.0", "standard-version": "^9.5.0", tap: "^16.3.0", tar: "^6.1.11", typescript: "^4.8.4" }; var engines = { node: ">=12" }; var browser$1 = { fs: false }; var require$$4 = { name: name, version: version$2, description: description, main: main, types: types$2, exports: exports, scripts: scripts, repository: repository, funding: funding, keywords: keywords$2, readmeFilename: readmeFilename, license: license, devDependencies: devDependencies, engines: engines, browser: browser$1 }; const fs$9 = require$$0__default; const path$9 = require$$0$4; const os$2 = require$$2; const crypto$1 = require$$0$a; const packageJson = require$$4; const version$1 = packageJson.version; const LINE = /(?:^|^)\s*(?:export\s+)?([\w.-]+)(?:\s*=\s*?|:\s+?)(\s*'(?:\\'|[^'])*'|\s*"(?:\\"|[^"])*"|\s*`(?:\\`|[^`])*`|[^#\r\n]+)?\s*(?:#.*)?(?:$|$)/mg; // Parse src into an Object function parse$a (src) { const obj = {}; // Convert buffer to string let lines = src.toString(); // Convert line breaks to same format lines = lines.replace(/\r\n?/mg, '\n'); let match; while ((match = LINE.exec(lines)) != null) { const key = match[1]; // Default undefined or null to empty string let value = (match[2] || ''); // Remove whitespace value = value.trim(); // Check if double quoted const maybeQuote = value[0]; // Remove surrounding quotes value = value.replace(/^(['"`])([\s\S]*)\1$/mg, '$2'); // Expand newlines if double quoted if (maybeQuote === '"') { value = value.replace(/\\n/g, '\n'); value = value.replace(/\\r/g, '\r'); } // Add to object obj[key] = value; } return obj } function _parseVault (options) { const vaultPath = _vaultPath(options); // Parse .env.vault const result = DotenvModule.configDotenv({ path: vaultPath }); if (!result.parsed) { const err = new Error(`MISSING_DATA: Cannot parse ${vaultPath} for an unknown reason`); err.code = 'MISSING_DATA'; throw err } // handle scenario for comma separated keys - for use with key rotation // example: DOTENV_KEY="dotenv://:key_1234@dotenvx.com/vault/.env.vault?environment=prod,dotenv://:key_7890@dotenvx.com/vault/.env.vault?environment=prod" const keys = _dotenvKey(options).split(','); const length = keys.length; let decrypted; for (let i = 0; i < length; i++) { try { // Get full key const key = keys[i].trim(); // Get instructions for decrypt const attrs = _instructions(result, key); // Decrypt decrypted = DotenvModule.decrypt(attrs.ciphertext, attrs.key); break } catch (error) { // last key if (i + 1 >= length) { throw error } // try next key } } // Parse decrypted .env string return DotenvModule.parse(decrypted) } function _log (message) { console.log(`[dotenv@${version$1}][INFO] ${message}`); } function _warn (message) { console.log(`[dotenv@${version$1}][WARN] ${message}`); } function _debug (message) { console.log(`[dotenv@${version$1}][DEBUG] ${message}`); } 
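// Illustrative sketch of the .env parser above (uses this bundle's internal
// binding `parse$a`, exported below as `DotenvModule.parse`):
//
//   parse$a('GREETING="hello\\nworld" # inline comment\nEMPTY=\n')
//   // -> { GREETING: 'hello\nworld', EMPTY: '' }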
function _dotenvKey (options) { // prioritize developer directly setting options.DOTENV_KEY if (options && options.DOTENV_KEY && options.DOTENV_KEY.length > 0) { return options.DOTENV_KEY } // secondary infra already contains a DOTENV_KEY environment variable if (process.env.DOTENV_KEY && process.env.DOTENV_KEY.length > 0) { return process.env.DOTENV_KEY } // fallback to empty string return '' } function _instructions (result, dotenvKey) { // Parse DOTENV_KEY. Format is a URI let uri; try { uri = new URL(dotenvKey); } catch (error) { if (error.code === 'ERR_INVALID_URL') { const err = new Error('INVALID_DOTENV_KEY: Wrong format. Must be in valid uri format like dotenv://:key_1234@dotenvx.com/vault/.env.vault?environment=development'); err.code = 'INVALID_DOTENV_KEY'; throw err } throw error } // Get decrypt key const key = uri.password; if (!key) { const err = new Error('INVALID_DOTENV_KEY: Missing key part'); err.code = 'INVALID_DOTENV_KEY'; throw err } // Get environment const environment = uri.searchParams.get('environment'); if (!environment) { const err = new Error('INVALID_DOTENV_KEY: Missing environment part'); err.code = 'INVALID_DOTENV_KEY'; throw err } // Get ciphertext payload const environmentKey = `DOTENV_VAULT_${environment.toUpperCase()}`; const ciphertext = result.parsed[environmentKey]; // DOTENV_VAULT_PRODUCTION if (!ciphertext) { const err = new Error(`NOT_FOUND_DOTENV_ENVIRONMENT: Cannot locate environment ${environmentKey} in your .env.vault file.`); err.code = 'NOT_FOUND_DOTENV_ENVIRONMENT'; throw err } return { ciphertext, key } } function _vaultPath (options) { let possibleVaultPath = null; if (options && options.path && options.path.length > 0) { if (Array.isArray(options.path)) { for (const filepath of options.path) { if (fs$9.existsSync(filepath)) { possibleVaultPath = filepath.endsWith('.vault') ? filepath : `${filepath}.vault`; } } } else { possibleVaultPath = options.path.endsWith('.vault') ? options.path : `${options.path}.vault`; } } else { possibleVaultPath = path$9.resolve(process.cwd(), '.env.vault'); } if (fs$9.existsSync(possibleVaultPath)) { return possibleVaultPath } return null } function _resolveHome (envPath) { return envPath[0] === '~' ? path$9.join(os$2.homedir(), envPath.slice(1)) : envPath } function _configVault (options) { _log('Loading env from encrypted .env.vault'); const parsed = DotenvModule._parseVault(options); let processEnv = process.env; if (options && options.processEnv != null) { processEnv = options.processEnv; } DotenvModule.populate(processEnv, parsed, options); return { parsed } } function configDotenv (options) { const dotenvPath = path$9.resolve(process.cwd(), '.env'); let encoding = 'utf8'; const debug = Boolean(options && options.debug); if (options && options.encoding) { encoding = options.encoding; } else { if (debug) { _debug('No encoding is specified. UTF-8 is used by default'); } } let optionPaths = [dotenvPath]; // default, look for .env if (options && options.path) { if (!Array.isArray(options.path)) { optionPaths = [_resolveHome(options.path)]; } else { optionPaths = []; // reset default for (const filepath of options.path) { optionPaths.push(_resolveHome(filepath)); } } } // Build the parsed data in a temporary object (because we need to return it). Once we have the final // parsed data, we will combine it with process.env (or options.processEnv if provided). 
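// A sketch of the multi-path behaviour below (illustrative only; the file names are hypothetical):
//
//   configDotenv({ path: ['.env', '.env.local'] })
//
// parses each file in order into `parsedAll`; since `populate` only assigns keys that are not already present (unless `override` is true), the first file that defines a key wins, and the merged result is then copied onto `process.env` (or `options.processEnv` if provided).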
let lastError; const parsedAll = {}; for (const path of optionPaths) { try { // Specifying an encoding returns a string instead of a buffer const parsed = DotenvModule.parse(fs$9.readFileSync(path, { encoding })); DotenvModule.populate(parsedAll, parsed, options); } catch (e) { if (debug) { _debug(`Failed to load ${path} ${e.message}`); } lastError = e; } } let processEnv = process.env; if (options && options.processEnv != null) { processEnv = options.processEnv; } DotenvModule.populate(processEnv, parsedAll, options); if (lastError) { return { parsed: parsedAll, error: lastError } } else { return { parsed: parsedAll } } } // Populates process.env from .env file function config (options) { // fallback to original dotenv if DOTENV_KEY is not set if (_dotenvKey(options).length === 0) { return DotenvModule.configDotenv(options) } const vaultPath = _vaultPath(options); // dotenvKey exists but .env.vault file does not exist if (!vaultPath) { _warn(`You set DOTENV_KEY but you are missing a .env.vault file at ${vaultPath}. Did you forget to build it?`); return DotenvModule.configDotenv(options) } return DotenvModule._configVault(options) } function decrypt (encrypted, keyStr) { const key = Buffer.from(keyStr.slice(-64), 'hex'); let ciphertext = Buffer.from(encrypted, 'base64'); const nonce = ciphertext.subarray(0, 12); const authTag = ciphertext.subarray(-16); ciphertext = ciphertext.subarray(12, -16); try { const aesgcm = crypto$1.createDecipheriv('aes-256-gcm', key, nonce); aesgcm.setAuthTag(authTag); return `${aesgcm.update(ciphertext)}${aesgcm.final()}` } catch (error) { const isRange = error instanceof RangeError; const invalidKeyLength = error.message === 'Invalid key length'; const decryptionFailed = error.message === 'Unsupported state or unable to authenticate data'; if (isRange || invalidKeyLength) { const err = new Error('INVALID_DOTENV_KEY: It must be 64 characters long (or more)'); err.code = 'INVALID_DOTENV_KEY'; throw err } else if (decryptionFailed) { const err = new Error('DECRYPTION_FAILED: Please check your DOTENV_KEY'); err.code = 'DECRYPTION_FAILED'; throw err } else { throw error } } } // Populate process.env with parsed values function populate (processEnv, parsed, options = {}) { const debug = Boolean(options && options.debug); const override = Boolean(options && options.override); if (typeof parsed !== 'object') { const err = new Error('OBJECT_REQUIRED: Please check the processEnv argument being passed to populate'); err.code = 'OBJECT_REQUIRED'; throw err } // Set process.env for (const key of Object.keys(parsed)) { if (Object.prototype.hasOwnProperty.call(processEnv, key)) { if (override === true) { processEnv[key] = parsed[key]; } if (debug) { if (override === true) { _debug(`"${key}" is already defined and WAS overwritten`); } else { _debug(`"${key}" is already defined and was NOT overwritten`); } } } else { processEnv[key] = parsed[key]; } } } const DotenvModule = { configDotenv, _configVault, _parseVault, config, decrypt, parse: parse$a, populate }; main$1.exports.configDotenv = DotenvModule.configDotenv; main$1.exports._configVault = DotenvModule._configVault; main$1.exports._parseVault = DotenvModule._parseVault; main$1.exports.config = DotenvModule.config; main$1.exports.decrypt = DotenvModule.decrypt; var parse_1$1 = main$1.exports.parse = DotenvModule.parse; main$1.exports.populate = DotenvModule.populate; main$1.exports = DotenvModule; // * / // * (\\)? # is it escaped with a backslash? 
// * (\$) # literal $ // * (?!\() # shouldnt be followed by parenthesis // * (\{?) # first brace wrap opening // * ([\w.]+) # key // * (?::-((?:\$\{(?:\$\{(?:\$\{[^}]*\}|[^}])*}|[^}])*}|[^}])+))? # optional default nested 3 times // * (\}?) # last brace warp closing // * /xi const DOTENV_SUBSTITUTION_REGEX = /(\\)?(\$)(?!\()(\{?)([\w.]+)(?::?-((?:\$\{(?:\$\{(?:\$\{[^}]*\}|[^}])*}|[^}])*}|[^}])+))?(\}?)/gi; function _resolveEscapeSequences (value) { return value.replace(/\\\$/g, '$') } function interpolate (value, processEnv, parsed) { return value.replace(DOTENV_SUBSTITUTION_REGEX, (match, escaped, dollarSign, openBrace, key, defaultValue, closeBrace) => { if (escaped === '\\') { return match.slice(1) } else { if (processEnv[key]) { if (processEnv[key] === parsed[key]) { return processEnv[key] } else { // scenario: PASSWORD_EXPAND_NESTED=${PASSWORD_EXPAND} return interpolate(processEnv[key], processEnv, parsed) } } if (parsed[key]) { // avoid recursion from EXPAND_SELF=$EXPAND_SELF if (parsed[key] === value) { return parsed[key] } else { return interpolate(parsed[key], processEnv, parsed) } } if (defaultValue) { if (defaultValue.startsWith('$')) { return interpolate(defaultValue, processEnv, parsed) } else { return defaultValue } } return '' } }) } function expand (options) { let processEnv = process.env; if (options && options.processEnv != null) { processEnv = options.processEnv; } for (const key in options.parsed) { let value = options.parsed[key]; const inProcessEnv = Object.prototype.hasOwnProperty.call(processEnv, key); if (inProcessEnv) { if (processEnv[key] === options.parsed[key]) { // assume was set to processEnv from the .env file if the values match and therefore interpolate value = interpolate(value, processEnv, options.parsed); } else { // do not interpolate - assume processEnv had the intended value even if containing a $. value = processEnv[key]; } } else { // not inProcessEnv so assume interpolation for this .env key value = interpolate(value, processEnv, options.parsed); } options.parsed[key] = _resolveEscapeSequences(value); } for (const processKey in options.parsed) { processEnv[processKey] = options.parsed[processKey]; } return options } var expand_1 = expand; function getEnvFilesForMode(mode, envDir) { return [ /** default file */ `.env`, /** local file */ `.env.local`, /** mode file */ `.env.${mode}`, /** mode local file */ `.env.${mode}.local`, ].map((file) => normalizePath$3(path$o.join(envDir, file))); } function loadEnv(mode, envDir, prefixes = 'VITE_') { if (mode === 'local') { throw new Error(`"local" cannot be used as a mode name because it conflicts with ` + `the .local postfix for .env files.`); } prefixes = arraify(prefixes); const env = {}; const envFiles = getEnvFilesForMode(mode, envDir); const parsed = Object.fromEntries(envFiles.flatMap((filePath) => { if (!tryStatSync(filePath)?.isFile()) return []; return Object.entries(parse_1$1(fs$l.readFileSync(filePath))); })); // test NODE_ENV override before expand as otherwise process.env.NODE_ENV would override this if (parsed.NODE_ENV && process.env.VITE_USER_NODE_ENV === undefined) { process.env.VITE_USER_NODE_ENV = parsed.NODE_ENV; } // support BROWSER and BROWSER_ARGS env variables if (parsed.BROWSER && process.env.BROWSER === undefined) { process.env.BROWSER = parsed.BROWSER; } if (parsed.BROWSER_ARGS && process.env.BROWSER_ARGS === undefined) { process.env.BROWSER_ARGS = parsed.BROWSER_ARGS; } // let environment variables use each other. 
make a copy of `process.env` so that `dotenv-expand` // doesn't re-assign the expanded values to the global `process.env`. const processEnv = { ...process.env }; expand_1({ parsed, processEnv }); // only keys that start with prefix are exposed to client for (const [key, value] of Object.entries(parsed)) { if (prefixes.some((prefix) => key.startsWith(prefix))) { env[key] = value; } } // check if there are actual env variables starting with VITE_* // these are typically provided inline and should be prioritized for (const key in process.env) { if (prefixes.some((prefix) => key.startsWith(prefix))) { env[key] = process.env[key]; } } return env; } function resolveEnvPrefix({ envPrefix = 'VITE_', }) { envPrefix = arraify(envPrefix); if (envPrefix.includes('')) { throw new Error(`envPrefix option contains value '', which could lead unexpected exposure of sensitive information.`); } return envPrefix; } const modulePreloadPolyfillId = 'vite/modulepreload-polyfill'; const resolvedModulePreloadPolyfillId = '\0' + modulePreloadPolyfillId + '.js'; function modulePreloadPolyfillPlugin(config) { // `isModernFlag` is only available during build since it is resolved by `vite:build-import-analysis` const skip = config.command !== 'build' || config.build.ssr; let polyfillString; return { name: 'vite:modulepreload-polyfill', resolveId(id) { if (id === modulePreloadPolyfillId) { return resolvedModulePreloadPolyfillId; } }, load(id) { if (id === resolvedModulePreloadPolyfillId) { if (skip) { return ''; } if (!polyfillString) { polyfillString = `${isModernFlag}&&(${polyfill.toString()}());`; } return { code: polyfillString, moduleSideEffects: true }; } }, }; } function polyfill() { const relList = document.createElement('link').relList; if (relList && relList.supports && relList.supports('modulepreload')) { return; } for (const link of document.querySelectorAll('link[rel="modulepreload"]')) { processPreload(link); } new MutationObserver((mutations) => { for (const mutation of mutations) { if (mutation.type !== 'childList') { continue; } for (const node of mutation.addedNodes) { if (node.tagName === 'LINK' && node.rel === 'modulepreload') processPreload(node); } } }).observe(document, { childList: true, subtree: true }); function getFetchOpts(link) { const fetchOpts = {}; if (link.integrity) fetchOpts.integrity = link.integrity; if (link.referrerPolicy) fetchOpts.referrerPolicy = link.referrerPolicy; if (link.crossOrigin === 'use-credentials') fetchOpts.credentials = 'include'; else if (link.crossOrigin === 'anonymous') fetchOpts.credentials = 'omit'; else fetchOpts.credentials = 'same-origin'; return fetchOpts; } function processPreload(link) { if (link.ep) // ep marker = processed return; link.ep = true; // prepopulate the load record const fetchOpts = getFetchOpts(link); fetch(link.href, fetchOpts); } } const htmlProxyRE$1 = /\?html-proxy=?(?:&inline-css)?(?:&style-attr)?&index=(\d+)\.(js|css)$/; const isHtmlProxyRE = /\?html-proxy\b/; const inlineCSSRE$1 = /__VITE_INLINE_CSS__([a-z\d]{8}_\d+)__/g; // Do not allow preceding '.', but do allow preceding '...' 
for spread operations const inlineImportRE = /(?<!(?<!\.\.)\.)\bimport\s*\(("(?:[^"]|(?<=\\)")*"|'(?:[^']|(?<=\\)')*')\)/dg; const htmlLangRE = /\.(?:html|htm)$/; const importMapRE = /[ \t]*<script[^>]*type\s*=\s*(?:"importmap"|'importmap'|importmap)[^>]*>.*?<\/script>/is; const moduleScriptRE = /[ \t]*<script[^>]*type\s*=\s*(?:"module"|'module'|module)[^>]*>/i; const modulePreloadLinkRE = /[ \t]*<link[^>]*rel\s*=\s*(?:"modulepreload"|'modulepreload'|modulepreload)[\s\S]*?\/>/i; const importMapAppendRE = new RegExp([moduleScriptRE, modulePreloadLinkRE].map((r) => r.source).join('|'), 'i'); const isHTMLProxy = (id) => isHtmlProxyRE.test(id); const isHTMLRequest = (request) => htmlLangRE.test(request); // HTML Proxy Caches are stored by config -> filePath -> index const htmlProxyMap = new WeakMap(); // HTML Proxy Transform result are stored by config // `${hash(importer)}_${query.index}` -> transformed css code // PS: key like `hash(/vite/playground/assets/index.html)_1`) const htmlProxyResult = new Map(); function htmlInlineProxyPlugin(config) { // Should do this when `constructor` rather than when `buildStart`, // `buildStart` will be triggered multiple times then the cached result will be emptied. // https://github.com/vitejs/vite/issues/6372 htmlProxyMap.set(config, new Map()); return { name: 'vite:html-inline-proxy', resolveId(id) { if (isHTMLProxy(id)) { return id; } }, load(id) { const proxyMatch = id.match(htmlProxyRE$1); if (proxyMatch) { const index = Number(proxyMatch[1]); const file = cleanUrl(id); const url = file.replace(normalizePath$3(config.root), ''); const result = htmlProxyMap.get(config).get(url)?.[index]; if (result) { return result; } else { throw new Error(`No matching HTML proxy module found from ${id}`); } } }, }; } function addToHTMLProxyCache(config, filePath, index, result) { if (!htmlProxyMap.get(config)) { htmlProxyMap.set(config, new Map()); } if (!htmlProxyMap.get(config).get(filePath)) { htmlProxyMap.get(config).set(filePath, []); } htmlProxyMap.get(config).get(filePath)[index] = result; } function addToHTMLProxyTransformResult(hash, code) { htmlProxyResult.set(hash, code); } // this extends the config in @vue/compiler-sfc with <link href> const assetAttrsConfig = { link: ['href'], video: ['src', 'poster'], source: ['src', 'srcset'], img: ['src', 'srcset'], image: ['xlink:href', 'href'], use: ['xlink:href', 'href'], }; // Some `<link rel>` elements should not be inlined in build.
Excluding: // - `shortcut` : only valid for IE <9, use `icon` // - `mask-icon` : deprecated since Safari 12 (for pinned tabs) // - `apple-touch-icon-precomposed` : only valid for iOS <7 (for avoiding gloss effect) const noInlineLinkRels = new Set([ 'icon', 'apple-touch-icon', 'apple-touch-startup-image', 'manifest', ]); const isAsyncScriptMap = new WeakMap(); function nodeIsElement(node) { return node.nodeName[0] !== '#'; } function traverseNodes(node, visitor) { visitor(node); if (nodeIsElement(node) || node.nodeName === '#document' || node.nodeName === '#document-fragment') { node.childNodes.forEach((childNode) => traverseNodes(childNode, visitor)); } } async function traverseHtml(html, filePath, visitor) { // lazy load compiler const { parse } = await import('./dep-kjUoH5nk.js'); const ast = parse(html, { scriptingEnabled: false, sourceCodeLocationInfo: true, onParseError: (e) => { handleParseError(e, html, filePath); }, }); traverseNodes(ast, visitor); } function getScriptInfo(node) { let src; let sourceCodeLocation; let isModule = false; let isAsync = false; for (const p of node.attrs) { if (p.prefix !== undefined) continue; if (p.name === 'src') { if (!src) { src = p; sourceCodeLocation = node.sourceCodeLocation?.attrs['src']; } } else if (p.name === 'type' && p.value && p.value === 'module') { isModule = true; } else if (p.name === 'async') { isAsync = true; } } return { src, sourceCodeLocation, isModule, isAsync }; } const attrValueStartRE = /=\s*(.)/; function overwriteAttrValue(s, sourceCodeLocation, newValue) { const srcString = s.slice(sourceCodeLocation.startOffset, sourceCodeLocation.endOffset); const valueStart = srcString.match(attrValueStartRE); if (!valueStart) { // overwrite attr value can only be called for a well-defined value throw new Error(`[vite:html] internal error, failed to overwrite attribute value`); } const wrapOffset = valueStart[1] === '"' || valueStart[1] === "'" ? 
1 : 0; const valueOffset = valueStart.index + valueStart[0].length - 1; s.update(sourceCodeLocation.startOffset + valueOffset + wrapOffset, sourceCodeLocation.endOffset - wrapOffset, newValue); return s; } /** * Format parse5 @type {ParserError} to @type {RollupError} */ function formatParseError(parserError, id, html) { const formattedError = { code: parserError.code, message: `parse5 error code ${parserError.code}`, frame: generateCodeFrame(html, parserError.startOffset, parserError.endOffset), loc: { file: id, line: parserError.startLine, column: parserError.startCol, }, }; return formattedError; } function handleParseError(parserError, html, filePath) { switch (parserError.code) { case 'missing-doctype': // ignore missing DOCTYPE return; case 'abandoned-head-element-child': // Accept elements without closing tag in return; case 'duplicate-attribute': // Accept duplicate attributes #9566 // The first attribute is used, browsers silently ignore duplicates return; case 'non-void-html-element-start-tag-with-trailing-solidus': // Allow self closing on non-void elements #10439 return; } const parseError = formatParseError(parserError, filePath, html); throw new Error(`Unable to parse HTML; ${parseError.message}\n` + ` at ${parseError.loc.file}:${parseError.loc.line}:${parseError.loc.column}\n` + `${parseError.frame}`); } /** * Compiles index.html into an entry js module */ function buildHtmlPlugin(config) { const [preHooks, normalHooks, postHooks] = resolveHtmlTransforms(config.plugins, config.logger); preHooks.unshift(preImportMapHook(config)); preHooks.push(htmlEnvHook(config)); postHooks.push(postImportMapHook()); const processedHtml = new Map(); const isExcludedUrl = (url) => url[0] === '#' || isExternalUrl(url) || isDataUrl(url); // Same reason with `htmlInlineProxyPlugin` isAsyncScriptMap.set(config, new Map()); return { name: 'vite:build-html', async transform(html, id) { if (id.endsWith('.html')) { id = normalizePath$3(id); const relativeUrlPath = path$o.posix.relative(config.root, id); const publicPath = `/${relativeUrlPath}`; const publicBase = getBaseInHTML(relativeUrlPath, config); const publicToRelative = (filename, importer) => publicBase + filename; const toOutputPublicFilePath = (url) => toOutputFilePathInHtml(url.slice(1), 'public', relativeUrlPath, 'html', config, publicToRelative); // Determines true start position for the node, either the < character // position, or the newline at the end of the previous line's node. const nodeStartWithLeadingWhitespace = (node) => { const startOffset = node.sourceCodeLocation.startOffset; if (startOffset === 0) return 0; // Gets the offset for the start of the line including the // newline trailing the previous node const lineStartOffset = startOffset - node.sourceCodeLocation.startCol; // // // // Here we want to target the newline at the end of the previous line // as the start position for our target. // // // // // However, if there is content between our target node start and the // previous newline, we cannot strip it out without risking content deletion. let isLineEmpty = false; try { const line = s.slice(Math.max(0, lineStartOffset), startOffset); isLineEmpty = !line.trim(); } catch { // magic-string may throw if there's some content removed in the sliced string, // which we ignore and assume the line is not empty } return isLineEmpty ? 
lineStartOffset : startOffset; }; // pre-transform html = await applyHtmlTransforms(html, preHooks, { path: publicPath, filename: id, }); let js = ''; const s = new MagicString(html); const scriptUrls = []; const styleUrls = []; let inlineModuleIndex = -1; let everyScriptIsAsync = true; let someScriptsAreAsync = false; let someScriptsAreDefer = false; const assetUrlsPromises = []; // for each encountered asset url, rewrite original html so that it // references the post-build location, ignoring empty attributes and // attributes that directly reference named output. const namedOutput = Object.keys(config?.build?.rollupOptions?.input || {}); const processAssetUrl = async (url, shouldInline) => { if (url !== '' && // Empty attribute !namedOutput.includes(url) && // Direct reference to named output !namedOutput.includes(removeLeadingSlash(url)) // Allow for absolute references as named output can't be an absolute path ) { try { return await urlToBuiltUrl(url, id, config, this, shouldInline); } catch (e) { if (e.code !== 'ENOENT') { throw e; } } } return url; }; await traverseHtml(html, id, (node) => { if (!nodeIsElement(node)) { return; } let shouldRemove = false; // script tags if (node.nodeName === 'script') { const { src, sourceCodeLocation, isModule, isAsync } = getScriptInfo(node); const url = src && src.value; const isPublicFile = !!(url && checkPublicFile(url, config)); if (isPublicFile) { // referencing public dir url, prefix with base overwriteAttrValue(s, sourceCodeLocation, toOutputPublicFilePath(url)); } if (isModule) { inlineModuleIndex++; if (url && !isExcludedUrl(url) && !isPublicFile) { // const filePath = id.replace(normalizePath$3(config.root), ''); addToHTMLProxyCache(config, filePath, inlineModuleIndex, { code: contents, }); js += `\nimport "${id}?html-proxy&index=${inlineModuleIndex}.js"`; shouldRemove = true; } everyScriptIsAsync &&= isAsync; someScriptsAreAsync ||= isAsync; someScriptsAreDefer ||= !isAsync; } else if (url && !isPublicFile) { if (!isExcludedUrl(url)) { config.logger.warn(` asset for (const { start, end, url } of scriptUrls) { if (checkPublicFile(url, config)) { s.update(start, end, toOutputPublicFilePath(url)); } else if (!isExcludedUrl(url)) { s.update(start, end, await urlToBuiltUrl(url, id, config, this)); } } // ignore if its url can't be resolved const resolvedStyleUrls = await Promise.all(styleUrls.map(async (styleUrl) => ({ ...styleUrl, resolved: await this.resolve(styleUrl.url, id), }))); for (const { start, end, url, resolved } of resolvedStyleUrls) { if (resolved == null) { config.logger.warnOnce(`\n${url} doesn't exist at build time, it will remain unchanged to be resolved at runtime`); const importExpression = `\nimport ${JSON.stringify(url)}`; js = js.replace(importExpression, ''); } else { s.remove(start, end); } } processedHtml.set(id, s.toString()); // inject module preload polyfill only when configured and needed const { modulePreload } = config.build; if (modulePreload !== false && modulePreload.polyfill && (someScriptsAreAsync || someScriptsAreDefer)) { js = `import "${modulePreloadPolyfillId}";\n${js}`; } // Force rollup to keep this module from being shared between other entry points. // If the resulting chunk is empty, it will be removed in generateBundle. 
return { code: js, moduleSideEffects: 'no-treeshake' }; } }, async generateBundle(options, bundle) { const analyzedChunk = new Map(); const inlineEntryChunk = new Set(); const getImportedChunks = (chunk, seen = new Set()) => { const chunks = []; chunk.imports.forEach((file) => { const importee = bundle[file]; if (importee?.type === 'chunk' && !seen.has(file)) { seen.add(file); // post-order traversal chunks.push(...getImportedChunks(importee, seen)); chunks.push(importee); } }); return chunks; }; const toScriptTag = (chunk, toOutputPath, isAsync) => ({ tag: 'script', attrs: { ...(isAsync ? { async: true } : {}), type: 'module', // crossorigin must be set not only for serving assets in a different origin // but also to make it possible to preload the script using ``. // ``); preTransformRequest(server, modulePath, base); }; await traverseHtml(html, filename, (node) => { if (!nodeIsElement(node)) { return; } // script tags if (node.nodeName === 'script') { const { src, sourceCodeLocation, isModule } = getScriptInfo(node); if (src) { const processedUrl = processNodeUrl(src.value, isSrcSet(src), config, htmlPath, originalUrl, server, !isModule); if (processedUrl !== src.value) { overwriteAttrValue(s, sourceCodeLocation, processedUrl); } } else if (isModule && node.childNodes.length) { addInlineModule(node, 'js'); } else if (node.childNodes.length) { const scriptNode = node.childNodes[node.childNodes.length - 1]; for (const { url, start, end, } of extractImportExpressionFromClassicScript(scriptNode)) { const processedUrl = processNodeUrl(url, false, config, htmlPath, originalUrl); if (processedUrl !== url) { s.update(start, end, processedUrl); } } } } const inlineStyle = findNeedTransformStyleAttribute(node); if (inlineStyle) { inlineModuleIndex++; inlineStyles.push({ index: inlineModuleIndex, location: inlineStyle.location, code: inlineStyle.attr.value, }); } if (node.nodeName === 'style' && node.childNodes.length) { const children = node.childNodes[0]; styleUrl.push({ start: children.sourceCodeLocation.startOffset, end: children.sourceCodeLocation.endOffset, code: children.value, }); } // elements with [href/src] attrs const assetAttrs = assetAttrsConfig[node.nodeName]; if (assetAttrs) { for (const p of node.attrs) { const attrKey = getAttrKey(p); if (p.value && assetAttrs.includes(attrKey)) { const processedUrl = processNodeUrl(p.value, isSrcSet(p), config, htmlPath, originalUrl); if (processedUrl !== p.value) { overwriteAttrValue(s, node.sourceCodeLocation.attrs[attrKey], processedUrl); } } } } }); await Promise.all([ ...styleUrl.map(async ({ start, end, code }, index) => { const url = `${proxyModulePath}?html-proxy&direct&index=${index}.css`; // ensure module in graph after successful load const mod = await moduleGraph.ensureEntryFromUrl(url, false); ensureWatchedFile(watcher, mod.file, config.root); const result = await server.pluginContainer.transform(code, mod.id); let content = ''; if (result) { if (result.map && 'version' in result.map) { if (result.map.mappings) { await injectSourcesContent(result.map, proxyModulePath, config.logger); } content = getCodeWithSourcemap('css', result.code, result.map); } else { content = result.code; } } s.overwrite(start, end, content); }), ...inlineStyles.map(async ({ index, location, code }) => { // will transform with css plugin and cache result with css-post plugin const url = `${proxyModulePath}?html-proxy&inline-css&style-attr&index=${index}.css`; const mod = await moduleGraph.ensureEntryFromUrl(url, false); ensureWatchedFile(watcher, mod.file, 
config.root); await server?.pluginContainer.transform(code, mod.id); const hash = getHash(cleanUrl(mod.id)); const result = htmlProxyResult.get(`${hash}_${index}`); overwriteAttrValue(s, location, result ?? ''); }), ]); html = s.toString(); return { html, tags: [ { tag: 'script', attrs: { type: 'module', src: path$o.posix.join(base, CLIENT_PUBLIC_PATH), }, injectTo: 'head-prepend', }, ], }; }; function indexHtmlMiddleware(root, server) { const isDev = isDevServer(server); const fsUtils = getFsUtils(server.config); // Keep the named function. The name is visible in debug logs via `DEBUG=connect:dispatcher ...` return async function viteIndexHtmlMiddleware(req, res, next) { if (res.writableEnded) { return next(); } const url = req.url && cleanUrl(req.url); // htmlFallbackMiddleware appends '.html' to URLs if (url?.endsWith('.html') && req.headers['sec-fetch-dest'] !== 'script') { let filePath; if (isDev && url.startsWith(FS_PREFIX)) { filePath = decodeURIComponent(fsPathFromId(url)); } else { filePath = path$o.join(root, decodeURIComponent(url)); } if (fsUtils.existsSync(filePath)) { const headers = isDev ? server.config.server.headers : server.config.preview.headers; try { let html = await fsp.readFile(filePath, 'utf-8'); if (isDev) { html = await server.transformIndexHtml(url, html, req.originalUrl); } return send(req, res, html, 'html', { headers }); } catch (e) { return next(e); } } } next(); }; } function preTransformRequest(server, url, base) { if (!server.config.server.preTransformRequests) return; // transform all url as non-ssr as html includes client-side assets only try { url = unwrapId$1(stripBase(decodeURI(url), base)); } catch { // ignore return; } server.warmupRequest(url); } const logTime = createDebugger('vite:time'); function timeMiddleware(root) { // Keep the named function. The name is visible in debug logs via `DEBUG=connect:dispatcher ...` return function viteTimeMiddleware(req, res, next) { const start = performance.now(); const end = res.end; res.end = (...args) => { logTime?.(`${timeFrom(start)} ${prettifyUrl(req.url, root)}`); return end.call(res, ...args); }; next(); }; } class ModuleNode { /** * Public served url path, starts with / */ url; /** * Resolved file system path + query */ id = null; file = null; type; info; meta; importers = new Set(); clientImportedModules = new Set(); ssrImportedModules = new Set(); acceptedHmrDeps = new Set(); acceptedHmrExports = null; importedBindings = null; isSelfAccepting; transformResult = null; ssrTransformResult = null; ssrModule = null; ssrError = null; lastHMRTimestamp = 0; lastInvalidationTimestamp = 0; /** * If the module only needs to update its imports timestamp (e.g. within an HMR chain), * it is considered soft-invalidated. In this state, its `transformResult` should exist, * and the next `transformRequest` for this module will replace the timestamps. * * By default the value is `undefined` if it's not soft/hard-invalidated. If it gets * soft-invalidated, this will contain the previous `transformResult` value. If it gets * hard-invalidated, this will be set to `'HARD_INVALIDATED'`. * @internal */ invalidationState; /** * @internal */ ssrInvalidationState; /** * The module urls that are statically imported in the code. This information is separated * out from `importedModules` as only importers that statically import the module can be * soft invalidated. Other imports (e.g. watched files) needs the importer to be hard invalidated. 
* @internal */ staticImportedUrls; /** * @param setIsSelfAccepting - set `false` to set `isSelfAccepting` later. e.g. #7870 */ constructor(url, setIsSelfAccepting = true) { this.url = url; this.type = isDirectCSSRequest(url) ? 'css' : 'js'; if (setIsSelfAccepting) { this.isSelfAccepting = false; } } get importedModules() { const importedModules = new Set(this.clientImportedModules); for (const module of this.ssrImportedModules) { importedModules.add(module); } return importedModules; } } class ModuleGraph { resolveId; urlToModuleMap = new Map(); idToModuleMap = new Map(); etagToModuleMap = new Map(); // a single file may corresponds to multiple modules with different queries fileToModulesMap = new Map(); safeModulesPath = new Set(); /** * @internal */ _unresolvedUrlToModuleMap = new Map(); /** * @internal */ _ssrUnresolvedUrlToModuleMap = new Map(); constructor(resolveId) { this.resolveId = resolveId; } async getModuleByUrl(rawUrl, ssr) { // Quick path, if we already have a module for this rawUrl (even without extension) rawUrl = removeImportQuery(removeTimestampQuery(rawUrl)); const mod = this._getUnresolvedUrlToModule(rawUrl, ssr); if (mod) { return mod; } const [url] = await this._resolveUrl(rawUrl, ssr); return this.urlToModuleMap.get(url); } getModuleById(id) { return this.idToModuleMap.get(removeTimestampQuery(id)); } getModulesByFile(file) { return this.fileToModulesMap.get(file); } onFileChange(file) { const mods = this.getModulesByFile(file); if (mods) { const seen = new Set(); mods.forEach((mod) => { this.invalidateModule(mod, seen); }); } } invalidateModule(mod, seen = new Set(), timestamp = Date.now(), isHmr = false, /** @internal */ softInvalidate = false) { const prevInvalidationState = mod.invalidationState; const prevSsrInvalidationState = mod.ssrInvalidationState; // Handle soft invalidation before the `seen` check, as consecutive soft/hard invalidations can // cause the final soft invalidation state to be different. // If soft invalidated, save the previous `transformResult` so that we can reuse and transform the // import timestamps only in `transformRequest`. If there's no previous `transformResult`, hard invalidate it. if (softInvalidate) { mod.invalidationState ??= mod.transformResult ?? 'HARD_INVALIDATED'; mod.ssrInvalidationState ??= mod.ssrTransformResult ?? 
'HARD_INVALIDATED'; } // If hard invalidated, further soft invalidations have no effect until it's reset to `undefined` else { mod.invalidationState = 'HARD_INVALIDATED'; mod.ssrInvalidationState = 'HARD_INVALIDATED'; } // Skip updating the module if it was already invalidated before and the invalidation state has not changed if (seen.has(mod) && prevInvalidationState === mod.invalidationState && prevSsrInvalidationState === mod.ssrInvalidationState) { return; } seen.add(mod); if (isHmr) { mod.lastHMRTimestamp = timestamp; } else { // Save the timestamp for this invalidation, so we can avoid caching the result of possible already started // processing being done for this module mod.lastInvalidationTimestamp = timestamp; } // Don't invalidate mod.info and mod.meta, as they are part of the processing pipeline // Invalidating the transform result is enough to ensure this module is re-processed next time it is requested const etag = mod.transformResult?.etag; if (etag) this.etagToModuleMap.delete(etag); mod.transformResult = null; mod.ssrTransformResult = null; mod.ssrModule = null; mod.ssrError = null; mod.importers.forEach((importer) => { if (!importer.acceptedHmrDeps.has(mod)) { // If the importer statically imports the current module, we can soft-invalidate the importer // to only update the import timestamps. If it's not statically imported, e.g. watched/glob file, // we can only soft invalidate if the current module was also soft-invalidated. A soft-invalidation // doesn't need to trigger a re-load and re-transform of the importer. const shouldSoftInvalidateImporter = importer.staticImportedUrls?.has(mod.url) || softInvalidate; this.invalidateModule(importer, seen, timestamp, isHmr, shouldSoftInvalidateImporter); } }); } invalidateAll() { const timestamp = Date.now(); const seen = new Set(); this.idToModuleMap.forEach((mod) => { this.invalidateModule(mod, seen, timestamp); }); } /** * Update the module graph based on a module's updated imports information * If there are dependencies that no longer have any importers, they are * returned as a Set. * * @param staticImportedUrls Subset of `importedModules` where they're statically imported in code. * This is only used for soft invalidations so `undefined` is fine but may cause more runtime processing. */ async updateModuleInfo(mod, importedModules, importedBindings, acceptedModules, acceptedExports, isSelfAccepting, ssr, /** @internal */ staticImportedUrls) { mod.isSelfAccepting = isSelfAccepting; const prevImports = ssr ? mod.ssrImportedModules : mod.clientImportedModules; let noLongerImported; let resolvePromises = []; let resolveResults = new Array(importedModules.size); let index = 0; // update import graph for (const imported of importedModules) { const nextIndex = index++; if (typeof imported === 'string') { resolvePromises.push(this.ensureEntryFromUrl(imported, ssr).then((dep) => { dep.importers.add(mod); resolveResults[nextIndex] = dep; })); } else { imported.importers.add(mod); resolveResults[nextIndex] = imported; } } if (resolvePromises.length) { await Promise.all(resolvePromises); } const nextImports = new Set(resolveResults); if (ssr) { mod.ssrImportedModules = nextImports; } else { mod.clientImportedModules = nextImports; } // remove the importer from deps that were imported but no longer are. 
prevImports.forEach((dep) => { if (!mod.clientImportedModules.has(dep) && !mod.ssrImportedModules.has(dep)) { dep.importers.delete(mod); if (!dep.importers.size) { (noLongerImported || (noLongerImported = new Set())).add(dep); } } }); // update accepted hmr deps resolvePromises = []; resolveResults = new Array(acceptedModules.size); index = 0; for (const accepted of acceptedModules) { const nextIndex = index++; if (typeof accepted === 'string') { resolvePromises.push(this.ensureEntryFromUrl(accepted, ssr).then((dep) => { resolveResults[nextIndex] = dep; })); } else { resolveResults[nextIndex] = accepted; } } if (resolvePromises.length) { await Promise.all(resolvePromises); } mod.acceptedHmrDeps = new Set(resolveResults); mod.staticImportedUrls = staticImportedUrls; // update accepted hmr exports mod.acceptedHmrExports = acceptedExports; mod.importedBindings = importedBindings; return noLongerImported; } async ensureEntryFromUrl(rawUrl, ssr, setIsSelfAccepting = true) { return this._ensureEntryFromUrl(rawUrl, ssr, setIsSelfAccepting); } /** * @internal */ async _ensureEntryFromUrl(rawUrl, ssr, setIsSelfAccepting = true, // Optimization, avoid resolving the same url twice if the caller already did it resolved) { // Quick path, if we already have a module for this rawUrl (even without extension) rawUrl = removeImportQuery(removeTimestampQuery(rawUrl)); let mod = this._getUnresolvedUrlToModule(rawUrl, ssr); if (mod) { return mod; } const modPromise = (async () => { const [url, resolvedId, meta] = await this._resolveUrl(rawUrl, ssr, resolved); mod = this.idToModuleMap.get(resolvedId); if (!mod) { mod = new ModuleNode(url, setIsSelfAccepting); if (meta) mod.meta = meta; this.urlToModuleMap.set(url, mod); mod.id = resolvedId; this.idToModuleMap.set(resolvedId, mod); const file = (mod.file = cleanUrl(resolvedId)); let fileMappedModules = this.fileToModulesMap.get(file); if (!fileMappedModules) { fileMappedModules = new Set(); this.fileToModulesMap.set(file, fileMappedModules); } fileMappedModules.add(mod); } // multiple urls can map to the same module and id, make sure we register // the url to the existing module in that case else if (!this.urlToModuleMap.has(url)) { this.urlToModuleMap.set(url, mod); } this._setUnresolvedUrlToModule(rawUrl, mod, ssr); return mod; })(); // Also register the clean url to the module, so that we can short-circuit // resolving the same url twice this._setUnresolvedUrlToModule(rawUrl, modPromise, ssr); return modPromise; } // some deps, like a css file referenced via @import, don't have its own // url because they are inlined into the main css import. But they still // need to be represented in the module graph so that they can trigger // hmr in the importing css file. createFileOnlyEntry(file) { file = normalizePath$3(file); let fileMappedModules = this.fileToModulesMap.get(file); if (!fileMappedModules) { fileMappedModules = new Set(); this.fileToModulesMap.set(file, fileMappedModules); } const url = `${FS_PREFIX}${file}`; for (const m of fileMappedModules) { if (m.url === url || m.id === file) { return m; } } const mod = new ModuleNode(url); mod.file = file; fileMappedModules.add(mod); return mod; } // for incoming urls, it is important to: // 1. remove the HMR timestamp query (?t=xxxx) and the ?import query // 2. 
resolve its extension so that urls with or without extension all map to // the same module async resolveUrl(url, ssr) { url = removeImportQuery(removeTimestampQuery(url)); const mod = await this._getUnresolvedUrlToModule(url, ssr); if (mod?.id) { return [mod.url, mod.id, mod.meta]; } return this._resolveUrl(url, ssr); } updateModuleTransformResult(mod, result, ssr) { if (ssr) { mod.ssrTransformResult = result; } else { const prevEtag = mod.transformResult?.etag; if (prevEtag) this.etagToModuleMap.delete(prevEtag); mod.transformResult = result; if (result?.etag) this.etagToModuleMap.set(result.etag, mod); } } getModuleByEtag(etag) { return this.etagToModuleMap.get(etag); } /** * @internal */ _getUnresolvedUrlToModule(url, ssr) { return (ssr ? this._ssrUnresolvedUrlToModuleMap : this._unresolvedUrlToModuleMap).get(url); } /** * @internal */ _setUnresolvedUrlToModule(url, mod, ssr) { (ssr ? this._ssrUnresolvedUrlToModuleMap : this._unresolvedUrlToModuleMap).set(url, mod); } /** * @internal */ async _resolveUrl(url, ssr, alreadyResolved) { const resolved = alreadyResolved ?? (await this.resolveId(url, !!ssr)); const resolvedId = resolved?.id || url; if (url !== resolvedId && !url.includes('\0') && !url.startsWith(`virtual:`)) { const ext = extname$1(cleanUrl(resolvedId)); if (ext) { const pathname = cleanUrl(url); if (!pathname.endsWith(ext)) { url = pathname + ext + url.slice(pathname.length); } } } return [url, resolvedId, resolved?.meta]; } } function notFoundMiddleware() { // Keep the named function. The name is visible in debug logs via `DEBUG=connect:dispatcher ...` return function vite404Middleware(_, res) { res.statusCode = 404; res.end(); }; } function warmupFiles(server) { const options = server.config.server.warmup; const root = server.config.root; if (options?.clientFiles?.length) { mapFiles(options.clientFiles, root).then((files) => { for (const file of files) { warmupFile(server, file, false); } }); } if (options?.ssrFiles?.length) { mapFiles(options.ssrFiles, root).then((files) => { for (const file of files) { warmupFile(server, file, true); } }); } } async function warmupFile(server, file, ssr) { // transform html with the `transformIndexHtml` hook as Vite internals would // pre-transform the imported JS modules linked. this may cause `transformIndexHtml` // plugins to be executed twice, but that's probably fine. 
if (file.endsWith('.html')) { const url = htmlFileToUrl(file, server.config.root); if (url) { try { const html = await fsp.readFile(file, 'utf-8'); await server.transformIndexHtml(url, html); } catch (e) { // Unexpected error, log the issue but avoid an unhandled exception server.config.logger.error(`Pre-transform error (${colors$1.cyan(file)}): ${e.message}`, { error: e, timestamp: true, }); } } } // for other files, pass it through `transformRequest` with warmup else { const url = fileToUrl(file, server.config.root); await server.warmupRequest(url, { ssr }); } } function htmlFileToUrl(file, root) { const url = path$o.relative(root, file); // out of root, ignore file if (url[0] === '.') return; // file within root, create root-relative url return '/' + normalizePath$3(url); } function fileToUrl(file, root) { const url = path$o.relative(root, file); // out of root, use /@fs/ prefix if (url[0] === '.') { return path$o.posix.join(FS_PREFIX, normalizePath$3(file)); } // file within root, create root-relative url return '/' + normalizePath$3(url); } function mapFiles(files, root) { return glob(files, { cwd: root, absolute: true, }); } function createServer(inlineConfig = {}) { return _createServer(inlineConfig, { hotListen: true }); } async function _createServer(inlineConfig = {}, options) { const config = await resolveConfig(inlineConfig, 'serve'); const initPublicFilesPromise = initPublicFiles(config); const { root, server: serverConfig } = config; const httpsOptions = await resolveHttpsConfig(config.server.https); const { middlewareMode } = serverConfig; const resolvedWatchOptions = resolveChokidarOptions(config, { disableGlobbing: true, ...serverConfig.watch, }); const middlewares = connect$1(); const httpServer = middlewareMode ? null : await resolveHttpServer(serverConfig, middlewares, httpsOptions); const ws = createWebSocketServer(httpServer, config, httpsOptions); const hot = createHMRBroadcaster() .addChannel(ws) .addChannel(createServerHMRChannel()); if (typeof config.server.hmr === 'object' && config.server.hmr.channels) { config.server.hmr.channels.forEach((channel) => hot.addChannel(channel)); } if (httpServer) { setClientErrorHandler(httpServer, config.logger); } // eslint-disable-next-line eqeqeq const watchEnabled = serverConfig.watch !== null; const watcher = watchEnabled ? 
chokidar.watch( // config file dependencies and env file might be outside of root [ root, ...config.configFileDependencies, ...getEnvFilesForMode(config.mode, config.envDir), ], resolvedWatchOptions) : createNoopWatcher(resolvedWatchOptions); const moduleGraph = new ModuleGraph((url, ssr) => container.resolveId(url, undefined, { ssr })); const container = await createPluginContainer(config, moduleGraph, watcher); const closeHttpServer = createServerCloseFn(httpServer); let exitProcess; const devHtmlTransformFn = createDevHtmlTransformFn(config); let server = { config, middlewares, httpServer, watcher, pluginContainer: container, ws, hot, moduleGraph, resolvedUrls: null, ssrTransform(code, inMap, url, originalCode = code) { return ssrTransform(code, inMap, url, originalCode, server.config); }, transformRequest(url, options) { return transformRequest(url, server, options); }, async warmupRequest(url, options) { await transformRequest(url, server, options).catch((e) => { if (e?.code === ERR_OUTDATED_OPTIMIZED_DEP || e?.code === ERR_CLOSED_SERVER) { // these are expected errors return; } // Unexpected error, log the issue but avoid an unhandled exception server.config.logger.error(`Pre-transform error: ${e.message}`, { error: e, timestamp: true, }); }); }, transformIndexHtml(url, html, originalUrl) { return devHtmlTransformFn(server, url, html, originalUrl); }, async ssrLoadModule(url, opts) { return ssrLoadModule(url, server, undefined, undefined, opts?.fixStacktrace); }, async ssrFetchModule(url, importer) { return ssrFetchModule(server, url, importer); }, ssrFixStacktrace(e) { ssrFixStacktrace(e, moduleGraph); }, ssrRewriteStacktrace(stack) { return ssrRewriteStacktrace(stack, moduleGraph); }, async reloadModule(module) { if (serverConfig.hmr !== false && module.file) { updateModules(module.file, [module], Date.now(), server); } }, async listen(port, isRestart) { await startServer(server, port); if (httpServer) { server.resolvedUrls = await resolveServerUrls(httpServer, config.server, config); if (!isRestart && config.server.open) server.openBrowser(); } return server; }, openBrowser() { const options = server.config.server; const url = server.resolvedUrls?.local[0] ?? server.resolvedUrls?.network[0]; if (url) { const path = typeof options.open === 'string' ? new URL(options.open, url).href : url; // We know the url that the browser would be opened to, so we can // start the request while we are awaiting the browser. This will // start the crawling of static imports ~500ms before. // preTransformRequests needs to be enabled for this optimization. if (server.config.server.preTransformRequests) { setTimeout(() => { const getMethod = path.startsWith('https:') ? 
get$1 : get$2; getMethod(path, { headers: { // Allow the history middleware to redirect to /index.html Accept: 'text/html', }, }, (res) => { res.on('end', () => { // Ignore response, scripts discovered while processing the entry // will be preprocessed (server.config.server.preTransformRequests) }); }) .on('error', () => { // Ignore errors }) .end(); }, 0); } openBrowser(path, true, server.config.logger); } else { server.config.logger.warn('No URL available to open in browser'); } }, async close() { if (!middlewareMode) { process.off('SIGTERM', exitProcess); if (process.env.CI !== 'true') { process.stdin.off('end', exitProcess); } } await Promise.allSettled([ watcher.close(), hot.close(), container.close(), getDepsOptimizer(server.config)?.close(), getDepsOptimizer(server.config, true)?.close(), closeHttpServer(), ]); // Await pending requests. We throw early in transformRequest // and in hooks if the server is closing for non-ssr requests, // so the import analysis plugin stops pre-transforming static // imports and this block is resolved sooner. // During SSR, we let pending requests finish to avoid exposing // the server closed error to the users. while (server._pendingRequests.size > 0) { await Promise.allSettled([...server._pendingRequests.values()].map((pending) => pending.request)); } server.resolvedUrls = null; }, printUrls() { if (server.resolvedUrls) { printServerUrls(server.resolvedUrls, serverConfig.host, config.logger.info); } else if (middlewareMode) { throw new Error('cannot print server URLs in middleware mode.'); } else { throw new Error('cannot print server URLs before server.listen is called.'); } }, bindCLIShortcuts(options) { bindCLIShortcuts(server, options); }, async restart(forceOptimize) { if (!server._restartPromise) { server._forceOptimizeOnRestart = !!forceOptimize; server._restartPromise = restartServer(server).finally(() => { server._restartPromise = null; server._forceOptimizeOnRestart = false; }); } return server._restartPromise; }, _setInternalServer(_server) { // Rebind internal the server variable so functions reference the user // server instance after a restart server = _server; }, _restartPromise: null, _importGlobMap: new Map(), _forceOptimizeOnRestart: false, _pendingRequests: new Map(), _fsDenyGlob: picomatch$4(config.server.fs.deny, { matchBase: true, nocase: true, }), _shortcutsOptions: undefined, }; // maintain consistency with the server instance after restarting. const reflexServer = new Proxy(server, { get: (_, property) => { return server[property]; }, set: (_, property, value) => { server[property] = value; return true; }, }); if (!middlewareMode) { exitProcess = async () => { try { await server.close(); } finally { process.exit(); } }; process.once('SIGTERM', exitProcess); if (process.env.CI !== 'true') { process.stdin.on('end', exitProcess); } } const publicFiles = await initPublicFilesPromise; const onHMRUpdate = async (file, configOnly) => { if (serverConfig.hmr !== false) { try { await handleHMRUpdate(file, server, configOnly); } catch (err) { hot.send({ type: 'error', err: prepareError(err), }); } } }; const { publicDir } = config; const onFileAddUnlink = async (file, isUnlink) => { file = normalizePath$3(file); await container.watchChange(file, { event: isUnlink ? 'delete' : 'create' }); if (publicDir && publicFiles) { if (file.startsWith(publicDir)) { const path = file.slice(publicDir.length); publicFiles[isUnlink ? 
'delete' : 'add'](path); if (!isUnlink) { const moduleWithSamePath = await moduleGraph.getModuleByUrl(path); const etag = moduleWithSamePath?.transformResult?.etag; if (etag) { // The public file should win on the next request over a module with the // same path. Prevent the transform etag fast path from serving the module moduleGraph.etagToModuleMap.delete(etag); } } } } await handleFileAddUnlink(file, server, isUnlink); await onHMRUpdate(file, true); }; watcher.on('change', async (file) => { file = normalizePath$3(file); await container.watchChange(file, { event: 'update' }); // invalidate module graph cache on file change moduleGraph.onFileChange(file); await onHMRUpdate(file, false); }); getFsUtils(config).initWatcher?.(watcher); watcher.on('add', (file) => { onFileAddUnlink(file, false); }); watcher.on('unlink', (file) => { onFileAddUnlink(file, true); }); hot.on('vite:invalidate', async ({ path, message }) => { const mod = moduleGraph.urlToModuleMap.get(path); if (mod && mod.isSelfAccepting && mod.lastHMRTimestamp > 0) { config.logger.info(colors$1.yellow(`hmr invalidate `) + colors$1.dim(path) + (message ? ` ${message}` : ''), { timestamp: true }); const file = getShortName(mod.file, config.root); updateModules(file, [...mod.importers], mod.lastHMRTimestamp, server, true); } }); if (!middlewareMode && httpServer) { httpServer.once('listening', () => { // update actual port since this may be different from initial value serverConfig.port = httpServer.address().port; }); } // apply server configuration hooks from plugins const postHooks = []; for (const hook of config.getSortedPluginHooks('configureServer')) { postHooks.push(await hook(reflexServer)); } // Internal middlewares ------------------------------------------------------ // request timer if (process.env.DEBUG) { middlewares.use(timeMiddleware(root)); } // cors (enabled by default) const { cors } = serverConfig; if (cors !== false) { middlewares.use(corsMiddleware(typeof cors === 'boolean' ? {} : cors)); } middlewares.use(cachedTransformMiddleware(server)); // proxy const { proxy } = serverConfig; if (proxy) { const middlewareServer = (isObject$1(middlewareMode) ? middlewareMode.server : null) || httpServer; middlewares.use(proxyMiddleware(middlewareServer, proxy, config)); } // base if (config.base !== '/') { middlewares.use(baseMiddleware(config.rawBase, !!middlewareMode)); } // open in editor support middlewares.use('/__open-in-editor', launchEditorMiddleware$1()); // ping request handler // Keep the named function. The name is visible in debug logs via `DEBUG=connect:dispatcher ...` middlewares.use(function viteHMRPingMiddleware(req, res, next) { if (req.headers['accept'] === 'text/x-vite-ping') { res.writeHead(204).end(); } else { next(); } }); // serve static files under /public // this applies before the transform middleware so that these files are served // as-is without transforms. if (publicDir) { middlewares.use(servePublicMiddleware(server, publicFiles)); } // main transform middleware middlewares.use(transformMiddleware(server)); // serve static files middlewares.use(serveRawFsMiddleware(server)); middlewares.use(serveStaticMiddleware(server)); // html fallback if (config.appType === 'spa' || config.appType === 'mpa') { middlewares.use(htmlFallbackMiddleware(root, config.appType === 'spa', getFsUtils(config))); } // run post config hooks // This is applied before the html middleware so that user middleware can // serve custom content instead of index.html. 
postHooks.forEach((fn) => fn && fn()); if (config.appType === 'spa' || config.appType === 'mpa') { // transform index.html middlewares.use(indexHtmlMiddleware(root, server)); // handle 404s middlewares.use(notFoundMiddleware()); } // error handler middlewares.use(errorMiddleware(server, !!middlewareMode)); // httpServer.listen can be called multiple times // when port when using next port number // this code is to avoid calling buildStart multiple times let initingServer; let serverInited = false; const initServer = async () => { if (serverInited) return; if (initingServer) return initingServer; initingServer = (async function () { await container.buildStart({}); // start deps optimizer after all container plugins are ready if (isDepsOptimizerEnabled(config, false)) { await initDepsOptimizer(config, server); } warmupFiles(server); initingServer = undefined; serverInited = true; })(); return initingServer; }; if (!middlewareMode && httpServer) { // overwrite listen to init optimizer before server start const listen = httpServer.listen.bind(httpServer); httpServer.listen = (async (port, ...args) => { try { // ensure ws server started hot.listen(); await initServer(); } catch (e) { httpServer.emit('error', e); return; } return listen(port, ...args); }); } else { if (options.hotListen) { hot.listen(); } await initServer(); } return server; } async function startServer(server, inlinePort) { const httpServer = server.httpServer; if (!httpServer) { throw new Error('Cannot call server.listen in middleware mode.'); } const options = server.config.server; const hostname = await resolveHostname(options.host); const configPort = inlinePort ?? options.port; // When using non strict port for the dev server, the running port can be different from the config one. // When restarting, the original port may be available but to avoid a switch of URL for the running // browser tabs, we enforce the previously used port, expect if the config port changed. const port = (!configPort || configPort === server._configServerPort ? server._currentServerPort : configPort) ?? DEFAULT_DEV_PORT; server._configServerPort = configPort; const serverPort = await httpServerStart(httpServer, { port, strictPort: options.strictPort, host: hostname.host, logger: server.config.logger, }); server._currentServerPort = serverPort; } function createServerCloseFn(server) { if (!server) { return () => Promise.resolve(); } let hasListened = false; const openSockets = new Set(); server.on('connection', (socket) => { openSockets.add(socket); socket.on('close', () => { openSockets.delete(socket); }); }); server.once('listening', () => { hasListened = true; }); return () => new Promise((resolve, reject) => { openSockets.forEach((s) => s.destroy()); if (hasListened) { server.close((err) => { if (err) { reject(err); } else { resolve(); } }); } else { resolve(); } }); } function resolvedAllowDir(root, dir) { return normalizePath$3(path$o.resolve(root, dir)); } function resolveServerOptions(root, raw, logger) { const server = { preTransformRequests: true, ...raw, sourcemapIgnoreList: raw?.sourcemapIgnoreList === false ? 
() => false : raw?.sourcemapIgnoreList || isInNodeModules$1, middlewareMode: raw?.middlewareMode || false, }; let allowDirs = server.fs?.allow; const deny = server.fs?.deny || ['.env', '.env.*', '*.{crt,pem}']; if (!allowDirs) { allowDirs = [searchForWorkspaceRoot(root)]; } allowDirs = allowDirs.map((i) => resolvedAllowDir(root, i)); // only push client dir when vite itself is outside of root const resolvedClientDir = resolvedAllowDir(root, CLIENT_DIR); if (!allowDirs.some((dir) => isParentDirectory(dir, resolvedClientDir))) { allowDirs.push(resolvedClientDir); } server.fs = { strict: server.fs?.strict ?? true, allow: allowDirs, deny, cachedChecks: server.fs?.cachedChecks, }; if (server.origin?.endsWith('/')) { server.origin = server.origin.slice(0, -1); logger.warn(colors$1.yellow(`${colors$1.bold('(!)')} server.origin should not end with "/". Using "${server.origin}" instead.`)); } return server; } async function restartServer(server) { global.__vite_start_time = performance.now(); const shortcutsOptions = server._shortcutsOptions; let inlineConfig = server.config.inlineConfig; if (server._forceOptimizeOnRestart) { inlineConfig = mergeConfig(inlineConfig, { optimizeDeps: { force: true, }, }); } // Reinit the server by creating a new instance using the same inlineConfig // This will trigger a reload of the config file and re-create the plugins and // middlewares. We then assign all properties of the new server to the existing // server instance and set the user instance to be used in the new server. // This allows us to keep the same server instance for the user. { let newServer = null; try { // delay ws server listen newServer = await _createServer(inlineConfig, { hotListen: false }); } catch (err) { server.config.logger.error(err.message, { timestamp: true, }); server.config.logger.error('server restart failed', { timestamp: true }); return; } await server.close(); // Assign new server props to existing server instance const middlewares = server.middlewares; newServer._configServerPort = server._configServerPort; newServer._currentServerPort = server._currentServerPort; Object.assign(server, newServer); // Keep the same connect instance so app.use(vite.middlewares) works // after a restart in middlewareMode (.route is always '/') middlewares.stack = newServer.middlewares.stack; server.middlewares = middlewares; // Rebind internal server variable so functions reference the user server newServer._setInternalServer(server); } const { logger, server: { port, middlewareMode }, } = server.config; if (!middlewareMode) { await server.listen(port, true); } else { server.hot.listen(); } logger.info('server restarted.', { timestamp: true }); if (shortcutsOptions) { shortcutsOptions.print = false; bindCLIShortcuts(server, shortcutsOptions); } } /** * Internal function to restart the Vite server and print URLs if changed */ async function restartServerWithUrls(server) { if (server.config.server.middlewareMode) { await server.restart(); return; } const { port: prevPort, host: prevHost } = server.config.server; const prevUrls = server.resolvedUrls; await server.restart(); const { logger, server: { port, host }, } = server.config; if ((port ?? DEFAULT_DEV_PORT) !== (prevPort ??
DEFAULT_DEV_PORT) || host !== prevHost || diffDnsOrderChange(prevUrls, server.resolvedUrls)) { logger.info(''); server.printUrls(); } } var index = { __proto__: null, _createServer: _createServer, createServer: createServer, createServerCloseFn: createServerCloseFn, resolveServerOptions: resolveServerOptions, restartServerWithUrls: restartServerWithUrls }; const debugHmr = createDebugger('vite:hmr'); const whitespaceRE = /\s/; const normalizedClientDir = normalizePath$3(CLIENT_DIR); function getShortName(file, root) { return file.startsWith(withTrailingSlash(root)) ? path$o.posix.relative(root, file) : file; } async function handleHMRUpdate(file, server, configOnly) { const { hot, config, moduleGraph } = server; const shortFile = getShortName(file, config.root); const isConfig = file === config.configFile; const isConfigDependency = config.configFileDependencies.some((name) => file === name); const isEnv = config.inlineConfig.envFile !== false && getEnvFilesForMode(config.mode, config.envDir).includes(file); if (isConfig || isConfigDependency || isEnv) { // auto restart server debugHmr?.(`[config change] ${colors$1.dim(shortFile)}`); config.logger.info(colors$1.green(`${path$o.relative(process.cwd(), file)} changed, restarting server...`), { clear: true, timestamp: true }); try { await restartServerWithUrls(server); } catch (e) { config.logger.error(colors$1.red(e)); } return; } if (configOnly) { return; } debugHmr?.(`[file change] ${colors$1.dim(shortFile)}`); // (dev only) the client itself cannot be hot updated. if (file.startsWith(withTrailingSlash(normalizedClientDir))) { hot.send({ type: 'full-reload', path: '*', triggeredBy: path$o.resolve(config.root, file), }); return; } const mods = moduleGraph.getModulesByFile(file); // check if any plugin wants to perform custom HMR handling const timestamp = Date.now(); const hmrContext = { file, timestamp, modules: mods ? [...mods] : [], read: () => readModifiedFile(file), server, }; for (const hook of config.getSortedPluginHooks('handleHotUpdate')) { const filteredModules = await hook(hmrContext); if (filteredModules) { hmrContext.modules = filteredModules; } } if (!hmrContext.modules.length) { // html file cannot be hot updated if (file.endsWith('.html')) { config.logger.info(colors$1.green(`page reload `) + colors$1.dim(shortFile), { clear: true, timestamp: true, }); hot.send({ type: 'full-reload', path: config.server.middlewareMode ? '*' : '/' + normalizePath$3(path$o.relative(config.root, file)), }); } else { // loaded but not in the module graph, probably not js debugHmr?.(`[no modules matched] ${colors$1.dim(shortFile)}`); } return; } updateModules(shortFile, hmrContext.modules, timestamp, server); } function updateModules(file, modules, timestamp, { config, hot, moduleGraph }, afterInvalidation) { const updates = []; const invalidatedModules = new Set(); const traversedModules = new Set(); let needFullReload = false; for (const mod of modules) { const boundaries = []; const hasDeadEnd = propagateUpdate(mod, traversedModules, boundaries); moduleGraph.invalidateModule(mod, invalidatedModules, timestamp, true); if (needFullReload) { continue; } if (hasDeadEnd) { needFullReload = hasDeadEnd; continue; } updates.push(...boundaries.map(({ boundary, acceptedVia, isWithinCircularImport }) => ({ type: `${boundary.type}-update`, timestamp, path: normalizeHmrUrl(boundary.url), acceptedPath: normalizeHmrUrl(acceptedVia.url), explicitImportRequired: boundary.type === 'js' ? 
isExplicitImportRequired(acceptedVia.url) : false, isWithinCircularImport, // browser modules are invalidated by changing ?t= query, // but in ssr we control the module system, so we can directly remove them from the cache ssrInvalidates: getSSRInvalidatedImporters(acceptedVia), }))); } if (needFullReload) { const reason = typeof needFullReload === 'string' ? colors$1.dim(` (${needFullReload})`) : ''; config.logger.info(colors$1.green(`page reload `) + colors$1.dim(file) + reason, { clear: !afterInvalidation, timestamp: true }); hot.send({ type: 'full-reload', triggeredBy: path$o.resolve(config.root, file), }); return; } if (updates.length === 0) { debugHmr?.(colors$1.yellow(`no update happened `) + colors$1.dim(file)); return; } config.logger.info(colors$1.green(`hmr update `) + colors$1.dim([...new Set(updates.map((u) => u.path))].join(', ')), { clear: !afterInvalidation, timestamp: true }); hot.send({ type: 'update', updates, }); } function populateSSRImporters(module, timestamp, seen = new Set()) { module.ssrImportedModules.forEach((importer) => { if (seen.has(importer)) { return; } if (importer.lastHMRTimestamp === timestamp || importer.lastInvalidationTimestamp === timestamp) { seen.add(importer); populateSSRImporters(importer, timestamp, seen); } }); return seen; } function getSSRInvalidatedImporters(module) { return [...populateSSRImporters(module, module.lastHMRTimestamp)].map((m) => m.file); } async function handleFileAddUnlink(file, server, isUnlink) { const modules = [...(server.moduleGraph.getModulesByFile(file) || [])]; if (isUnlink) { for (const deletedMod of modules) { deletedMod.importedModules.forEach((importedMod) => { importedMod.importers.delete(deletedMod); }); } } modules.push(...getAffectedGlobModules(file, server)); if (modules.length > 0) { updateModules(getShortName(file, server.config.root), unique(modules), Date.now(), server); } } function areAllImportsAccepted(importedBindings, acceptedExports) { for (const binding of importedBindings) { if (!acceptedExports.has(binding)) { return false; } } return true; } function propagateUpdate(node, traversedModules, boundaries, currentChain = [node]) { if (traversedModules.has(node)) { return false; } traversedModules.add(node); // #7561 // if the imports of `node` have not been analyzed, then `node` has not // been loaded in the browser and we should stop propagation. if (node.id && node.isSelfAccepting === undefined) { debugHmr?.(`[propagate update] stop propagation because not analyzed: ${colors$1.dim(node.id)}`); return false; } if (node.isSelfAccepting) { boundaries.push({ boundary: node, acceptedVia: node, isWithinCircularImport: isNodeWithinCircularImports(node, currentChain), }); // additionally check for CSS importers, since a PostCSS plugin like // Tailwind JIT may register any file as a dependency to a CSS file. for (const importer of node.importers) { if (isCSSRequest(importer.url) && !currentChain.includes(importer)) { propagateUpdate(importer, traversedModules, boundaries, currentChain.concat(importer)); } } return false; } // A partially accepted module with no importers is considered self-accepting, // because the deal is "there are parts of myself I can't self accept if they // are used outside of me". // Also, the imported module (this one) must be updated before the importers, // so that they do get the fresh imported module when/if they are reloaded.
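// Illustrative sketch (not part of this bundle; relies on the experimental
// `hmrPartialAccept` option) of a module whose `acceptedHmrExports` would be
// populated and which the branch below then treats as an HMR boundary. The
// export name is hypothetical:
//
//   export let count = 0
//   import.meta.hot?.acceptExports(['count'], (mod) => {
//     count = mod.count
//   })
//
// Importers that only consume accepted exports are skipped during propagation
// (see the `areAllImportsAccepted` check in the importer loop below).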
if (node.acceptedHmrExports) { boundaries.push({ boundary: node, acceptedVia: node, isWithinCircularImport: isNodeWithinCircularImports(node, currentChain), }); } else { if (!node.importers.size) { return true; } // #3716, #3913 // For a non-CSS file, if all of its importers are CSS files (registered via // PostCSS plugins) it should be considered a dead end and force full reload. if (!isCSSRequest(node.url) && [...node.importers].every((i) => isCSSRequest(i.url))) { return true; } } for (const importer of node.importers) { const subChain = currentChain.concat(importer); if (importer.acceptedHmrDeps.has(node)) { boundaries.push({ boundary: importer, acceptedVia: node, isWithinCircularImport: isNodeWithinCircularImports(importer, subChain), }); continue; } if (node.id && node.acceptedHmrExports && importer.importedBindings) { const importedBindingsFromNode = importer.importedBindings.get(node.id); if (importedBindingsFromNode && areAllImportsAccepted(importedBindingsFromNode, node.acceptedHmrExports)) { continue; } } if (!currentChain.includes(importer) && propagateUpdate(importer, traversedModules, boundaries, subChain)) { return true; } } return false; } /** * Recursively check importers to see whether the node is part of an import loop. An accepted module within * an import loop cannot recover its execution order and should be reloaded. * * @param node The node that accepts HMR and is a boundary * @param nodeChain The chain of nodes/imports that lead to the node. * (The last node in the chain imports the `node` parameter) * @param currentChain The current chain tracked from the `node` parameter * @param traversedModules The set of modules that have already been traversed */ function isNodeWithinCircularImports(node, nodeChain, currentChain = [node], traversedModules = new Set()) { // To help visualize how each parameter works, imagine this import graph: // // A -> B -> C -> ACCEPTED -> D -> E -> NODE // ^--------------------------| // // ACCEPTED: the node that accepts HMR. the `node` parameter. // NODE : the initial node that triggered this HMR. // // This function will return true in the above graph, where: // `node` : ACCEPTED // `nodeChain` : [NODE, E, D, ACCEPTED] // `currentChain` : [ACCEPTED, C, B] // // It works by checking if any `node` importers are within `nodeChain`, which // means there's an import loop with an HMR-accepted module in it. if (traversedModules.has(node)) { return false; } traversedModules.add(node); for (const importer of node.importers) { // Node may import itself which is safe if (importer === node) continue; // a PostCSS plugin like Tailwind JIT may register // any file as a dependency to a CSS file. // But in that case, the actual dependency chain is separate.
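// Illustrative sketch (hypothetical files, not part of this bundle) of the
// circular-import case detected by the index lookup below: editing b.js makes
// a.js the accepting boundary, but a.js sits inside the a -> b -> a loop, so
// its execution order cannot be recovered and the update falls back to a full
// reload, as described in the JSDoc above.
//
//   // a.js
//   import './b.js'
//   import.meta.hot?.accept(() => { /* re-run setup */ })
//
//   // b.js
//   import './a.js'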
if (isCSSRequest(importer.url)) continue; // Check circular imports const importerIndex = nodeChain.indexOf(importer); if (importerIndex > -1) { // Log extra debug information so users can fix and remove the circular imports if (debugHmr) { // Following explanation above: // `importer` : E // `currentChain` reversed : [B, C, ACCEPTED] // `nodeChain` sliced & reversed : [D, E] // Combined : [E, B, C, ACCEPTED, D, E] const importChain = [ importer, ...[...currentChain].reverse(), ...nodeChain.slice(importerIndex, -1).reverse(), ]; debugHmr(colors$1.yellow(`circular imports detected: `) + importChain.map((m) => colors$1.dim(m.url)).join(' -> ')); } return true; } // Continue recursively if (!currentChain.includes(importer)) { const result = isNodeWithinCircularImports(importer, nodeChain, currentChain.concat(importer), traversedModules); if (result) return result; } } return false; } function handlePrunedModules(mods, { hot }) { // update the disposed modules' hmr timestamp // since if it's re-imported, it should re-apply side effects // and without the timestamp the browser will not re-import it! const t = Date.now(); mods.forEach((mod) => { mod.lastHMRTimestamp = t; debugHmr?.(`[dispose] ${colors$1.dim(mod.file)}`); }); hot.send({ type: 'prune', paths: [...mods].map((m) => m.url), }); } /** * Lex import.meta.hot.accept() for accepted deps. * Since hot.accept() can only accept string literals or an array of string * literals, we don't really need a heavy @babel/parse call on the entire source. * * @returns selfAccepts */ function lexAcceptedHmrDeps(code, start, urls) { let state = 0 /* LexerState.inCall */; // the state can only be 2 levels deep so no need for a stack let prevState = 0 /* LexerState.inCall */; let currentDep = ''; function addDep(index) { urls.add({ url: currentDep, start: index - currentDep.length - 1, end: index + 1, }); currentDep = ''; } for (let i = start; i < code.length; i++) { const char = code.charAt(i); switch (state) { case 0 /* LexerState.inCall */: case 4 /* LexerState.inArray */: if (char === `'`) { prevState = state; state = 1 /* LexerState.inSingleQuoteString */; } else if (char === `"`) { prevState = state; state = 2 /* LexerState.inDoubleQuoteString */; } else if (char === '`') { prevState = state; state = 3 /* LexerState.inTemplateString */; } else if (whitespaceRE.test(char)) { continue; } else { if (state === 0 /* LexerState.inCall */) { if (char === `[`) { state = 4 /* LexerState.inArray */; } else { // reaching here means the first arg is neither a string literal // nor an array literal (i.e. it is a direct callback), or there is no arg; // in both cases this indicates a self-accepting module return true; // done } } else if (state === 4 /* LexerState.inArray */) { if (char === `]`) { return false; // done } else if (char === ',') { continue; } else { error(i); } } } break; case 1 /* LexerState.inSingleQuoteString */: if (char === `'`) { addDep(i); if (prevState === 0 /* LexerState.inCall */) { // accept('foo', ...) return false; } else { state = prevState; } } else { currentDep += char; } break; case 2 /* LexerState.inDoubleQuoteString */: if (char === `"`) { addDep(i); if (prevState === 0 /* LexerState.inCall */) { // accept('foo', ...) return false; } else { state = prevState; } } else { currentDep += char; } break; case 3 /* LexerState.inTemplateString */: if (char === '`') { addDep(i); if (prevState === 0 /* LexerState.inCall */) { // accept('foo', ...)
return false; } else { state = prevState; } } else if (char === '$' && code.charAt(i + 1) === '{') { error(i); } else { currentDep += char; } break; default: throw new Error('unknown import.meta.hot lexer state'); } } return false; } function lexAcceptedHmrExports(code, start, exportNames) { const urls = new Set(); lexAcceptedHmrDeps(code, start, urls); for (const { url } of urls) { exportNames.add(url); } return urls.size > 0; } function normalizeHmrUrl(url) { if (url[0] !== '.' && url[0] !== '/') { url = wrapId$1(url); } return url; } function error(pos) { const err = new Error(`import.meta.hot.accept() can only accept string literals or an ` + `Array of string literals.`); err.pos = pos; throw err; } // vitejs/vite#610 when hot-reloading Vue files, we read immediately on file // change event and sometimes this can be too early and get an empty buffer. // Poll until the file's modified time has changed before reading again. async function readModifiedFile(file) { const content = await fsp.readFile(file, 'utf-8'); if (!content) { const mtime = (await fsp.stat(file)).mtimeMs; for (let n = 0; n < 10; n++) { await new Promise((r) => setTimeout(r, 10)); const newMtime = (await fsp.stat(file)).mtimeMs; if (newMtime !== mtime) { break; } } return await fsp.readFile(file, 'utf-8'); } else { return content; } } function createHMRBroadcaster() { const channels = []; const readyChannels = new WeakSet(); const broadcaster = { get channels() { return [...channels]; }, addChannel(channel) { if (channels.some((c) => c.name === channel.name)) { throw new Error(`HMR channel "${channel.name}" is already defined.`); } channels.push(channel); return broadcaster; }, on(event, listener) { // emit connection event only when all channels are ready if (event === 'connection') { // make a copy so we don't wait for channels that might be added after this is triggered const channels = this.channels; channels.forEach((channel) => channel.on('connection', () => { readyChannels.add(channel); if (channels.every((c) => readyChannels.has(c))) { listener(); } })); return; } channels.forEach((channel) => channel.on(event, listener)); return; }, off(event, listener) { channels.forEach((channel) => channel.off(event, listener)); return; }, send(...args) { channels.forEach((channel) => channel.send(...args)); }, listen() { channels.forEach((channel) => channel.listen()); }, close() { return Promise.all(channels.map((channel) => channel.close())); }, }; return broadcaster; } function createServerHMRChannel() { const innerEmitter = new EventEmitter$4(); const outsideEmitter = new EventEmitter$4(); return { name: 'ssr', send(...args) { let payload; if (typeof args[0] === 'string') { payload = { type: 'custom', event: args[0], data: args[1], }; } else { payload = args[0]; } outsideEmitter.emit('send', payload); }, off(event, listener) { innerEmitter.off(event, listener); }, on: ((event, listener) => { innerEmitter.on(event, listener); }), close() { innerEmitter.removeAllListeners(); outsideEmitter.removeAllListeners(); }, listen() { innerEmitter.emit('connection'); }, api: { innerEmitter, outsideEmitter, }, }; } const debug$1 = createDebugger('vite:import-analysis'); const clientDir = normalizePath$3(CLIENT_DIR); const skipRE = /\.(?:map|json)(?:$|\?)/; const canSkipImportAnalysis = (id) => skipRE.test(id) || isDirectCSSRequest(id); const optimizedDepChunkRE = /\/chunk-[A-Z\d]{8}\.js/; const optimizedDepDynamicRE = /-[A-Z\d]{8}\.js/; const hasViteIgnoreRE = /\/\*\s*@vite-ignore\s*\*\//; const urlIsStringRE = 
/^(?:'.*'|".*"|`.*`)$/; const templateLiteralRE = /^\s*`(.*)`\s*$/; function isExplicitImportRequired(url) { return !isJSRequest(url) && !isCSSRequest(url); } function extractImportedBindings(id, source, importSpec, importedBindings) { let bindings = importedBindings.get(id); if (!bindings) { bindings = new Set(); importedBindings.set(id, bindings); } const isDynamic = importSpec.d > -1; const isMeta = importSpec.d === -2; if (isDynamic || isMeta) { // this basically means the module will be impacted by any change in its dep bindings.add('*'); return; } const exp = source.slice(importSpec.ss, importSpec.se); const [match0] = findStaticImports(exp); if (!match0) { return; } const parsed = parseStaticImport(match0); if (!parsed) { return; } if (parsed.namespacedImport) { bindings.add('*'); } if (parsed.defaultImport) { bindings.add('default'); } if (parsed.namedImports) { for (const name of Object.keys(parsed.namedImports)) { bindings.add(name); } } } /** * Server-only plugin that lexes, resolves, rewrites and analyzes url imports. * * - Imports are resolved to ensure they exist on disk * * - Lexes HMR accept calls and updates import relationships in the module graph * * - Bare module imports are resolved (by @rollup-plugin/node-resolve) to * absolute file paths, e.g. * * ```js * import 'foo' * ``` * is rewritten to * ```js * import '/@fs//project/node_modules/foo/dist/foo.js' * ``` * * - CSS imports are appended with `.js` since both the js module and the actual * css (referenced via `<link>`) may go through the transform pipeline: * * ```js * import './style.css' * ``` * is rewritten to * ```js * import './style.css.js' * ``` */ function importAnalysisPlugin(config) { const { root, base } = config; const fsUtils = getFsUtils(config); const clientPublicPath = path$o.posix.join(base, CLIENT_PUBLIC_PATH); const enablePartialAccept = config.experimental?.hmrPartialAccept; let server; let _env; let _ssrEnv; function getEnv(ssr) { if (!_ssrEnv || !_env) { const importMetaEnvKeys = {}; const userDefineEnv = {}; for (const key in config.env) { importMetaEnvKeys[key] = JSON.stringify(config.env[key]); } for (const key in config.define) { // non-import.meta.env.* is handled in `clientInjection` plugin if (key.startsWith('import.meta.env.')) { userDefineEnv[key.slice(16)] = config.define[key]; } } const env = `import.meta.env = ${serializeDefine({ ...importMetaEnvKeys, SSR: '__vite_ssr__', ...userDefineEnv, })};`; _ssrEnv = env.replace('__vite_ssr__', 'true'); _env = env.replace('__vite_ssr__', 'false'); } return ssr ? _ssrEnv : _env; } return { name: 'vite:import-analysis', configureServer(_server) { server = _server; }, async transform(source, importer, options) { // In a real app `server` is always defined, but it is undefined when // running src/node/server/__tests__/pluginContainer.spec.ts if (!server) { return null; } const ssr = options?.ssr === true; if (canSkipImportAnalysis(importer)) { debug$1?.(colors$1.dim(`[skipped] ${prettifyUrl(importer, root)}`)); return null; } const msAtStart = debug$1 ? performance.now() : 0; await init; let imports; let exports; source = stripBomTag(source); try { [imports, exports] = parse$e(source); } catch (_e) { const e = _e; const { message, showCodeFrame } = createParseErrorInfo(importer, source); this.error(message, showCodeFrame ?
e.idx : undefined); } const depsOptimizer = getDepsOptimizer(config, ssr); const { moduleGraph } = server; // since we are already in the transform phase of the importer, it must // have been loaded so its entry is guaranteed in the module graph. const importerModule = moduleGraph.getModuleById(importer); if (!importerModule) { // This request is no longer valid. It could happen for optimized deps // requests. A full reload is going to request this id again. // Throwing an outdated error so we properly finish the request with a // 504 sent to the browser. throwOutdatedRequest(importer); } if (!imports.length && !this._addedImports) { importerModule.isSelfAccepting = false; debug$1?.(`${timeFrom(msAtStart)} ${colors$1.dim(`[no imports] ${prettifyUrl(importer, root)}`)}`); return source; } let hasHMR = false; let isSelfAccepting = false; let hasEnv = false; let needQueryInjectHelper = false; let s; const str = () => s || (s = new MagicString(source)); let isPartiallySelfAccepting = false; const importedBindings = enablePartialAccept ? new Map() : null; const toAbsoluteUrl = (url) => path$o.posix.resolve(path$o.posix.dirname(importerModule.url), url); const normalizeUrl = async (url, pos, forceSkipImportAnalysis = false) => { url = stripBase(url, base); let importerFile = importer; const optimizeDeps = getDepOptimizationConfig(config, ssr); if (moduleListContains(optimizeDeps?.exclude, url)) { if (depsOptimizer) { await depsOptimizer.scanProcessing; // if the dependency encountered in the optimized file was excluded from the optimization, // the dependency needs to be resolved starting from the original source location of the optimized file // because starting from node_modules/.vite will not find the dependency if it was not hoisted // (that is, if it is under node_modules directory in the package source of the optimized file) for (const optimizedModule of depsOptimizer.metadata.depInfoList) { if (!optimizedModule.src) continue; // Ignore chunks if (optimizedModule.file === importerModule.file) { importerFile = optimizedModule.src; } } } } const resolved = await this.resolve(url, importerFile); if (!resolved || resolved.meta?.['vite:alias']?.noResolved) { // in ssr, we should let node handle the missing modules if (ssr) { return [url, url]; } // fix #9534, prevent the importerModuleNode from being stopped from propagating updates importerModule.isSelfAccepting = false; return this.error(`Failed to resolve import "${url}" from "${normalizePath$3(path$o.relative(process.cwd(), importerFile))}". Does the file exist?`, pos); } if (isExternalUrl(resolved.id)) { return [resolved.id, resolved.id]; } const isRelative = url[0] === '.'; const isSelfImport = !isRelative && cleanUrl(url) === cleanUrl(importer); // normalize all imports into resolved URLs // e.g. `import 'foo'` -> `import '/@fs/.../node_modules/foo/index.js'` if (resolved.id.startsWith(withTrailingSlash(root))) { // in root: infer short absolute path from root url = resolved.id.slice(root.length); } else if (depsOptimizer?.isOptimizedDepFile(resolved.id) || // vite-plugin-react isn't following the leading \0 virtual module convention. // This is a temporary hack to avoid expensive fs checks for React apps. // We'll remove this as soon as we're able to fix the react plugins.
(resolved.id !== '/@react-refresh' && path$o.isAbsolute(resolved.id) && fsUtils.existsSync(cleanUrl(resolved.id)))) { // an optimized dep may not yet exist in the filesystem, or // a regular file exists but is out of root: rewrite to absolute /@fs/ paths url = path$o.posix.join(FS_PREFIX, resolved.id); } else { url = resolved.id; } // if the resolved id is not a valid browser import specifier, // prefix it to make it valid. We will strip this before feeding it // back into the transform pipeline if (url[0] !== '.' && url[0] !== '/') { url = wrapId$1(resolved.id); } // make the URL browser-valid if not SSR if (!ssr) { // mark non-js/css imports with `?import` if (isExplicitImportRequired(url)) { url = injectQuery(url, 'import'); } else if ((isRelative || isSelfImport) && !DEP_VERSION_RE.test(url)) { // If the url isn't a request for a pre-bundled common chunk, // for relative js/css imports, or self-module virtual imports // (e.g. vue blocks), inherit importer's version query // do not do this for unknown type imports, otherwise the appended // query can break 3rd party plugin's extension checks. const versionMatch = importer.match(DEP_VERSION_RE); if (versionMatch) { url = injectQuery(url, versionMatch[1]); } } // check if the dep has been hmr updated. If yes, we need to attach // its last updated timestamp to force the browser to fetch the most // up-to-date version of this module. try { // delay setting `isSelfAccepting` until the file is actually used (#7870) // We use an internal function to avoid resolving the url again const depModule = await moduleGraph._ensureEntryFromUrl(unwrapId$1(url), ssr, canSkipImportAnalysis(url) || forceSkipImportAnalysis, resolved); if (depModule.lastHMRTimestamp > 0) { url = injectQuery(url, `t=${depModule.lastHMRTimestamp}`); } } catch (e) { // it's possible that the dep fails to resolve (non-existent import); // attach location to the missing import e.pos = pos; throw e; } // prepend base url = joinUrlSegments(base, url); } return [url, resolved.id]; }; const orderedImportedUrls = new Array(imports.length); const orderedAcceptedUrls = new Array(imports.length); const orderedAcceptedExports = new Array(imports.length); await Promise.all(imports.map(async (importSpecifier, index) => { const { s: start, e: end, ss: expStart, se: expEnd, d: dynamicIndex, a: attributeIndex, } = importSpecifier; // #2083 User may use an escaped path, // so use imports[index].n to get the unescaped string let specifier = importSpecifier.n; const rawUrl = source.slice(start, end); // check import.meta usage if (rawUrl === 'import.meta') { const prop = source.slice(end, end + 4); if (prop === '.hot') { hasHMR = true; const endHot = end + 4 + (source[end + 4] === '?' ? 1 : 0); if (source.slice(endHot, endHot + 7) === '.accept') { // further analyze accepted modules if (source.slice(endHot, endHot + 14) === '.acceptExports') { const importAcceptedExports = (orderedAcceptedExports[index] = new Set()); lexAcceptedHmrExports(source, source.indexOf('(', endHot + 14) + 1, importAcceptedExports); isPartiallySelfAccepting = true; } else { const importAcceptedUrls = (orderedAcceptedUrls[index] = new Set()); if (lexAcceptedHmrDeps(source, source.indexOf('(', endHot + 7) + 1, importAcceptedUrls)) { isSelfAccepting = true; } } } } else if (prop === '.env') { hasEnv = true; } return; } else if (templateLiteralRE.test(rawUrl)) { // If the import has backticks but isn't transformed as a glob import // (as there's nothing to glob), check if it's simply a plain string.
// If so, we can rewrite the specifier as a plain string to prevent // an incorrect "cannot be analyzed" warning. if (!(rawUrl.includes('${') && rawUrl.includes('}'))) { specifier = rawUrl.replace(templateLiteralRE, '$1'); } } const isDynamicImport = dynamicIndex > -1; // strip import attributes as we can process them ourselves if (!isDynamicImport && attributeIndex > -1) { str().remove(end + 1, expEnd); } // static import or valid string in dynamic import // If resolvable, let's resolve it if (specifier !== undefined) { // skip external / data uri if (isExternalUrl(specifier) || isDataUrl(specifier)) { return; } // skip ssr external if (ssr) { if (shouldExternalizeForSSR(specifier, importer, config)) { return; } if (isBuiltin(specifier)) { return; } } // skip client if (specifier === clientPublicPath) { return; } // disallow imports of non-asset files under /public if (specifier[0] === '/' && !(config.assetsInclude(cleanUrl(specifier)) || urlRE.test(specifier)) && checkPublicFile(specifier, config)) { throw new Error(`Cannot import non-asset file ${specifier} which is inside /public. ` + `JS/CSS files inside /public are copied as-is on build and ` + `can only be referenced via