// Test project for media files management.
import * as fs$j from 'node:fs';
import fs__default, { promises as promises$1 } from 'node:fs';
import fsp, { lstat as lstat$3, readdir as readdir$5, readlink, realpath as realpath$2 } from 'node:fs/promises';
import path$n, { win32 as win32$1, posix as posix$1, isAbsolute as isAbsolute$1, join as join$2, extname as extname$1, dirname as dirname$2, relative as relative$2, basename as basename$2 } from 'node:path';
import { fileURLToPath, URL as URL$3, parse as parse$h, pathToFileURL } from 'node:url';
import { promisify as promisify$4, format as format$2, inspect } from 'node:util';
import { performance as performance$1 } from 'node:perf_hooks';
import { createRequire as createRequire$1, builtinModules } from 'node:module';
import require$$0$3 from 'tty';
import require$$0$4, { win32, posix, isAbsolute, resolve as resolve$3, relative as relative$1, basename as basename$1, extname, dirname as dirname$1, join as join$1, sep as sep$1, normalize as normalize$1 } from 'path';
import esbuild, { transform as transform$1, formatMessages, build as build$3 } from 'esbuild';
import { CLIENT_ENTRY, OPTIMIZABLE_ENTRY_RE, wildcardHosts, loopbackHosts, FS_PREFIX, CLIENT_PUBLIC_PATH, ENV_PUBLIC_PATH, DEFAULT_ASSETS_INLINE_LIMIT, CSS_LANGS_RE, ESBUILD_MODULES_TARGET, SPECIAL_QUERY_RE, ENV_ENTRY, DEP_VERSION_RE, DEFAULT_MAIN_FIELDS, DEFAULT_EXTENSIONS, KNOWN_ASSET_TYPES, JS_TYPES_RE, METADATA_FILENAME, VITE_PACKAGE_DIR, DEFAULT_DEV_PORT, CLIENT_DIR, VERSION, DEFAULT_PREVIEW_PORT, DEFAULT_ASSETS_RE, DEFAULT_CONFIG_FILES } from '../constants.js';
import * as require$$0$2 from 'fs';
import require$$0__default, { lstatSync, readdir as readdir$4, readdirSync, readlinkSync, realpathSync as realpathSync$1, existsSync, readFileSync, statSync as statSync$1 } from 'fs';
import { EventEmitter as EventEmitter$4 } from 'node:events';
import Stream$1 from 'node:stream';
import { StringDecoder } from 'node:string_decoder';
import { exec, execSync } from 'node:child_process';
import { createServer as createServer$3, STATUS_CODES, get as get$2 } from 'node:http';
import { createServer as createServer$2, get as get$1 } from 'node:https';
import require$$0$5 from 'util';
import require$$4$1 from 'net';
import require$$0$7 from 'events';
import require$$0$9 from 'url';
import require$$1 from 'http';
import require$$0$6 from 'stream';
import require$$2 from 'os';
import require$$2$1 from 'child_process';
import os$5 from 'node:os';
import { createHash as createHash$2 } from 'node:crypto';
import { promises } from 'node:dns';
import require$$3$1 from 'crypto';
import require$$0$8, { createRequire as createRequire$2 } from 'module';
import assert$1 from 'node:assert';
import v8 from 'node:v8';
import { Worker as Worker$1 } from 'node:worker_threads';
import { Buffer as Buffer$1 } from 'node:buffer';
import { parseAstAsync, parseAst } from 'rollup/parseAst';
import * as qs from 'querystring';
import readline from 'node:readline';
import zlib$1 from 'zlib';
import require$$0$a from 'buffer';
import require$$1$1 from 'https';
import require$$4$2 from 'tls';
import require$$4$3 from 'assert';
import { gzip } from 'node:zlib';
import { fileURLToPath as __cjs_fileURLToPath } from 'node:url';
import { dirname as __cjs_dirname } from 'node:path';
import { createRequire as __cjs_createRequire } from 'node:module';
const __filename = __cjs_fileURLToPath(import.meta.url);
const __dirname = __cjs_dirname(__filename);
const require = __cjs_createRequire(import.meta.url);
const __require = require;
var commonjsGlobal = typeof globalThis !== 'undefined' ? globalThis : typeof window !== 'undefined' ? window : typeof global !== 'undefined' ? global : typeof self !== 'undefined' ? self : {};
function getDefaultExportFromCjs (x) {
return x && x.__esModule && Object.prototype.hasOwnProperty.call(x, 'default') ? x['default'] : x;
}
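// Illustrative note: this interop helper returns `module.default` only when the
// transpiled CommonJS namespace was flagged as an ES module, for example:
//   getDefaultExportFromCjs({ __esModule: true, default: fn }) // => fn
//   getDefaultExportFromCjs({ helper: 1 })                     // => the object itself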
function getAugmentedNamespace(n) {
if (n.__esModule) return n;
var f = n.default;
if (typeof f == "function") {
var a = function a () {
if (this instanceof a) {
return Reflect.construct(f, arguments, this.constructor);
}
return f.apply(this, arguments);
};
a.prototype = f.prototype;
} else a = {};
Object.defineProperty(a, '__esModule', {value: true});
Object.keys(n).forEach(function (k) {
var d = Object.getOwnPropertyDescriptor(n, k);
Object.defineProperty(a, k, d.get ? d : {
enumerable: true,
get: function () {
return n[k];
}
});
});
return a;
}
function commonjsRequire(path) {
throw new Error('Could not dynamically require "' + path + '". Please configure the dynamicRequireTargets and/or ignoreDynamicRequires option of @rollup/plugin-commonjs appropriately for this require call to work.');
}
var picocolors = {exports: {}};
let argv = process.argv || [],
env$1 = process.env;
let isColorSupported =
!("NO_COLOR" in env$1 || argv.includes("--no-color")) &&
("FORCE_COLOR" in env$1 ||
argv.includes("--color") ||
process.platform === "win32" ||
(commonjsRequire != null && require$$0$3.isatty(1) && env$1.TERM !== "dumb") ||
"CI" in env$1);
let formatter =
(open, close, replace = open) =>
input => {
let string = "" + input;
let index = string.indexOf(close, open.length);
return ~index
? open + replaceClose(string, close, replace, index) + close
: open + string + close
};
let replaceClose = (string, close, replace, index) => {
let result = "";
let cursor = 0;
do {
result += string.substring(cursor, index) + replace;
cursor = index + close.length;
index = string.indexOf(close, cursor);
} while (~index)
return result + string.substring(cursor)
};
let createColors = (enabled = isColorSupported) => {
let init = enabled ? formatter : () => String;
return {
isColorSupported: enabled,
reset: init("\x1b[0m", "\x1b[0m"),
bold: init("\x1b[1m", "\x1b[22m", "\x1b[22m\x1b[1m"),
dim: init("\x1b[2m", "\x1b[22m", "\x1b[22m\x1b[2m"),
italic: init("\x1b[3m", "\x1b[23m"),
underline: init("\x1b[4m", "\x1b[24m"),
inverse: init("\x1b[7m", "\x1b[27m"),
hidden: init("\x1b[8m", "\x1b[28m"),
strikethrough: init("\x1b[9m", "\x1b[29m"),
black: init("\x1b[30m", "\x1b[39m"),
red: init("\x1b[31m", "\x1b[39m"),
green: init("\x1b[32m", "\x1b[39m"),
yellow: init("\x1b[33m", "\x1b[39m"),
blue: init("\x1b[34m", "\x1b[39m"),
magenta: init("\x1b[35m", "\x1b[39m"),
cyan: init("\x1b[36m", "\x1b[39m"),
white: init("\x1b[37m", "\x1b[39m"),
gray: init("\x1b[90m", "\x1b[39m"),
bgBlack: init("\x1b[40m", "\x1b[49m"),
bgRed: init("\x1b[41m", "\x1b[49m"),
bgGreen: init("\x1b[42m", "\x1b[49m"),
bgYellow: init("\x1b[43m", "\x1b[49m"),
bgBlue: init("\x1b[44m", "\x1b[49m"),
bgMagenta: init("\x1b[45m", "\x1b[49m"),
bgCyan: init("\x1b[46m", "\x1b[49m"),
bgWhite: init("\x1b[47m", "\x1b[49m"),
}
};
picocolors.exports = createColors();
picocolors.exports.createColors = createColors;
var picocolorsExports = picocolors.exports;
var colors$1 = /*@__PURE__*/getDefaultExportFromCjs(picocolorsExports);
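// Illustrative usage sketch for the picocolors build above:
//   colors$1.red('error')                        // => '\x1b[31merror\x1b[39m'
//   colors$1.red(`x ${colors$1.yellow('y')} z`)  // the inner '\x1b[39m' close is replaced
//                                                // (see replaceClose) so ' z' stays red
//   const plain = createColors(false);           // with color support disabled every
//   plain.green('ok') === 'ok'                   // style is just String(), i.e. a no-op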
function matches$1(pattern, importee) {
if (pattern instanceof RegExp) {
return pattern.test(importee);
}
if (importee.length < pattern.length) {
return false;
}
if (importee === pattern) {
return true;
}
// eslint-disable-next-line prefer-template
return importee.startsWith(pattern + '/');
}
function getEntries({ entries, customResolver }) {
if (!entries) {
return [];
}
const resolverFunctionFromOptions = resolveCustomResolver(customResolver);
if (Array.isArray(entries)) {
return entries.map((entry) => {
return {
find: entry.find,
replacement: entry.replacement,
resolverFunction: resolveCustomResolver(entry.customResolver) || resolverFunctionFromOptions
};
});
}
return Object.entries(entries).map(([key, value]) => {
return { find: key, replacement: value, resolverFunction: resolverFunctionFromOptions };
});
}
function getHookFunction(hook) {
if (typeof hook === 'function') {
return hook;
}
if (hook && 'handler' in hook && typeof hook.handler === 'function') {
return hook.handler;
}
return null;
}
function resolveCustomResolver(customResolver) {
if (typeof customResolver === 'function') {
return customResolver;
}
if (customResolver) {
return getHookFunction(customResolver.resolveId);
}
return null;
}
function alias$1(options = {}) {
const entries = getEntries(options);
if (entries.length === 0) {
return {
name: 'alias',
resolveId: () => null
};
}
return {
name: 'alias',
async buildStart(inputOptions) {
await Promise.all([...(Array.isArray(options.entries) ? options.entries : []), options].map(({ customResolver }) => { var _a; return customResolver && ((_a = getHookFunction(customResolver.buildStart)) === null || _a === void 0 ? void 0 : _a.call(this, inputOptions)); }));
},
resolveId(importee, importer, resolveOptions) {
// First match is supposed to be the correct one
const matchedEntry = entries.find((entry) => matches$1(entry.find, importee));
if (!matchedEntry) {
return null;
}
const updatedId = importee.replace(matchedEntry.find, matchedEntry.replacement);
if (matchedEntry.resolverFunction) {
return matchedEntry.resolverFunction.call(this, updatedId, importer, resolveOptions);
}
return this.resolve(updatedId, importer, Object.assign({ skipSelf: true }, resolveOptions)).then((resolved) => {
if (resolved)
return resolved;
if (!require$$0$4.isAbsolute(updatedId)) {
this.warn(`rewrote ${importee} to ${updatedId} but the result was not an absolute path and was not handled by other plugins. ` +
`This will lead to duplicated modules for the same path. ` +
`To avoid duplicating modules, you should resolve to an absolute path.`);
}
return { id: updatedId };
});
}
};
}
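// Illustrative usage sketch for the bundled alias plugin above. Entries may be
// an array of { find, replacement } pairs or an object map, and `find` may be a
// string prefix or a RegExp:
//   const aliasPlugin = alias$1({
//     entries: [
//       { find: '@', replacement: '/src' },
//       { find: /^lodash$/, replacement: 'lodash-es' }
//     ]
//   });
//   // '@/utils/foo' is rewritten to '/src/utils/foo' and then handed back to
//   // the remaining resolvers via this.resolve().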
const VALID_ID_PREFIX = `/@id/`;
const NULL_BYTE_PLACEHOLDER = `__x00__`;
let SOURCEMAPPING_URL = "sourceMa";
SOURCEMAPPING_URL += "ppingURL";
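// The literal is assembled in two pieces so that the assembled sourcemap
// directive keyword never appears verbatim in this file, which keeps tools
// that scan for sourcemap annotations from misinterpreting it.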
const VITE_RUNTIME_SOURCEMAPPING_SOURCE = "//# sourceMappingSource=vite-runtime";
const isWindows$3 = typeof process !== "undefined" && process.platform === "win32";
function wrapId$1(id) {
return id.startsWith(VALID_ID_PREFIX) ? id : VALID_ID_PREFIX + id.replace("\0", NULL_BYTE_PLACEHOLDER);
}
function unwrapId$1(id) {
return id.startsWith(VALID_ID_PREFIX) ? id.slice(VALID_ID_PREFIX.length).replace(NULL_BYTE_PLACEHOLDER, "\0") : id;
}
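// Illustrative examples: null bytes are not valid in import URLs, so virtual
// module ids are wrapped for the browser and unwrapped on the way back:
//   wrapId$1('\0virtual:my-module')              // => '/@id/__x00__virtual:my-module'
//   unwrapId$1('/@id/__x00__virtual:my-module')  // => '\0virtual:my-module'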
const windowsSlashRE = /\\/g;
function slash$1(p) {
return p.replace(windowsSlashRE, "/");
}
const postfixRE = /[?#].*$/;
function cleanUrl(url) {
return url.replace(postfixRE, "");
}
function withTrailingSlash(path) {
if (path[path.length - 1] !== "/") {
return `${path}/`;
}
return path;
}
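// Illustrative examples for the path/URL helpers above:
//   slash$1('C:\\proj\\src\\main.ts')        // => 'C:/proj/src/main.ts'
//   cleanUrl('/src/app.vue?vue&type=style')  // => '/src/app.vue'
//   withTrailingSlash('/src')                // => '/src/'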
const AsyncFunction = async function() {
}.constructor;
const asyncFunctionDeclarationPaddingLineCount = /* @__PURE__ */ (() => {
const body = "/*code*/";
const source = new AsyncFunction("a", "b", body).toString();
return source.slice(0, source.indexOf(body)).split("\n").length - 1;
})();
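// Note: `new AsyncFunction('a', 'b', body).toString()` yields something like
// 'async function anonymous(a,b\n) {\n/*code*/\n}', so the constant above is the
// number of wrapper lines the engine inserts before the body; it can be
// subtracted when mapping line numbers of code evaluated this way back to the
// original source.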
// @ts-check
/** @typedef { import('estree').BaseNode} BaseNode */
/** @typedef {{
skip: () => void;
remove: () => void;
replace: (node: BaseNode) => void;
}} WalkerContext */
let WalkerBase$1 = class WalkerBase {
constructor() {
/** @type {boolean} */
this.should_skip = false;
/** @type {boolean} */
this.should_remove = false;
/** @type {BaseNode | null} */
this.replacement = null;
/** @type {WalkerContext} */
this.context = {
skip: () => (this.should_skip = true),
remove: () => (this.should_remove = true),
replace: (node) => (this.replacement = node)
};
}
/**
*
* @param {any} parent
* @param {string} prop
* @param {number} index
* @param {BaseNode} node
*/
replace(parent, prop, index, node) {
if (parent) {
if (index !== null) {
parent[prop][index] = node;
} else {
parent[prop] = node;
}
}
}
/**
*
* @param {any} parent
* @param {string} prop
* @param {number} index
*/
remove(parent, prop, index) {
if (parent) {
if (index !== null) {
parent[prop].splice(index, 1);
} else {
delete parent[prop];
}
}
}
};
// @ts-check
/** @typedef { import('estree').BaseNode} BaseNode */
/** @typedef { import('./walker.js').WalkerContext} WalkerContext */
/** @typedef {(
* this: WalkerContext,
* node: BaseNode,
* parent: BaseNode,
* key: string,
* index: number
* ) => void} SyncHandler */
let SyncWalker$1 = class SyncWalker extends WalkerBase$1 {
/**
*
* @param {SyncHandler} enter
* @param {SyncHandler} leave
*/
constructor(enter, leave) {
super();
/** @type {SyncHandler} */
this.enter = enter;
/** @type {SyncHandler} */
this.leave = leave;
}
/**
*
* @param {BaseNode} node
* @param {BaseNode} parent
* @param {string} [prop]
* @param {number} [index]
* @returns {BaseNode}
*/
visit(node, parent, prop, index) {
if (node) {
if (this.enter) {
const _should_skip = this.should_skip;
const _should_remove = this.should_remove;
const _replacement = this.replacement;
this.should_skip = false;
this.should_remove = false;
this.replacement = null;
this.enter.call(this.context, node, parent, prop, index);
if (this.replacement) {
node = this.replacement;
this.replace(parent, prop, index, node);
}
if (this.should_remove) {
this.remove(parent, prop, index);
}
const skipped = this.should_skip;
const removed = this.should_remove;
this.should_skip = _should_skip;
this.should_remove = _should_remove;
this.replacement = _replacement;
if (skipped) return node;
if (removed) return null;
}
for (const key in node) {
const value = node[key];
if (typeof value !== "object") {
continue;
} else if (Array.isArray(value)) {
for (let i = 0; i < value.length; i += 1) {
if (value[i] !== null && typeof value[i].type === 'string') {
if (!this.visit(value[i], node, key, i)) {
// removed
i--;
}
}
}
} else if (value !== null && typeof value.type === "string") {
this.visit(value, node, key, null);
}
}
if (this.leave) {
const _replacement = this.replacement;
const _should_remove = this.should_remove;
this.replacement = null;
this.should_remove = false;
this.leave.call(this.context, node, parent, prop, index);
if (this.replacement) {
node = this.replacement;
this.replace(parent, prop, index, node);
}
if (this.should_remove) {
this.remove(parent, prop, index);
}
const removed = this.should_remove;
this.replacement = _replacement;
this.should_remove = _should_remove;
if (removed) return null;
}
}
return node;
}
};
// @ts-check
/** @typedef { import('estree').BaseNode} BaseNode */
/** @typedef { import('./sync.js').SyncHandler} SyncHandler */
/** @typedef { import('./async.js').AsyncHandler} AsyncHandler */
/**
*
* @param {BaseNode} ast
* @param {{
* enter?: SyncHandler
* leave?: SyncHandler
* }} walker
* @returns {BaseNode}
*/
function walk$3(ast, { enter, leave }) {
const instance = new SyncWalker$1(enter, leave);
return instance.visit(ast, null);
}
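// Illustrative usage sketch (assumes `ast` is an ESTree-compatible node):
//   walk$3(ast, {
//     enter(node, parent) {
//       if (node.type === 'ImportDeclaration') this.skip(); // don't descend into children
//     },
//     leave(node) {
//       // runs after all of `node`'s children have been visited
//     }
//   });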
var utils$k = {};
const path$m = require$$0$4;
const WIN_SLASH = '\\\\/';
const WIN_NO_SLASH = `[^${WIN_SLASH}]`;
/**
* Posix glob regex
*/
const DOT_LITERAL = '\\.';
const PLUS_LITERAL = '\\+';
const QMARK_LITERAL = '\\?';
const SLASH_LITERAL = '\\/';
const ONE_CHAR = '(?=.)';
const QMARK = '[^/]';
const END_ANCHOR = `(?:${SLASH_LITERAL}|$)`;
const START_ANCHOR = `(?:^|${SLASH_LITERAL})`;
const DOTS_SLASH = `${DOT_LITERAL}{1,2}${END_ANCHOR}`;
const NO_DOT = `(?!${DOT_LITERAL})`;
const NO_DOTS = `(?!${START_ANCHOR}${DOTS_SLASH})`;
const NO_DOT_SLASH = `(?!${DOT_LITERAL}{0,1}${END_ANCHOR})`;
const NO_DOTS_SLASH = `(?!${DOTS_SLASH})`;
const QMARK_NO_DOT = `[^.${SLASH_LITERAL}]`;
const STAR$1 = `${QMARK}*?`;
const POSIX_CHARS = {
DOT_LITERAL,
PLUS_LITERAL,
QMARK_LITERAL,
SLASH_LITERAL,
ONE_CHAR,
QMARK,
END_ANCHOR,
DOTS_SLASH,
NO_DOT,
NO_DOTS,
NO_DOT_SLASH,
NO_DOTS_SLASH,
QMARK_NO_DOT,
STAR: STAR$1,
START_ANCHOR
};
/**
* Windows glob regex
*/
const WINDOWS_CHARS = {
...POSIX_CHARS,
SLASH_LITERAL: `[${WIN_SLASH}]`,
QMARK: WIN_NO_SLASH,
STAR: `${WIN_NO_SLASH}*?`,
DOTS_SLASH: `${DOT_LITERAL}{1,2}(?:[${WIN_SLASH}]|$)`,
NO_DOT: `(?!${DOT_LITERAL})`,
NO_DOTS: `(?!(?:^|[${WIN_SLASH}])${DOT_LITERAL}{1,2}(?:[${WIN_SLASH}]|$))`,
NO_DOT_SLASH: `(?!${DOT_LITERAL}{0,1}(?:[${WIN_SLASH}]|$))`,
NO_DOTS_SLASH: `(?!${DOT_LITERAL}{1,2}(?:[${WIN_SLASH}]|$))`,
QMARK_NO_DOT: `[^.${WIN_SLASH}]`,
START_ANCHOR: `(?:^|[${WIN_SLASH}])`,
END_ANCHOR: `(?:[${WIN_SLASH}]|$)`
};
/**
* POSIX Bracket Regex
*/
const POSIX_REGEX_SOURCE$1 = {
alnum: 'a-zA-Z0-9',
alpha: 'a-zA-Z',
ascii: '\\x00-\\x7F',
blank: ' \\t',
cntrl: '\\x00-\\x1F\\x7F',
digit: '0-9',
graph: '\\x21-\\x7E',
lower: 'a-z',
print: '\\x20-\\x7E ',
punct: '\\-!"#$%&\'()\\*+,./:;<=>?@[\\]^_`{|}~',
space: ' \\t\\r\\n\\v\\f',
upper: 'A-Z',
word: 'A-Za-z0-9_',
xdigit: 'A-Fa-f0-9'
};
var constants$6 = {
MAX_LENGTH: 1024 * 64,
POSIX_REGEX_SOURCE: POSIX_REGEX_SOURCE$1,
// regular expressions
REGEX_BACKSLASH: /\\(?![*+?^${}(|)[\]])/g,
REGEX_NON_SPECIAL_CHARS: /^[^@![\].,$*+?^{}()|\\/]+/,
REGEX_SPECIAL_CHARS: /[-*+?.^${}(|)[\]]/,
REGEX_SPECIAL_CHARS_BACKREF: /(\\?)((\W)(\3*))/g,
REGEX_SPECIAL_CHARS_GLOBAL: /([-*+?.^${}(|)[\]])/g,
REGEX_REMOVE_BACKSLASH: /(?:\[.*?[^\\]\]|\\(?=.))/g,
// Replace globs with equivalent patterns to reduce parsing time.
REPLACEMENTS: {
'***': '*',
'**/**': '**',
'**/**/**': '**'
},
// Digits
CHAR_0: 48, /* 0 */
CHAR_9: 57, /* 9 */
// Alphabet chars.
CHAR_UPPERCASE_A: 65, /* A */
CHAR_LOWERCASE_A: 97, /* a */
CHAR_UPPERCASE_Z: 90, /* Z */
CHAR_LOWERCASE_Z: 122, /* z */
CHAR_LEFT_PARENTHESES: 40, /* ( */
CHAR_RIGHT_PARENTHESES: 41, /* ) */
CHAR_ASTERISK: 42, /* * */
// Non-alphabetic chars.
CHAR_AMPERSAND: 38, /* & */
CHAR_AT: 64, /* @ */
CHAR_BACKWARD_SLASH: 92, /* \ */
CHAR_CARRIAGE_RETURN: 13, /* \r */
CHAR_CIRCUMFLEX_ACCENT: 94, /* ^ */
CHAR_COLON: 58, /* : */
CHAR_COMMA: 44, /* , */
CHAR_DOT: 46, /* . */
CHAR_DOUBLE_QUOTE: 34, /* " */
CHAR_EQUAL: 61, /* = */
CHAR_EXCLAMATION_MARK: 33, /* ! */
CHAR_FORM_FEED: 12, /* \f */
CHAR_FORWARD_SLASH: 47, /* / */
CHAR_GRAVE_ACCENT: 96, /* ` */
CHAR_HASH: 35, /* # */
CHAR_HYPHEN_MINUS: 45, /* - */
CHAR_LEFT_ANGLE_BRACKET: 60, /* < */
CHAR_LEFT_CURLY_BRACE: 123, /* { */
CHAR_LEFT_SQUARE_BRACKET: 91, /* [ */
CHAR_LINE_FEED: 10, /* \n */
CHAR_NO_BREAK_SPACE: 160, /* \u00A0 */
CHAR_PERCENT: 37, /* % */
CHAR_PLUS: 43, /* + */
CHAR_QUESTION_MARK: 63, /* ? */
CHAR_RIGHT_ANGLE_BRACKET: 62, /* > */
CHAR_RIGHT_CURLY_BRACE: 125, /* } */
CHAR_RIGHT_SQUARE_BRACKET: 93, /* ] */
CHAR_SEMICOLON: 59, /* ; */
CHAR_SINGLE_QUOTE: 39, /* ' */
CHAR_SPACE: 32, /* */
CHAR_TAB: 9, /* \t */
CHAR_UNDERSCORE: 95, /* _ */
CHAR_VERTICAL_LINE: 124, /* | */
CHAR_ZERO_WIDTH_NOBREAK_SPACE: 65279, /* \uFEFF */
SEP: path$m.sep,
/**
* Create EXTGLOB_CHARS
*/
extglobChars(chars) {
return {
'!': { type: 'negate', open: '(?:(?!(?:', close: `))${chars.STAR})` },
'?': { type: 'qmark', open: '(?:', close: ')?' },
'+': { type: 'plus', open: '(?:', close: ')+' },
'*': { type: 'star', open: '(?:', close: ')*' },
'@': { type: 'at', open: '(?:', close: ')' }
};
},
/**
* Create GLOB_CHARS
*/
globChars(win32) {
return win32 === true ? WINDOWS_CHARS : POSIX_CHARS;
}
};
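// Illustrative example: the parser below picks the character set per platform,
//   constants$6.globChars(true)   // => WINDOWS_CHARS (slashes may be '\' or '/')
//   constants$6.globChars(false)  // => POSIX_CHARS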
(function (exports) {
const path = require$$0$4;
const win32 = process.platform === 'win32';
const {
REGEX_BACKSLASH,
REGEX_REMOVE_BACKSLASH,
REGEX_SPECIAL_CHARS,
REGEX_SPECIAL_CHARS_GLOBAL
} = constants$6;
exports.isObject = val => val !== null && typeof val === 'object' && !Array.isArray(val);
exports.hasRegexChars = str => REGEX_SPECIAL_CHARS.test(str);
exports.isRegexChar = str => str.length === 1 && exports.hasRegexChars(str);
exports.escapeRegex = str => str.replace(REGEX_SPECIAL_CHARS_GLOBAL, '\\$1');
exports.toPosixSlashes = str => str.replace(REGEX_BACKSLASH, '/');
exports.removeBackslashes = str => {
return str.replace(REGEX_REMOVE_BACKSLASH, match => {
return match === '\\' ? '' : match;
});
};
exports.supportsLookbehinds = () => {
const segs = process.version.slice(1).split('.').map(Number);
if (segs.length === 3 && segs[0] >= 9 || (segs[0] === 8 && segs[1] >= 10)) {
return true;
}
return false;
};
exports.isWindows = options => {
if (options && typeof options.windows === 'boolean') {
return options.windows;
}
return win32 === true || path.sep === '\\';
};
exports.escapeLast = (input, char, lastIdx) => {
const idx = input.lastIndexOf(char, lastIdx);
if (idx === -1) return input;
if (input[idx - 1] === '\\') return exports.escapeLast(input, char, idx - 1);
return `${input.slice(0, idx)}\\${input.slice(idx)}`;
};
exports.removePrefix = (input, state = {}) => {
let output = input;
if (output.startsWith('./')) {
output = output.slice(2);
state.prefix = './';
}
return output;
};
exports.wrapOutput = (input, state = {}, options = {}) => {
const prepend = options.contains ? '' : '^';
const append = options.contains ? '' : '$';
let output = `${prepend}(?:${input})${append}`;
if (state.negated === true) {
output = `(?:^(?!${output}).*$)`;
}
return output;
};
} (utils$k));
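// Illustrative examples for the helpers above:
//   utils$k.escapeRegex('foo.(bar)')        // => 'foo\.\(bar\)'
//   utils$k.toPosixSlashes('a\\b\\c')       // => 'a/b/c'
//   utils$k.removePrefix('./src/*.js', {})  // => 'src/*.js' (the './' prefix is recorded on the state)
//   utils$k.wrapOutput('foo', {}, {})       // => '^(?:foo)$'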
const utils$j = utils$k;
const {
CHAR_ASTERISK, /* * */
CHAR_AT, /* @ */
CHAR_BACKWARD_SLASH, /* \ */
CHAR_COMMA: CHAR_COMMA$1, /* , */
CHAR_DOT: CHAR_DOT$1, /* . */
CHAR_EXCLAMATION_MARK, /* ! */
CHAR_FORWARD_SLASH, /* / */
CHAR_LEFT_CURLY_BRACE: CHAR_LEFT_CURLY_BRACE$1, /* { */
CHAR_LEFT_PARENTHESES: CHAR_LEFT_PARENTHESES$1, /* ( */
CHAR_LEFT_SQUARE_BRACKET: CHAR_LEFT_SQUARE_BRACKET$1, /* [ */
CHAR_PLUS, /* + */
CHAR_QUESTION_MARK, /* ? */
CHAR_RIGHT_CURLY_BRACE: CHAR_RIGHT_CURLY_BRACE$1, /* } */
CHAR_RIGHT_PARENTHESES: CHAR_RIGHT_PARENTHESES$1, /* ) */
CHAR_RIGHT_SQUARE_BRACKET: CHAR_RIGHT_SQUARE_BRACKET$1 /* ] */
} = constants$6;
const isPathSeparator = code => {
return code === CHAR_FORWARD_SLASH || code === CHAR_BACKWARD_SLASH;
};
const depth = token => {
if (token.isPrefix !== true) {
token.depth = token.isGlobstar ? Infinity : 1;
}
};
/**
* Quickly scans a glob pattern and returns an object with a handful of
* useful properties, like `isGlob`, `path` (the leading non-glob, if it exists),
* `glob` (the actual pattern), `negated` (true if the path starts with `!` but not
* with `!(`) and `negatedExtglob` (true if the path starts with `!(`).
*
* ```js
* const pm = require('picomatch');
* console.log(pm.scan('foo/bar/*.js'));
* { isGlob: true, input: 'foo/bar/*.js', base: 'foo/bar', glob: '*.js' }
* ```
* @param {String} `str`
* @param {Object} `options`
* @return {Object} Returns an object with tokens and regex source string.
* @api public
*/
const scan$2 = (input, options) => {
const opts = options || {};
const length = input.length - 1;
const scanToEnd = opts.parts === true || opts.scanToEnd === true;
const slashes = [];
const tokens = [];
const parts = [];
let str = input;
let index = -1;
let start = 0;
let lastIndex = 0;
let isBrace = false;
let isBracket = false;
let isGlob = false;
let isExtglob = false;
let isGlobstar = false;
let braceEscaped = false;
let backslashes = false;
let negated = false;
let negatedExtglob = false;
let finished = false;
let braces = 0;
let prev;
let code;
let token = { value: '', depth: 0, isGlob: false };
const eos = () => index >= length;
const peek = () => str.charCodeAt(index + 1);
const advance = () => {
prev = code;
return str.charCodeAt(++index);
};
while (index < length) {
code = advance();
let next;
if (code === CHAR_BACKWARD_SLASH) {
backslashes = token.backslashes = true;
code = advance();
if (code === CHAR_LEFT_CURLY_BRACE$1) {
braceEscaped = true;
}
continue;
}
if (braceEscaped === true || code === CHAR_LEFT_CURLY_BRACE$1) {
braces++;
while (eos() !== true && (code = advance())) {
if (code === CHAR_BACKWARD_SLASH) {
backslashes = token.backslashes = true;
advance();
continue;
}
if (code === CHAR_LEFT_CURLY_BRACE$1) {
braces++;
continue;
}
if (braceEscaped !== true && code === CHAR_DOT$1 && (code = advance()) === CHAR_DOT$1) {
isBrace = token.isBrace = true;
isGlob = token.isGlob = true;
finished = true;
if (scanToEnd === true) {
continue;
}
break;
}
if (braceEscaped !== true && code === CHAR_COMMA$1) {
isBrace = token.isBrace = true;
isGlob = token.isGlob = true;
finished = true;
if (scanToEnd === true) {
continue;
}
break;
}
if (code === CHAR_RIGHT_CURLY_BRACE$1) {
braces--;
if (braces === 0) {
braceEscaped = false;
isBrace = token.isBrace = true;
finished = true;
break;
}
}
}
if (scanToEnd === true) {
continue;
}
break;
}
if (code === CHAR_FORWARD_SLASH) {
slashes.push(index);
tokens.push(token);
token = { value: '', depth: 0, isGlob: false };
if (finished === true) continue;
if (prev === CHAR_DOT$1 && index === (start + 1)) {
start += 2;
continue;
}
lastIndex = index + 1;
continue;
}
if (opts.noext !== true) {
const isExtglobChar = code === CHAR_PLUS
|| code === CHAR_AT
|| code === CHAR_ASTERISK
|| code === CHAR_QUESTION_MARK
|| code === CHAR_EXCLAMATION_MARK;
if (isExtglobChar === true && peek() === CHAR_LEFT_PARENTHESES$1) {
isGlob = token.isGlob = true;
isExtglob = token.isExtglob = true;
finished = true;
if (code === CHAR_EXCLAMATION_MARK && index === start) {
negatedExtglob = true;
}
if (scanToEnd === true) {
while (eos() !== true && (code = advance())) {
if (code === CHAR_BACKWARD_SLASH) {
backslashes = token.backslashes = true;
code = advance();
continue;
}
if (code === CHAR_RIGHT_PARENTHESES$1) {
isGlob = token.isGlob = true;
finished = true;
break;
}
}
continue;
}
break;
}
}
if (code === CHAR_ASTERISK) {
if (prev === CHAR_ASTERISK) isGlobstar = token.isGlobstar = true;
isGlob = token.isGlob = true;
finished = true;
if (scanToEnd === true) {
continue;
}
break;
}
if (code === CHAR_QUESTION_MARK) {
isGlob = token.isGlob = true;
finished = true;
if (scanToEnd === true) {
continue;
}
break;
}
if (code === CHAR_LEFT_SQUARE_BRACKET$1) {
while (eos() !== true && (next = advance())) {
if (next === CHAR_BACKWARD_SLASH) {
backslashes = token.backslashes = true;
advance();
continue;
}
if (next === CHAR_RIGHT_SQUARE_BRACKET$1) {
isBracket = token.isBracket = true;
isGlob = token.isGlob = true;
finished = true;
break;
}
}
if (scanToEnd === true) {
continue;
}
break;
}
if (opts.nonegate !== true && code === CHAR_EXCLAMATION_MARK && index === start) {
negated = token.negated = true;
start++;
continue;
}
if (opts.noparen !== true && code === CHAR_LEFT_PARENTHESES$1) {
isGlob = token.isGlob = true;
if (scanToEnd === true) {
while (eos() !== true && (code = advance())) {
if (code === CHAR_LEFT_PARENTHESES$1) {
backslashes = token.backslashes = true;
code = advance();
continue;
}
if (code === CHAR_RIGHT_PARENTHESES$1) {
finished = true;
break;
}
}
continue;
}
break;
}
if (isGlob === true) {
finished = true;
if (scanToEnd === true) {
continue;
}
break;
}
}
if (opts.noext === true) {
isExtglob = false;
isGlob = false;
}
let base = str;
let prefix = '';
let glob = '';
if (start > 0) {
prefix = str.slice(0, start);
str = str.slice(start);
lastIndex -= start;
}
if (base && isGlob === true && lastIndex > 0) {
base = str.slice(0, lastIndex);
glob = str.slice(lastIndex);
} else if (isGlob === true) {
base = '';
glob = str;
} else {
base = str;
}
if (base && base !== '' && base !== '/' && base !== str) {
if (isPathSeparator(base.charCodeAt(base.length - 1))) {
base = base.slice(0, -1);
}
}
if (opts.unescape === true) {
if (glob) glob = utils$j.removeBackslashes(glob);
if (base && backslashes === true) {
base = utils$j.removeBackslashes(base);
}
}
const state = {
prefix,
input,
start,
base,
glob,
isBrace,
isBracket,
isGlob,
isExtglob,
isGlobstar,
negated,
negatedExtglob
};
if (opts.tokens === true) {
state.maxDepth = 0;
if (!isPathSeparator(code)) {
tokens.push(token);
}
state.tokens = tokens;
}
if (opts.parts === true || opts.tokens === true) {
let prevIndex;
for (let idx = 0; idx < slashes.length; idx++) {
const n = prevIndex ? prevIndex + 1 : start;
const i = slashes[idx];
const value = input.slice(n, i);
if (opts.tokens) {
if (idx === 0 && start !== 0) {
tokens[idx].isPrefix = true;
tokens[idx].value = prefix;
} else {
tokens[idx].value = value;
}
depth(tokens[idx]);
state.maxDepth += tokens[idx].depth;
}
if (idx !== 0 || value !== '') {
parts.push(value);
}
prevIndex = i;
}
if (prevIndex && prevIndex + 1 < input.length) {
const value = input.slice(prevIndex + 1);
parts.push(value);
if (opts.tokens) {
tokens[tokens.length - 1].value = value;
depth(tokens[tokens.length - 1]);
state.maxDepth += tokens[tokens.length - 1].depth;
}
}
state.slashes = slashes;
state.parts = parts;
}
return state;
};
var scan_1 = scan$2;
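// Illustrative example with `parts` enabled:
//   scan$2('foo/bar/*.js', { parts: true })
//   // => { base: 'foo/bar', glob: '*.js', parts: ['foo', 'bar', '*.js'], isGlob: true, ... }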
const constants$5 = constants$6;
const utils$i = utils$k;
/**
* Constants
*/
const {
MAX_LENGTH: MAX_LENGTH$1,
POSIX_REGEX_SOURCE,
REGEX_NON_SPECIAL_CHARS,
REGEX_SPECIAL_CHARS_BACKREF,
REPLACEMENTS
} = constants$5;
/**
* Helpers
*/
const expandRange = (args, options) => {
if (typeof options.expandRange === 'function') {
return options.expandRange(...args, options);
}
args.sort();
const value = `[${args.join('-')}]`;
return value;
};
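// Illustrative example: with no custom `expandRange` option, a brace range such
// as `{a..e}` is reduced to a plain character class:
//   expandRange(['a', 'e'], {}) // => '[a-e]'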
/**
* Create the message for a syntax error
*/
const syntaxError = (type, char) => {
return `Missing ${type}: "${char}" - use "\\\\${char}" to match literal characters`;
};
/**
* Parse the given input string.
* @param {String} input
* @param {Object} options
* @return {Object}
*/
const parse$g = (input, options) => {
if (typeof input !== 'string') {
throw new TypeError('Expected a string');
}
input = REPLACEMENTS[input] || input;
const opts = { ...options };
const max = typeof opts.maxLength === 'number' ? Math.min(MAX_LENGTH$1, opts.maxLength) : MAX_LENGTH$1;
let len = input.length;
if (len > max) {
throw new SyntaxError(`Input length: ${len}, exceeds maximum allowed length: ${max}`);
}
const bos = { type: 'bos', value: '', output: opts.prepend || '' };
const tokens = [bos];
const capture = opts.capture ? '' : '?:';
const win32 = utils$i.isWindows(options);
// create constants based on platform, for windows or posix
const PLATFORM_CHARS = constants$5.globChars(win32);
const EXTGLOB_CHARS = constants$5.extglobChars(PLATFORM_CHARS);
const {
DOT_LITERAL,
PLUS_LITERAL,
SLASH_LITERAL,
ONE_CHAR,
DOTS_SLASH,
NO_DOT,
NO_DOT_SLASH,
NO_DOTS_SLASH,
QMARK,
QMARK_NO_DOT,
STAR,
START_ANCHOR
} = PLATFORM_CHARS;
const globstar = opts => {
return `(${capture}(?:(?!${START_ANCHOR}${opts.dot ? DOTS_SLASH : DOT_LITERAL}).)*?)`;
};
const nodot = opts.dot ? '' : NO_DOT;
const qmarkNoDot = opts.dot ? QMARK : QMARK_NO_DOT;
let star = opts.bash === true ? globstar(opts) : STAR;
if (opts.capture) {
star = `(${star})`;
}
// minimatch options support
if (typeof opts.noext === 'boolean') {
opts.noextglob = opts.noext;
}
const state = {
input,
index: -1,
start: 0,
dot: opts.dot === true,
consumed: '',
output: '',
prefix: '',
backtrack: false,
negated: false,
brackets: 0,
braces: 0,
parens: 0,
quotes: 0,
globstar: false,
tokens
};
input = utils$i.removePrefix(input, state);
len = input.length;
const extglobs = [];
const braces = [];
const stack = [];
let prev = bos;
let value;
/**
* Tokenizing helpers
*/
const eos = () => state.index === len - 1;
const peek = state.peek = (n = 1) => input[state.index + n];
const advance = state.advance = () => input[++state.index] || '';
const remaining = () => input.slice(state.index + 1);
const consume = (value = '', num = 0) => {
state.consumed += value;
state.index += num;
};
const append = token => {
state.output += token.output != null ? token.output : token.value;
consume(token.value);
};
const negate = () => {
let count = 1;
while (peek() === '!' && (peek(2) !== '(' || peek(3) === '?')) {
advance();
state.start++;
count++;
}
if (count % 2 === 0) {
return false;
}
state.negated = true;
state.start++;
return true;
};
const increment = type => {
state[type]++;
stack.push(type);
};
const decrement = type => {
state[type]--;
stack.pop();
};
/**
* Push tokens onto the tokens array. This helper speeds up
* tokenizing by 1) helping us avoid backtracking as much as possible,
* and 2) helping us avoid creating extra tokens when consecutive
* characters are plain text. This improves performance and simplifies
* lookbehinds.
*/
const push = tok => {
if (prev.type === 'globstar') {
const isBrace = state.braces > 0 && (tok.type === 'comma' || tok.type === 'brace');
const isExtglob = tok.extglob === true || (extglobs.length && (tok.type === 'pipe' || tok.type === 'paren'));
if (tok.type !== 'slash' && tok.type !== 'paren' && !isBrace && !isExtglob) {
state.output = state.output.slice(0, -prev.output.length);
prev.type = 'star';
prev.value = '*';
prev.output = star;
state.output += prev.output;
}
}
if (extglobs.length && tok.type !== 'paren') {
extglobs[extglobs.length - 1].inner += tok.value;
}
if (tok.value || tok.output) append(tok);
if (prev && prev.type === 'text' && tok.type === 'text') {
prev.value += tok.value;
prev.output = (prev.output || '') + tok.value;
return;
}
tok.prev = prev;
tokens.push(tok);
prev = tok;
};
const extglobOpen = (type, value) => {
const token = { ...EXTGLOB_CHARS[value], conditions: 1, inner: '' };
token.prev = prev;
token.parens = state.parens;
token.output = state.output;
const output = (opts.capture ? '(' : '') + token.open;
increment('parens');
push({ type, value, output: state.output ? '' : ONE_CHAR });
push({ type: 'paren', extglob: true, value: advance(), output });
extglobs.push(token);
};
const extglobClose = token => {
let output = token.close + (opts.capture ? ')' : '');
let rest;
if (token.type === 'negate') {
let extglobStar = star;
if (token.inner && token.inner.length > 1 && token.inner.includes('/')) {
extglobStar = globstar(opts);
}
if (extglobStar !== star || eos() || /^\)+$/.test(remaining())) {
output = token.close = `)$))${extglobStar}`;
}
if (token.inner.includes('*') && (rest = remaining()) && /^\.[^\\/.]+$/.test(rest)) {
// Any non-magical string (`.ts`) or even nested expression (`.{ts,tsx}`) can follow after the closing parenthesis.
// In this case, we need to parse the string and use it in the output of the original pattern.
// Suitable patterns: `/!(*.d).ts`, `/!(*.d).{ts,tsx}`, `**/!(*-dbg).@(js)`.
//
// Disabling the `fastpaths` option due to a problem with parsing strings as `.ts` in the pattern like `**/!(*.d).ts`.
const expression = parse$g(rest, { ...options, fastpaths: false }).output;
output = token.close = `)${expression})${extglobStar})`;
}
if (token.prev.type === 'bos') {
state.negatedExtglob = true;
}
}
push({ type: 'paren', extglob: true, value, output });
decrement('parens');
};
/**
* Fast paths
*/
if (opts.fastpaths !== false && !/(^[*!]|[/()[\]{}"])/.test(input)) {
let backslashes = false;
let output = input.replace(REGEX_SPECIAL_CHARS_BACKREF, (m, esc, chars, first, rest, index) => {
if (first === '\\') {
backslashes = true;
return m;
}
if (first === '?') {
if (esc) {
return esc + first + (rest ? QMARK.repeat(rest.length) : '');
}
if (index === 0) {
return qmarkNoDot + (rest ? QMARK.repeat(rest.length) : '');
}
return QMARK.repeat(chars.length);
}
if (first === '.') {
return DOT_LITERAL.repeat(chars.length);
}
if (first === '*') {
if (esc) {
return esc + first + (rest ? star : '');
}
return star;
}
return esc ? m : `\\${m}`;
});
if (backslashes === true) {
if (opts.unescape === true) {
output = output.replace(/\\/g, '');
} else {
output = output.replace(/\\+/g, m => {
return m.length % 2 === 0 ? '\\\\' : (m ? '\\' : '');
});
}
}
if (output === input && opts.contains === true) {
state.output = input;
return state;
}
state.output = utils$i.wrapOutput(output, state, options);
return state;
}
/**
* Tokenize input until we reach end-of-string
*/
while (!eos()) {
value = advance();
if (value === '\u0000') {
continue;
}
/**
* Escaped characters
*/
if (value === '\\') {
const next = peek();
if (next === '/' && opts.bash !== true) {
continue;
}
if (next === '.' || next === ';') {
continue;
}
if (!next) {
value += '\\';
push({ type: 'text', value });
continue;
}
// collapse slashes to reduce potential for exploits
const match = /^\\+/.exec(remaining());
let slashes = 0;
if (match && match[0].length > 2) {
slashes = match[0].length;
state.index += slashes;
if (slashes % 2 !== 0) {
value += '\\';
}
}
if (opts.unescape === true) {
value = advance();
} else {
value += advance();
}
if (state.brackets === 0) {
push({ type: 'text', value });
continue;
}
}
/**
* If we're inside a regex character class, continue
* until we reach the closing bracket.
*/
if (state.brackets > 0 && (value !== ']' || prev.value === '[' || prev.value === '[^')) {
if (opts.posix !== false && value === ':') {
const inner = prev.value.slice(1);
if (inner.includes('[')) {
prev.posix = true;
if (inner.includes(':')) {
const idx = prev.value.lastIndexOf('[');
const pre = prev.value.slice(0, idx);
const rest = prev.value.slice(idx + 2);
const posix = POSIX_REGEX_SOURCE[rest];
if (posix) {
prev.value = pre + posix;
state.backtrack = true;
advance();
if (!bos.output && tokens.indexOf(prev) === 1) {
bos.output = ONE_CHAR;
}
continue;
}
}
}
}
if ((value === '[' && peek() !== ':') || (value === '-' && peek() === ']')) {
value = `\\${value}`;
}
if (value === ']' && (prev.value === '[' || prev.value === '[^')) {
value = `\\${value}`;
}
if (opts.posix === true && value === '!' && prev.value === '[') {
value = '^';
}
prev.value += value;
append({ value });
continue;
}
/**
* If we're inside a quoted string, continue
* until we reach the closing double quote.
*/
if (state.quotes === 1 && value !== '"') {
value = utils$i.escapeRegex(value);
prev.value += value;
append({ value });
continue;
}
/**
* Double quotes
*/
if (value === '"') {
state.quotes = state.quotes === 1 ? 0 : 1;
if (opts.keepQuotes === true) {
push({ type: 'text', value });
}
continue;
}
/**
* Parentheses
*/
if (value === '(') {
increment('parens');
push({ type: 'paren', value });
continue;
}
if (value === ')') {
if (state.parens === 0 && opts.strictBrackets === true) {
throw new SyntaxError(syntaxError('opening', '('));
}
const extglob = extglobs[extglobs.length - 1];
if (extglob && state.parens === extglob.parens + 1) {
extglobClose(extglobs.pop());
continue;
}
push({ type: 'paren', value, output: state.parens ? ')' : '\\)' });
decrement('parens');
continue;
}
/**
* Square brackets
*/
if (value === '[') {
if (opts.nobracket === true || !remaining().includes(']')) {
if (opts.nobracket !== true && opts.strictBrackets === true) {
throw new SyntaxError(syntaxError('closing', ']'));
}
value = `\\${value}`;
} else {
increment('brackets');
}
push({ type: 'bracket', value });
continue;
}
if (value === ']') {
if (opts.nobracket === true || (prev && prev.type === 'bracket' && prev.value.length === 1)) {
push({ type: 'text', value, output: `\\${value}` });
continue;
}
if (state.brackets === 0) {
if (opts.strictBrackets === true) {
throw new SyntaxError(syntaxError('opening', '['));
}
push({ type: 'text', value, output: `\\${value}` });
continue;
}
decrement('brackets');
const prevValue = prev.value.slice(1);
if (prev.posix !== true && prevValue[0] === '^' && !prevValue.includes('/')) {
value = `/${value}`;
}
prev.value += value;
append({ value });
// when literal brackets are explicitly disabled
// assume we should match with a regex character class
if (opts.literalBrackets === false || utils$i.hasRegexChars(prevValue)) {
continue;
}
const escaped = utils$i.escapeRegex(prev.value);
state.output = state.output.slice(0, -prev.value.length);
// when literal brackets are explicitly enabled
// assume we should escape the brackets to match literal characters
if (opts.literalBrackets === true) {
state.output += escaped;
prev.value = escaped;
continue;
}
// when the user specifies nothing, try to match both
prev.value = `(${capture}${escaped}|${prev.value})`;
state.output += prev.value;
continue;
}
/**
* Braces
*/
if (value === '{' && opts.nobrace !== true) {
increment('braces');
const open = {
type: 'brace',
value,
output: '(',
outputIndex: state.output.length,
tokensIndex: state.tokens.length
};
braces.push(open);
push(open);
continue;
}
if (value === '}') {
const brace = braces[braces.length - 1];
if (opts.nobrace === true || !brace) {
push({ type: 'text', value, output: value });
continue;
}
let output = ')';
if (brace.dots === true) {
const arr = tokens.slice();
const range = [];
for (let i = arr.length - 1; i >= 0; i--) {
tokens.pop();
if (arr[i].type === 'brace') {
break;
}
if (arr[i].type !== 'dots') {
range.unshift(arr[i].value);
}
}
output = expandRange(range, opts);
state.backtrack = true;
}
if (brace.comma !== true && brace.dots !== true) {
const out = state.output.slice(0, brace.outputIndex);
const toks = state.tokens.slice(brace.tokensIndex);
brace.value = brace.output = '\\{';
value = output = '\\}';
state.output = out;
for (const t of toks) {
state.output += (t.output || t.value);
}
}
push({ type: 'brace', value, output });
decrement('braces');
braces.pop();
continue;
}
/**
* Pipes
*/
if (value === '|') {
if (extglobs.length > 0) {
extglobs[extglobs.length - 1].conditions++;
}
push({ type: 'text', value });
continue;
}
/**
* Commas
*/
if (value === ',') {
let output = value;
const brace = braces[braces.length - 1];
if (brace && stack[stack.length - 1] === 'braces') {
brace.comma = true;
output = '|';
}
push({ type: 'comma', value, output });
continue;
}
/**
* Slashes
*/
if (value === '/') {
// if the beginning of the glob is "./", advance the start
// to the current index, and don't add the "./" characters
// to the state. This greatly simplifies lookbehinds when
// checking for BOS characters like "!" and "." (not "./")
if (prev.type === 'dot' && state.index === state.start + 1) {
state.start = state.index + 1;
state.consumed = '';
state.output = '';
tokens.pop();
prev = bos; // reset "prev" to the first token
continue;
}
push({ type: 'slash', value, output: SLASH_LITERAL });
continue;
}
/**
* Dots
*/
if (value === '.') {
if (state.braces > 0 && prev.type === 'dot') {
if (prev.value === '.') prev.output = DOT_LITERAL;
const brace = braces[braces.length - 1];
prev.type = 'dots';
prev.output += value;
prev.value += value;
brace.dots = true;
continue;
}
if ((state.braces + state.parens) === 0 && prev.type !== 'bos' && prev.type !== 'slash') {
push({ type: 'text', value, output: DOT_LITERAL });
continue;
}
push({ type: 'dot', value, output: DOT_LITERAL });
continue;
}
/**
* Question marks
*/
if (value === '?') {
const isGroup = prev && prev.value === '(';
if (!isGroup && opts.noextglob !== true && peek() === '(' && peek(2) !== '?') {
extglobOpen('qmark', value);
continue;
}
if (prev && prev.type === 'paren') {
const next = peek();
let output = value;
if (next === '<' && !utils$i.supportsLookbehinds()) {
throw new Error('Node.js v10 or higher is required for regex lookbehinds');
}
if ((prev.value === '(' && !/[!=<:]/.test(next)) || (next === '<' && !/<([!=]|\w+>)/.test(remaining()))) {
output = `\\${value}`;
}
push({ type: 'text', value, output });
continue;
}
if (opts.dot !== true && (prev.type === 'slash' || prev.type === 'bos')) {
push({ type: 'qmark', value, output: QMARK_NO_DOT });
continue;
}
push({ type: 'qmark', value, output: QMARK });
continue;
}
/**
* Exclamation
*/
if (value === '!') {
if (opts.noextglob !== true && peek() === '(') {
if (peek(2) !== '?' || !/[!=<:]/.test(peek(3))) {
extglobOpen('negate', value);
continue;
}
}
if (opts.nonegate !== true && state.index === 0) {
negate();
continue;
}
}
/**
* Plus
*/
if (value === '+') {
if (opts.noextglob !== true && peek() === '(' && peek(2) !== '?') {
extglobOpen('plus', value);
continue;
}
if ((prev && prev.value === '(') || opts.regex === false) {
push({ type: 'plus', value, output: PLUS_LITERAL });
continue;
}
if ((prev && (prev.type === 'bracket' || prev.type === 'paren' || prev.type === 'brace')) || state.parens > 0) {
push({ type: 'plus', value });
continue;
}
push({ type: 'plus', value: PLUS_LITERAL });
continue;
}
/**
* Plain text
*/
if (value === '@') {
if (opts.noextglob !== true && peek() === '(' && peek(2) !== '?') {
push({ type: 'at', extglob: true, value, output: '' });
continue;
}
push({ type: 'text', value });
continue;
}
/**
* Plain text
*/
if (value !== '*') {
if (value === '$' || value === '^') {
value = `\\${value}`;
}
const match = REGEX_NON_SPECIAL_CHARS.exec(remaining());
if (match) {
value += match[0];
state.index += match[0].length;
}
push({ type: 'text', value });
continue;
}
/**
* Stars
*/
if (prev && (prev.type === 'globstar' || prev.star === true)) {
prev.type = 'star';
prev.star = true;
prev.value += value;
prev.output = star;
state.backtrack = true;
state.globstar = true;
consume(value);
continue;
}
let rest = remaining();
if (opts.noextglob !== true && /^\([^?]/.test(rest)) {
extglobOpen('star', value);
continue;
}
if (prev.type === 'star') {
if (opts.noglobstar === true) {
consume(value);
continue;
}
const prior = prev.prev;
const before = prior.prev;
const isStart = prior.type === 'slash' || prior.type === 'bos';
const afterStar = before && (before.type === 'star' || before.type === 'globstar');
if (opts.bash === true && (!isStart || (rest[0] && rest[0] !== '/'))) {
push({ type: 'star', value, output: '' });
continue;
}
const isBrace = state.braces > 0 && (prior.type === 'comma' || prior.type === 'brace');
const isExtglob = extglobs.length && (prior.type === 'pipe' || prior.type === 'paren');
if (!isStart && prior.type !== 'paren' && !isBrace && !isExtglob) {
push({ type: 'star', value, output: '' });
continue;
}
// strip consecutive `/**/`
while (rest.slice(0, 3) === '/**') {
const after = input[state.index + 4];
if (after && after !== '/') {
break;
}
rest = rest.slice(3);
consume('/**', 3);
}
if (prior.type === 'bos' && eos()) {
prev.type = 'globstar';
prev.value += value;
prev.output = globstar(opts);
state.output = prev.output;
state.globstar = true;
consume(value);
continue;
}
if (prior.type === 'slash' && prior.prev.type !== 'bos' && !afterStar && eos()) {
state.output = state.output.slice(0, -(prior.output + prev.output).length);
prior.output = `(?:${prior.output}`;
prev.type = 'globstar';
prev.output = globstar(opts) + (opts.strictSlashes ? ')' : '|$)');
prev.value += value;
state.globstar = true;
state.output += prior.output + prev.output;
consume(value);
continue;
}
if (prior.type === 'slash' && prior.prev.type !== 'bos' && rest[0] === '/') {
const end = rest[1] !== void 0 ? '|$' : '';
state.output = state.output.slice(0, -(prior.output + prev.output).length);
prior.output = `(?:${prior.output}`;
prev.type = 'globstar';
prev.output = `${globstar(opts)}${SLASH_LITERAL}|${SLASH_LITERAL}${end})`;
prev.value += value;
state.output += prior.output + prev.output;
state.globstar = true;
consume(value + advance());
push({ type: 'slash', value: '/', output: '' });
continue;
}
if (prior.type === 'bos' && rest[0] === '/') {
prev.type = 'globstar';
prev.value += value;
prev.output = `(?:^|${SLASH_LITERAL}|${globstar(opts)}${SLASH_LITERAL})`;
state.output = prev.output;
state.globstar = true;
consume(value + advance());
push({ type: 'slash', value: '/', output: '' });
continue;
}
// remove single star from output
state.output = state.output.slice(0, -prev.output.length);
// reset previous token to globstar
prev.type = 'globstar';
prev.output = globstar(opts);
prev.value += value;
// reset output with globstar
state.output += prev.output;
state.globstar = true;
consume(value);
continue;
}
const token = { type: 'star', value, output: star };
if (opts.bash === true) {
token.output = '.*?';
if (prev.type === 'bos' || prev.type === 'slash') {
token.output = nodot + token.output;
}
push(token);
continue;
}
if (prev && (prev.type === 'bracket' || prev.type === 'paren') && opts.regex === true) {
token.output = value;
push(token);
continue;
}
if (state.index === state.start || prev.type === 'slash' || prev.type === 'dot') {
if (prev.type === 'dot') {
state.output += NO_DOT_SLASH;
prev.output += NO_DOT_SLASH;
} else if (opts.dot === true) {
state.output += NO_DOTS_SLASH;
prev.output += NO_DOTS_SLASH;
} else {
state.output += nodot;
prev.output += nodot;
}
if (peek() !== '*') {
state.output += ONE_CHAR;
prev.output += ONE_CHAR;
}
}
push(token);
}
while (state.brackets > 0) {
if (opts.strictBrackets === true) throw new SyntaxError(syntaxError('closing', ']'));
state.output = utils$i.escapeLast(state.output, '[');
decrement('brackets');
}
while (state.parens > 0) {
if (opts.strictBrackets === true) throw new SyntaxError(syntaxError('closing', ')'));
state.output = utils$i.escapeLast(state.output, '(');
decrement('parens');
}
while (state.braces > 0) {
if (opts.strictBrackets === true) throw new SyntaxError(syntaxError('closing', '}'));
state.output = utils$i.escapeLast(state.output, '{');
decrement('braces');
}
if (opts.strictSlashes !== true && (prev.type === 'star' || prev.type === 'bracket')) {
push({ type: 'maybe_slash', value: '', output: `${SLASH_LITERAL}?` });
}
// rebuild the output if we had to backtrack at any point
if (state.backtrack === true) {
state.output = '';
for (const token of state.tokens) {
state.output += token.output != null ? token.output : token.value;
if (token.suffix) {
state.output += token.suffix;
}
}
}
return state;
};
/**
* Fast paths for creating regular expressions for common glob patterns.
* This can significantly speed up processing and has very little downside
* impact when none of the fast paths match.
*/
parse$g.fastpaths = (input, options) => {
const opts = { ...options };
const max = typeof opts.maxLength === 'number' ? Math.min(MAX_LENGTH$1, opts.maxLength) : MAX_LENGTH$1;
const len = input.length;
if (len > max) {
throw new SyntaxError(`Input length: ${len}, exceeds maximum allowed length: ${max}`);
}
input = REPLACEMENTS[input] || input;
const win32 = utils$i.isWindows(options);
// create constants based on platform, for windows or posix
const {
DOT_LITERAL,
SLASH_LITERAL,
ONE_CHAR,
DOTS_SLASH,
NO_DOT,
NO_DOTS,
NO_DOTS_SLASH,
STAR,
START_ANCHOR
} = constants$5.globChars(win32);
const nodot = opts.dot ? NO_DOTS : NO_DOT;
const slashDot = opts.dot ? NO_DOTS_SLASH : NO_DOT;
const capture = opts.capture ? '' : '?:';
const state = { negated: false, prefix: '' };
let star = opts.bash === true ? '.*?' : STAR;
if (opts.capture) {
star = `(${star})`;
}
const globstar = opts => {
if (opts.noglobstar === true) return star;
return `(${capture}(?:(?!${START_ANCHOR}${opts.dot ? DOTS_SLASH : DOT_LITERAL}).)*?)`;
};
const create = str => {
switch (str) {
case '*':
return `${nodot}${ONE_CHAR}${star}`;
case '.*':
return `${DOT_LITERAL}${ONE_CHAR}${star}`;
case '*.*':
return `${nodot}${star}${DOT_LITERAL}${ONE_CHAR}${star}`;
case '*/*':
return `${nodot}${star}${SLASH_LITERAL}${ONE_CHAR}${slashDot}${star}`;
case '**':
return nodot + globstar(opts);
case '**/*':
return `(?:${nodot}${globstar(opts)}${SLASH_LITERAL})?${slashDot}${ONE_CHAR}${star}`;
case '**/*.*':
return `(?:${nodot}${globstar(opts)}${SLASH_LITERAL})?${slashDot}${star}${DOT_LITERAL}${ONE_CHAR}${star}`;
case '**/.*':
return `(?:${nodot}${globstar(opts)}${SLASH_LITERAL})?${DOT_LITERAL}${ONE_CHAR}${star}`;
default: {
const match = /^(.*?)\.(\w+)$/.exec(str);
if (!match) return;
const source = create(match[1]);
if (!source) return;
return source + DOT_LITERAL + match[2];
}
}
};
const output = utils$i.removePrefix(input, state);
let source = create(output);
if (source && opts.strictSlashes !== true) {
source += `${SLASH_LITERAL}?`;
}
return source;
};
var parse_1$3 = parse$g;
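// Usage sketch: parse$g returns a token/state object whose `output` property is
// a regex source string; compileRe()/makeRe() below wrap it with anchors, e.g.
//   const state = parse$g('*.js', {});
//   new RegExp(`^(?:${state.output})$`).test('index.js') // => true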
const path$l = require$$0$4;
const scan$1 = scan_1;
const parse$f = parse_1$3;
const utils$h = utils$k;
const constants$4 = constants$6;
const isObject$3 = val => val && typeof val === 'object' && !Array.isArray(val);
/**
* Creates a matcher function from one or more glob patterns. The
* returned function takes a string to match as its first argument,
* and returns true if the string is a match. The returned matcher
* function also takes a boolean as the second argument that, when true,
* returns an object with additional information.
*
* ```js
* const picomatch = require('picomatch');
* // picomatch(glob[, options]);
*
* const isMatch = picomatch('*.!(*a)');
* console.log(isMatch('a.a')); //=> false
* console.log(isMatch('a.b')); //=> true
* ```
* @name picomatch
* @param {String|Array} `globs` One or more glob patterns.
* @param {Object=} `options`
* @return {Function=} Returns a matcher function.
* @api public
*/
const picomatch$5 = (glob, options, returnState = false) => {
if (Array.isArray(glob)) {
const fns = glob.map(input => picomatch$5(input, options, returnState));
const arrayMatcher = str => {
for (const isMatch of fns) {
const state = isMatch(str);
if (state) return state;
}
return false;
};
return arrayMatcher;
}
const isState = isObject$3(glob) && glob.tokens && glob.input;
if (glob === '' || (typeof glob !== 'string' && !isState)) {
throw new TypeError('Expected pattern to be a non-empty string');
}
const opts = options || {};
const posix = utils$h.isWindows(options);
const regex = isState
? picomatch$5.compileRe(glob, options)
: picomatch$5.makeRe(glob, options, false, true);
const state = regex.state;
delete regex.state;
let isIgnored = () => false;
if (opts.ignore) {
const ignoreOpts = { ...options, ignore: null, onMatch: null, onResult: null };
isIgnored = picomatch$5(opts.ignore, ignoreOpts, returnState);
}
const matcher = (input, returnObject = false) => {
const { isMatch, match, output } = picomatch$5.test(input, regex, options, { glob, posix });
const result = { glob, state, regex, posix, input, output, match, isMatch };
if (typeof opts.onResult === 'function') {
opts.onResult(result);
}
if (isMatch === false) {
result.isMatch = false;
return returnObject ? result : false;
}
if (isIgnored(input)) {
if (typeof opts.onIgnore === 'function') {
opts.onIgnore(result);
}
result.isMatch = false;
return returnObject ? result : false;
}
if (typeof opts.onMatch === 'function') {
opts.onMatch(result);
}
return returnObject ? result : true;
};
if (returnState) {
matcher.state = state;
}
return matcher;
};
/**
* Test `input` with the given `regex`. This is used by the main
* `picomatch()` function to test the input string.
*
* ```js
* const picomatch = require('picomatch');
* // picomatch.test(input, regex[, options]);
*
* console.log(picomatch.test('foo/bar', /^(?:([^/]*?)\/([^/]*?))$/));
* // { isMatch: true, match: [ 'foo/bar', 'foo', 'bar' ], output: 'foo/bar' }
* ```
* @param {String} `input` String to test.
* @param {RegExp} `regex`
* @return {Object} Returns an object with matching info.
* @api public
*/
picomatch$5.test = (input, regex, options, { glob, posix } = {}) => {
if (typeof input !== 'string') {
throw new TypeError('Expected input to be a string');
}
if (input === '') {
return { isMatch: false, output: '' };
}
const opts = options || {};
const format = opts.format || (posix ? utils$h.toPosixSlashes : null);
let match = input === glob;
let output = (match && format) ? format(input) : input;
if (match === false) {
output = format ? format(input) : input;
match = output === glob;
}
if (match === false || opts.capture === true) {
if (opts.matchBase === true || opts.basename === true) {
match = picomatch$5.matchBase(input, regex, options, posix);
} else {
match = regex.exec(output);
}
}
return { isMatch: Boolean(match), match, output };
};
/**
* Match the basename of a filepath.
*
* ```js
* const picomatch = require('picomatch');
* // picomatch.matchBase(input, glob[, options]);
* console.log(picomatch.matchBase('foo/bar.js', '*.js')); // true
* ```
* @param {String} `input` String to test.
* @param {RegExp|String} `glob` Glob pattern or regex created by [.makeRe](#makeRe).
* @return {Boolean}
* @api public
*/
picomatch$5.matchBase = (input, glob, options, posix = utils$h.isWindows(options)) => {
const regex = glob instanceof RegExp ? glob : picomatch$5.makeRe(glob, options);
return regex.test(path$l.basename(input));
};
/**
* Returns true if **any** of the given glob `patterns` match the specified `string`.
*
* ```js
* const picomatch = require('picomatch');
* // picomatch.isMatch(string, patterns[, options]);
*
* console.log(picomatch.isMatch('a.a', ['b.*', '*.a'])); //=> true
* console.log(picomatch.isMatch('a.a', 'b.*')); //=> false
* ```
* @param {String|Array} str The string to test.
* @param {String|Array} patterns One or more glob patterns to use for matching.
* @param {Object} [options] See available [options](#options).
* @return {Boolean} Returns true if any patterns match `str`
* @api public
*/
picomatch$5.isMatch = (str, patterns, options) => picomatch$5(patterns, options)(str);
/**
* Parse a glob pattern to create the source string for a regular
* expression.
*
* ```js
* const picomatch = require('picomatch');
* const result = picomatch.parse(pattern[, options]);
* ```
* @param {String} `pattern`
* @param {Object} `options`
* @return {Object} Returns an object with useful properties and output to be used as a regex source string.
* @api public
*/
picomatch$5.parse = (pattern, options) => {
if (Array.isArray(pattern)) return pattern.map(p => picomatch$5.parse(p, options));
return parse$f(pattern, { ...options, fastpaths: false });
};
/**
* Scan a glob pattern to separate the pattern into segments.
*
* ```js
* const picomatch = require('picomatch');
* // picomatch.scan(input[, options]);
*
* const result = picomatch.scan('!./foo/*.js');
* console.log(result);
* { prefix: '!./',
* input: '!./foo/*.js',
* start: 3,
* base: 'foo',
* glob: '*.js',
* isBrace: false,
* isBracket: false,
* isGlob: true,
* isExtglob: false,
* isGlobstar: false,
* negated: true }
* ```
* @param {String} `input` Glob pattern to scan.
* @param {Object} `options`
* @return {Object} Returns an object with
* @api public
*/
picomatch$5.scan = (input, options) => scan$1(input, options);
/**
* Compile a regular expression from the `state` object returned by the
* [parse()](#parse) method.
*
* @param {Object} `state`
* @param {Object} `options`
* @param {Boolean} `returnOutput` Intended for implementors, this argument allows you to return the raw output from the parser.
* @param {Boolean} `returnState` Adds the state to a `state` property on the returned regex. Useful for implementors and debugging.
* @return {RegExp}
* @api public
*/
picomatch$5.compileRe = (state, options, returnOutput = false, returnState = false) => {
if (returnOutput === true) {
return state.output;
}
const opts = options || {};
const prepend = opts.contains ? '' : '^';
const append = opts.contains ? '' : '$';
let source = `${prepend}(?:${state.output})${append}`;
if (state && state.negated === true) {
source = `^(?!${source}).*$`;
}
const regex = picomatch$5.toRegex(source, options);
if (returnState === true) {
regex.state = state;
}
return regex;
};
/**
 * Create a regular expression from a glob pattern.
 *
 * ```js
 * const picomatch = require('picomatch');
 * // picomatch.makeRe(input[, options]);
 *
 * console.log(picomatch.makeRe('*.js'));
 * //=> /^(?:(?!\.)(?=.)[^/]*?\.js)$/
 * ```
 * @param {String} `input` A glob pattern to convert to a regular expression.
 * @param {Object} `options`
 * @param {Boolean} `returnOutput` Implementors may use this argument to return the compiled output, instead of a regular expression. This is not exposed on the options to prevent end-users from mutating the result.
 * @param {Boolean} `returnState` Implementors may use this argument to return the state from the parsed glob with the returned regular expression.
 * @return {RegExp} Returns a regex created from the given pattern.
* @api public
*/
picomatch$5.makeRe = (input, options = {}, returnOutput = false, returnState = false) => {
if (!input || typeof input !== 'string') {
throw new TypeError('Expected a non-empty string');
}
let parsed = { negated: false, fastpaths: true };
if (options.fastpaths !== false && (input[0] === '.' || input[0] === '*')) {
parsed.output = parse$f.fastpaths(input, options);
}
if (!parsed.output) {
parsed = parse$f(input, options);
}
return picomatch$5.compileRe(parsed, options, returnOutput, returnState);
};
/**
* Create a regular expression from the given regex source string.
*
* ```js
* const picomatch = require('picomatch');
* // picomatch.toRegex(source[, options]);
*
* const { output } = picomatch.parse('*.js');
* console.log(picomatch.toRegex(output));
* //=> /^(?:(?!\.)(?=.)[^/]*?\.js)$/
* ```
* @param {String} `source` Regular expression source string.
* @param {Object} `options`
* @return {RegExp}
* @api public
*/
picomatch$5.toRegex = (source, options) => {
try {
const opts = options || {};
return new RegExp(source, opts.flags || (opts.nocase ? 'i' : ''));
} catch (err) {
if (options && options.debug === true) throw err;
return /$^/;
}
};
/**
* Picomatch constants.
* @return {Object}
*/
picomatch$5.constants = constants$4;
/**
* Expose "picomatch"
*/
var picomatch_1 = picomatch$5;
var picomatch$3 = picomatch_1;
var picomatch$4 = /*@__PURE__*/getDefaultExportFromCjs(picomatch$3);
const extractors = {
ArrayPattern(names, param) {
for (const element of param.elements) {
if (element)
extractors[element.type](names, element);
}
},
AssignmentPattern(names, param) {
extractors[param.left.type](names, param.left);
},
Identifier(names, param) {
names.push(param.name);
},
MemberExpression() { },
ObjectPattern(names, param) {
for (const prop of param.properties) {
            // @ts-ignore TypeScript reports that this is not a valid type
if (prop.type === 'RestElement') {
extractors.RestElement(names, prop);
}
else {
extractors[prop.value.type](names, prop.value);
}
}
},
RestElement(names, param) {
extractors[param.argument.type](names, param.argument);
}
};
const extractAssignedNames = function extractAssignedNames(param) {
const names = [];
extractors[param.type](names, param);
return names;
};
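// Illustrative sketch, with an ESTree-shaped pattern node written out by hand
// (in practice such a node comes from a parser such as acorn):
//
//   extractAssignedNames({
//     type: 'ObjectPattern',
//     properties: [
//       { type: 'Property', value: { type: 'Identifier', name: 'a' } },
//       { type: 'RestElement', argument: { type: 'Identifier', name: 'rest' } }
//     ]
//   }); //=> ['a', 'rest']
//
// MemberExpression targets are intentionally ignored, since they bind no new name.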
const blockDeclarations = {
const: true,
let: true
};
class Scope {
constructor(options = {}) {
this.parent = options.parent;
this.isBlockScope = !!options.block;
this.declarations = Object.create(null);
if (options.params) {
options.params.forEach((param) => {
extractAssignedNames(param).forEach((name) => {
this.declarations[name] = true;
});
});
}
}
addDeclaration(node, isBlockDeclaration, isVar) {
if (!isBlockDeclaration && this.isBlockScope) {
// it's a `var` or function node, and this
// is a block scope, so we need to go up
this.parent.addDeclaration(node, isBlockDeclaration, isVar);
}
else if (node.id) {
extractAssignedNames(node.id).forEach((name) => {
this.declarations[name] = true;
});
}
}
contains(name) {
return this.declarations[name] || (this.parent ? this.parent.contains(name) : false);
}
}
const attachScopes = function attachScopes(ast, propertyName = 'scope') {
let scope = new Scope();
walk$3(ast, {
enter(n, parent) {
const node = n;
// function foo () {...}
// class Foo {...}
if (/(Function|Class)Declaration/.test(node.type)) {
scope.addDeclaration(node, false, false);
}
// var foo = 1
if (node.type === 'VariableDeclaration') {
const { kind } = node;
const isBlockDeclaration = blockDeclarations[kind];
node.declarations.forEach((declaration) => {
scope.addDeclaration(declaration, isBlockDeclaration, true);
});
}
let newScope;
// create new function scope
if (/Function/.test(node.type)) {
const func = node;
newScope = new Scope({
parent: scope,
block: false,
params: func.params
});
// named function expressions - the name is considered
// part of the function's scope
if (func.type === 'FunctionExpression' && func.id) {
newScope.addDeclaration(func, false, false);
}
}
// create new for scope
if (/For(In|Of)?Statement/.test(node.type)) {
newScope = new Scope({
parent: scope,
block: true
});
}
// create new block scope
if (node.type === 'BlockStatement' && !/Function/.test(parent.type)) {
newScope = new Scope({
parent: scope,
block: true
});
}
// catch clause has its own block scope
if (node.type === 'CatchClause') {
newScope = new Scope({
parent: scope,
params: node.param ? [node.param] : [],
block: true
});
}
if (newScope) {
Object.defineProperty(node, propertyName, {
value: newScope,
configurable: true
});
scope = newScope;
}
},
leave(n) {
const node = n;
if (node[propertyName])
scope = scope.parent;
}
});
return scope;
};
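// Illustrative sketch (assumes `parse` yields an ESTree AST, e.g. from acorn):
//
//   const ast = parse('const answer = 42; function f(x) { let y = x; }');
//   const rootScope = attachScopes(ast, 'scope');
//   rootScope.contains('answer'); //=> true
//   rootScope.contains('f');      //=> true
//   rootScope.contains('y');      //=> false (declared inside the function's scope,
//                                 //          reachable via node.scope while walking)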
// Helper since TypeScript can't detect readonly arrays with Array.isArray
function isArray(arg) {
return Array.isArray(arg);
}
function ensureArray(thing) {
if (isArray(thing))
return thing;
if (thing == null)
return [];
return [thing];
}
const normalizePath$5 = function normalizePath(filename) {
return filename.split(win32.sep).join(posix.sep);
};
function getMatcherString(id, resolutionBase) {
if (resolutionBase === false || isAbsolute(id) || id.startsWith('**')) {
return normalizePath$5(id);
}
// resolve('') is valid and will default to process.cwd()
const basePath = normalizePath$5(resolve$3(resolutionBase || ''))
// escape all possible (posix + win) path characters that might interfere with regex
.replace(/[-^$*+?.()|[\]{}]/g, '\\$&');
// Note that we use posix.join because:
// 1. the basePath has been normalized to use /
    // 2. the incoming glob (id) matcher also uses /
// otherwise Node will force backslash (\) on windows
return posix.join(basePath, normalizePath$5(id));
}
const createFilter$1 = function createFilter(include, exclude, options) {
const resolutionBase = options && options.resolve;
const getMatcher = (id) => id instanceof RegExp
? id
: {
test: (what) => {
// this refactor is a tad overly verbose but makes for easy debugging
const pattern = getMatcherString(id, resolutionBase);
const fn = picomatch$4(pattern, { dot: true });
const result = fn(what);
return result;
}
};
const includeMatchers = ensureArray(include).map(getMatcher);
const excludeMatchers = ensureArray(exclude).map(getMatcher);
return function result(id) {
if (typeof id !== 'string')
return false;
if (/\0/.test(id))
return false;
const pathId = normalizePath$5(id);
for (let i = 0; i < excludeMatchers.length; ++i) {
const matcher = excludeMatchers[i];
if (matcher.test(pathId))
return false;
}
for (let i = 0; i < includeMatchers.length; ++i) {
const matcher = includeMatchers[i];
if (matcher.test(pathId))
return true;
}
return !includeMatchers.length;
};
};
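// A sketch of how the returned filter behaves, using `resolve: false` so the
// picomatch-style patterns are matched exactly as written (by default patterns
// are resolved against process.cwd() first):
//
//   const filter = createFilter(['src/**/*.js'], ['**/*.test.js'], { resolve: false });
//   filter('src/index.js');      //=> true
//   filter('src/index.test.js'); //=> false (matched by the exclude pattern)
//   filter('\0virtual-module');  //=> false (ids containing \0 never match)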
const reservedWords = 'break case class catch const continue debugger default delete do else export extends finally for function if import in instanceof let new return super switch this throw try typeof var void while with yield enum await implements package protected static interface private public';
const builtins = 'arguments Infinity NaN undefined null true false eval uneval isFinite isNaN parseFloat parseInt decodeURI decodeURIComponent encodeURI encodeURIComponent escape unescape Object Function Boolean Symbol Error EvalError InternalError RangeError ReferenceError SyntaxError TypeError URIError Number Math Date String RegExp Array Int8Array Uint8Array Uint8ClampedArray Int16Array Uint16Array Int32Array Uint32Array Float32Array Float64Array Map Set WeakMap WeakSet SIMD ArrayBuffer DataView JSON Promise Generator GeneratorFunction Reflect Proxy Intl';
const forbiddenIdentifiers = new Set(`${reservedWords} ${builtins}`.split(' '));
forbiddenIdentifiers.add('');
const makeLegalIdentifier = function makeLegalIdentifier(str) {
let identifier = str
.replace(/-(\w)/g, (_, letter) => letter.toUpperCase())
.replace(/[^$_a-zA-Z0-9]/g, '_');
if (/\d/.test(identifier[0]) || forbiddenIdentifiers.has(identifier)) {
identifier = `_${identifier}`;
}
return identifier || '_';
};
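// A few examples of the normalisation this performs:
//   makeLegalIdentifier('foo-bar'); //=> 'fooBar'
//   makeLegalIdentifier('foo.bar'); //=> 'foo_bar'
//   makeLegalIdentifier('1value');  //=> '_1value'
//   makeLegalIdentifier('typeof');  //=> '_typeof' (reserved words and globals get a leading underscore)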
function stringify$8(obj) {
return (JSON.stringify(obj) || 'undefined').replace(/[\u2028\u2029]/g, (char) => `\\u${`000${char.charCodeAt(0).toString(16)}`.slice(-4)}`);
}
function serializeArray(arr, indent, baseIndent) {
let output = '[';
const separator = indent ? `\n${baseIndent}${indent}` : '';
for (let i = 0; i < arr.length; i++) {
const key = arr[i];
output += `${i > 0 ? ',' : ''}${separator}${serialize(key, indent, baseIndent + indent)}`;
}
return `${output}${indent ? `\n${baseIndent}` : ''}]`;
}
function serializeObject(obj, indent, baseIndent) {
let output = '{';
const separator = indent ? `\n${baseIndent}${indent}` : '';
const entries = Object.entries(obj);
for (let i = 0; i < entries.length; i++) {
const [key, value] = entries[i];
const stringKey = makeLegalIdentifier(key) === key ? key : stringify$8(key);
output += `${i > 0 ? ',' : ''}${separator}${stringKey}:${indent ? ' ' : ''}${serialize(value, indent, baseIndent + indent)}`;
}
return `${output}${indent ? `\n${baseIndent}` : ''}}`;
}
function serialize(obj, indent, baseIndent) {
if (typeof obj === 'object' && obj !== null) {
if (Array.isArray(obj))
return serializeArray(obj, indent, baseIndent);
if (obj instanceof Date)
return `new Date(${obj.getTime()})`;
if (obj instanceof RegExp)
return obj.toString();
return serializeObject(obj, indent, baseIndent);
}
if (typeof obj === 'number') {
if (obj === Infinity)
return 'Infinity';
if (obj === -Infinity)
return '-Infinity';
if (obj === 0)
return 1 / obj === Infinity ? '0' : '-0';
if (obj !== obj)
return 'NaN'; // eslint-disable-line no-self-compare
}
if (typeof obj === 'symbol') {
const key = Symbol.keyFor(obj);
// eslint-disable-next-line no-undefined
if (key !== undefined)
return `Symbol.for(${stringify$8(key)})`;
}
if (typeof obj === 'bigint')
return `${obj}n`;
return stringify$8(obj);
}
// isWellFormed exists from Node.js 20
const hasStringIsWellFormed = 'isWellFormed' in String.prototype;
function isWellFormedString(input) {
// @ts-expect-error String::isWellFormed exists from ES2024. tsconfig lib is set to ES6
if (hasStringIsWellFormed)
return input.isWellFormed();
// https://github.com/tc39/proposal-is-usv-string/blob/main/README.md#algorithm
return !/\p{Surrogate}/u.test(input);
}
const dataToEsm = function dataToEsm(data, options = {}) {
var _a, _b;
const t = options.compact ? '' : 'indent' in options ? options.indent : '\t';
const _ = options.compact ? '' : ' ';
const n = options.compact ? '' : '\n';
const declarationType = options.preferConst ? 'const' : 'var';
if (options.namedExports === false ||
typeof data !== 'object' ||
Array.isArray(data) ||
data instanceof Date ||
data instanceof RegExp ||
data === null) {
const code = serialize(data, options.compact ? null : t, '');
const magic = _ || (/^[{[\-\/]/.test(code) ? '' : ' '); // eslint-disable-line no-useless-escape
return `export default${magic}${code};`;
}
let maxUnderbarPrefixLength = 0;
for (const key of Object.keys(data)) {
const underbarPrefixLength = (_b = (_a = key.match(/^(_+)/)) === null || _a === void 0 ? void 0 : _a[0].length) !== null && _b !== void 0 ? _b : 0;
if (underbarPrefixLength > maxUnderbarPrefixLength) {
maxUnderbarPrefixLength = underbarPrefixLength;
}
}
const arbitraryNamePrefix = `${'_'.repeat(maxUnderbarPrefixLength + 1)}arbitrary`;
let namedExportCode = '';
const defaultExportRows = [];
const arbitraryNameExportRows = [];
for (const [key, value] of Object.entries(data)) {
if (key === makeLegalIdentifier(key)) {
if (options.objectShorthand)
defaultExportRows.push(key);
else
defaultExportRows.push(`${key}:${_}${key}`);
namedExportCode += `export ${declarationType} ${key}${_}=${_}${serialize(value, options.compact ? null : t, '')};${n}`;
}
else {
defaultExportRows.push(`${stringify$8(key)}:${_}${serialize(value, options.compact ? null : t, '')}`);
if (options.includeArbitraryNames && isWellFormedString(key)) {
const variableName = `${arbitraryNamePrefix}${arbitraryNameExportRows.length}`;
namedExportCode += `${declarationType} ${variableName}${_}=${_}${serialize(value, options.compact ? null : t, '')};${n}`;
arbitraryNameExportRows.push(`${variableName} as ${JSON.stringify(key)}`);
}
}
}
const arbitraryExportCode = arbitraryNameExportRows.length > 0
? `export${_}{${n}${t}${arbitraryNameExportRows.join(`,${n}${t}`)}${n}};${n}`
: '';
const defaultExportCode = `export default${_}{${n}${t}${defaultExportRows.join(`,${n}${t}`)}${n}};${n}`;
return `${namedExportCode}${arbitraryExportCode}${defaultExportCode}`;
};
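// Illustrative sketch of the generated module code (compact mode shown for brevity):
//
//   dataToEsm({ answer: 42, 'not-valid': true }, { compact: true });
//   //=> 'export var answer=42;export default{answer:answer,"not-valid":true};'
//
// Keys that are not legal identifiers only appear on the default export; with
// `includeArbitraryNames` they are additionally re-exported under quoted names.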
var path$k = require$$0$4;
var commondir = function (basedir, relfiles) {
if (relfiles) {
var files = relfiles.map(function (r) {
return path$k.resolve(basedir, r);
});
}
else {
var files = basedir;
}
var res = files.slice(1).reduce(function (ps, file) {
if (!file.match(/^([A-Za-z]:)?\/|\\/)) {
throw new Error('relative path without a basedir');
}
var xs = file.split(/\/+|\\+/);
for (
var i = 0;
ps[i] === xs[i] && i < Math.min(ps.length, xs.length);
i++
);
return ps.slice(0, i);
}, files[0].split(/\/+|\\+/));
// Windows correctly handles paths with forward-slashes
return res.length > 1 ? res.join('/') : '/'
};
var getCommonDir = /*@__PURE__*/getDefaultExportFromCjs(commondir);
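// For example (POSIX-style paths shown):
//   getCommonDir(['/x/y/z', '/x/y/w/q']);   //=> '/x/y'
//   getCommonDir('/x/y', ['a/b', 'a/c/d']); //=> '/x/y/a'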
var balancedMatch = balanced$1;
function balanced$1(a, b, str) {
if (a instanceof RegExp) a = maybeMatch(a, str);
if (b instanceof RegExp) b = maybeMatch(b, str);
var r = range$1(a, b, str);
return r && {
start: r[0],
end: r[1],
pre: str.slice(0, r[0]),
body: str.slice(r[0] + a.length, r[1]),
post: str.slice(r[1] + b.length)
};
}
function maybeMatch(reg, str) {
var m = str.match(reg);
return m ? m[0] : null;
}
balanced$1.range = range$1;
function range$1(a, b, str) {
var begs, beg, left, right, result;
var ai = str.indexOf(a);
var bi = str.indexOf(b, ai + 1);
var i = ai;
if (ai >= 0 && bi > 0) {
if(a===b) {
return [ai, bi];
}
begs = [];
left = str.length;
while (i >= 0 && !result) {
if (i == ai) {
begs.push(i);
ai = str.indexOf(a, i + 1);
} else if (begs.length == 1) {
result = [ begs.pop(), bi ];
} else {
beg = begs.pop();
if (beg < left) {
left = beg;
right = bi;
}
bi = str.indexOf(b, i + 1);
}
i = ai < bi && ai >= 0 ? ai : bi;
}
if (begs.length) {
result = [ left, right ];
}
}
return result;
}
var balanced = balancedMatch;
var braceExpansion = expandTop;
var escSlash = '\0SLASH'+Math.random()+'\0';
var escOpen = '\0OPEN'+Math.random()+'\0';
var escClose = '\0CLOSE'+Math.random()+'\0';
var escComma = '\0COMMA'+Math.random()+'\0';
var escPeriod = '\0PERIOD'+Math.random()+'\0';
function numeric(str) {
return parseInt(str, 10) == str
? parseInt(str, 10)
: str.charCodeAt(0);
}
function escapeBraces(str) {
return str.split('\\\\').join(escSlash)
.split('\\{').join(escOpen)
.split('\\}').join(escClose)
.split('\\,').join(escComma)
.split('\\.').join(escPeriod);
}
function unescapeBraces(str) {
return str.split(escSlash).join('\\')
.split(escOpen).join('{')
.split(escClose).join('}')
.split(escComma).join(',')
.split(escPeriod).join('.');
}
// Basically just str.split(","), but handling cases
// where we have nested braced sections, which should be
// treated as individual members, like {a,{b,c},d}
function parseCommaParts(str) {
if (!str)
return [''];
var parts = [];
var m = balanced('{', '}', str);
if (!m)
return str.split(',');
var pre = m.pre;
var body = m.body;
var post = m.post;
var p = pre.split(',');
p[p.length-1] += '{' + body + '}';
var postParts = parseCommaParts(post);
if (post.length) {
p[p.length-1] += postParts.shift();
p.push.apply(p, postParts);
}
parts.push.apply(parts, p);
return parts;
}
function expandTop(str) {
if (!str)
return [];
// I don't know why Bash 4.3 does this, but it does.
// Anything starting with {} will have the first two bytes preserved
// but *only* at the top level, so {},a}b will not expand to anything,
// but a{},b}c will be expanded to [a}c,abc].
// One could argue that this is a bug in Bash, but since the goal of
// this module is to match Bash's rules, we escape a leading {}
if (str.substr(0, 2) === '{}') {
str = '\\{\\}' + str.substr(2);
}
return expand$3(escapeBraces(str), true).map(unescapeBraces);
}
function embrace(str) {
return '{' + str + '}';
}
function isPadded(el) {
return /^-?0\d/.test(el);
}
function lte(i, y) {
return i <= y;
}
function gte(i, y) {
return i >= y;
}
function expand$3(str, isTop) {
var expansions = [];
var m = balanced('{', '}', str);
if (!m) return [str];
// no need to expand pre, since it is guaranteed to be free of brace-sets
var pre = m.pre;
var post = m.post.length
? expand$3(m.post, false)
: [''];
if (/\$$/.test(m.pre)) {
for (var k = 0; k < post.length; k++) {
var expansion = pre+ '{' + m.body + '}' + post[k];
expansions.push(expansion);
}
} else {
var isNumericSequence = /^-?\d+\.\.-?\d+(?:\.\.-?\d+)?$/.test(m.body);
var isAlphaSequence = /^[a-zA-Z]\.\.[a-zA-Z](?:\.\.-?\d+)?$/.test(m.body);
var isSequence = isNumericSequence || isAlphaSequence;
var isOptions = m.body.indexOf(',') >= 0;
if (!isSequence && !isOptions) {
// {a},b}
if (m.post.match(/,.*\}/)) {
str = m.pre + '{' + m.body + escClose + m.post;
return expand$3(str);
}
return [str];
}
var n;
if (isSequence) {
n = m.body.split(/\.\./);
} else {
n = parseCommaParts(m.body);
if (n.length === 1) {
// x{{a,b}}y ==> x{a}y x{b}y
n = expand$3(n[0], false).map(embrace);
if (n.length === 1) {
return post.map(function(p) {
return m.pre + n[0] + p;
});
}
}
}
// at this point, n is the parts, and we know it's not a comma set
// with a single entry.
var N;
if (isSequence) {
var x = numeric(n[0]);
var y = numeric(n[1]);
var width = Math.max(n[0].length, n[1].length);
var incr = n.length == 3
? Math.abs(numeric(n[2]))
: 1;
var test = lte;
var reverse = y < x;
if (reverse) {
incr *= -1;
test = gte;
}
var pad = n.some(isPadded);
N = [];
for (var i = x; test(i, y); i += incr) {
var c;
if (isAlphaSequence) {
c = String.fromCharCode(i);
if (c === '\\')
c = '';
} else {
c = String(i);
if (pad) {
var need = width - c.length;
if (need > 0) {
var z = new Array(need + 1).join('0');
if (i < 0)
c = '-' + z + c.slice(1);
else
c = z + c;
}
}
}
N.push(c);
}
} else {
N = [];
for (var j = 0; j < n.length; j++) {
N.push.apply(N, expand$3(n[j], false));
}
}
for (var j = 0; j < N.length; j++) {
for (var k = 0; k < post.length; k++) {
var expansion = pre + N[j] + post[k];
if (!isTop || isSequence || expansion)
expansions.push(expansion);
}
}
}
return expansions;
}
var expand$4 = /*@__PURE__*/getDefaultExportFromCjs(braceExpansion);
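// A quick sketch of what the exported brace expansion produces:
//   expand$4('file-{a,b}.txt'); //=> ['file-a.txt', 'file-b.txt']
//   expand$4('img{1..3}.png');  //=> ['img1.png', 'img2.png', 'img3.png']
//   expand$4('a{b,c{d,e}}f');   //=> ['abf', 'acdf', 'acef']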
const MAX_PATTERN_LENGTH = 1024 * 64;
const assertValidPattern = (pattern) => {
if (typeof pattern !== 'string') {
throw new TypeError('invalid pattern');
}
if (pattern.length > MAX_PATTERN_LENGTH) {
throw new TypeError('pattern is too long');
}
};
// translate the various posix character classes into unicode properties
// this works across all unicode locales
// { <posix class>: [<translation>, /u flag required, negated]
const posixClasses = {
'[:alnum:]': ['\\p{L}\\p{Nl}\\p{Nd}', true],
'[:alpha:]': ['\\p{L}\\p{Nl}', true],
'[:ascii:]': ['\\x' + '00-\\x' + '7f', false],
'[:blank:]': ['\\p{Zs}\\t', true],
'[:cntrl:]': ['\\p{Cc}', true],
'[:digit:]': ['\\p{Nd}', true],
'[:graph:]': ['\\p{Z}\\p{C}', true, true],
'[:lower:]': ['\\p{Ll}', true],
'[:print:]': ['\\p{C}', true],
'[:punct:]': ['\\p{P}', true],
'[:space:]': ['\\p{Z}\\t\\r\\n\\v\\f', true],
'[:upper:]': ['\\p{Lu}', true],
'[:word:]': ['\\p{L}\\p{Nl}\\p{Nd}\\p{Pc}', true],
'[:xdigit:]': ['A-Fa-f0-9', false],
};
// only need to escape a few things inside of brace expressions
// escapes: [ \ ] -
const braceEscape = (s) => s.replace(/[[\]\\-]/g, '\\$&');
// escape all regexp magic characters
const regexpEscape = (s) => s.replace(/[-[\]{}()*+?.,\\^$|#\s]/g, '\\$&');
// everything has already been escaped, we just have to join
const rangesToString = (ranges) => ranges.join('');
// takes a glob string at a posix brace expression, and returns
// an equivalent regular expression source, and boolean indicating
// whether the /u flag needs to be applied, and the number of chars
// consumed to parse the character class, plus a flag for whether the
// class contains any glob magic.
// This also removes out-of-order ranges, and returns ($.) if the
// entire class is no good.
const parseClass = (glob, position) => {
const pos = position;
/* c8 ignore start */
if (glob.charAt(pos) !== '[') {
throw new Error('not in a brace expression');
}
/* c8 ignore stop */
const ranges = [];
const negs = [];
let i = pos + 1;
let sawStart = false;
let uflag = false;
let escaping = false;
let negate = false;
let endPos = pos;
let rangeStart = '';
WHILE: while (i < glob.length) {
const c = glob.charAt(i);
if ((c === '!' || c === '^') && i === pos + 1) {
negate = true;
i++;
continue;
}
if (c === ']' && sawStart && !escaping) {
endPos = i + 1;
break;
}
sawStart = true;
if (c === '\\') {
if (!escaping) {
escaping = true;
i++;
continue;
}
// escaped \ char, fall through and treat like normal char
}
if (c === '[' && !escaping) {
// either a posix class, a collation equivalent, or just a [
for (const [cls, [unip, u, neg]] of Object.entries(posixClasses)) {
if (glob.startsWith(cls, i)) {
// invalid, [a-[] is fine, but not [a-[:alpha]]
if (rangeStart) {
return ['$.', false, glob.length - pos, true];
}
i += cls.length;
if (neg)
negs.push(unip);
else
ranges.push(unip);
uflag = uflag || u;
continue WHILE;
}
}
}
// now it's just a normal character, effectively
escaping = false;
if (rangeStart) {
// throw this range away if it's not valid, but others
// can still match.
if (c > rangeStart) {
ranges.push(braceEscape(rangeStart) + '-' + braceEscape(c));
}
else if (c === rangeStart) {
ranges.push(braceEscape(c));
}
rangeStart = '';
i++;
continue;
}
// now might be the start of a range.
// can be either c-d or c-] or c<more...>] or c] at this point
if (glob.startsWith('-]', i + 1)) {
ranges.push(braceEscape(c + '-'));
i += 2;
continue;
}
if (glob.startsWith('-', i + 1)) {
rangeStart = c;
i += 2;
continue;
}
// not the start of a range, just a single character
ranges.push(braceEscape(c));
i++;
}
if (endPos < i) {
// didn't see the end of the class, not a valid class,
// but might still be valid as a literal match.
return ['', false, 0, false];
}
// if we got no ranges and no negates, then we have a range that
// cannot possibly match anything, and that poisons the whole glob
if (!ranges.length && !negs.length) {
return ['$.', false, glob.length - pos, true];
}
// if we got one positive range, and it's a single character, then that's
// not actually a magic pattern, it's just that one literal character.
// we should not treat that as "magic", we should just return the literal
// character. [_] is a perfectly valid way to escape glob magic chars.
if (negs.length === 0 &&
ranges.length === 1 &&
/^\\?.$/.test(ranges[0]) &&
!negate) {
const r = ranges[0].length === 2 ? ranges[0].slice(-1) : ranges[0];
return [regexpEscape(r), false, endPos - pos, false];
}
const sranges = '[' + (negate ? '^' : '') + rangesToString(ranges) + ']';
const snegs = '[' + (negate ? '' : '^') + rangesToString(negs) + ']';
const comb = ranges.length && negs.length
? '(' + sranges + '|' + snegs + ')'
: ranges.length
? sranges
: snegs;
return [comb, uflag, endPos - pos, true];
};
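// For example (the tuple is [regex source, needs /u flag, chars consumed, is magic]):
//   parseClass('[a-c]', 0);       //=> ['[a-c]', false, 5, true]
//   parseClass('[[:digit:]]', 0); //=> ['[\\p{Nd}]', true, 11, true]
//   parseClass('[z-a]', 0);       //=> ['$.', false, 5, true] (out-of-order range can never match)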
/**
* Un-escape a string that has been escaped with {@link escape}.
*
* If the {@link windowsPathsNoEscape} option is used, then square-brace
* escapes are removed, but not backslash escapes. For example, it will turn
* the string `'[*]'` into `*`, but it will not turn `'\\*'` into `'*'`,
 * because `\` is a path separator in `windowsPathsNoEscape` mode.
*
* When `windowsPathsNoEscape` is not set, then both brace escapes and
* backslash escapes are removed.
*
* Slashes (and backslashes in `windowsPathsNoEscape` mode) cannot be escaped
* or unescaped.
*/
const unescape$1 = (s, { windowsPathsNoEscape = false, } = {}) => {
return windowsPathsNoEscape
? s.replace(/\[([^\/\\])\]/g, '$1')
: s.replace(/((?!\\).|^)\[([^\/\\])\]/g, '$1$2').replace(/\\([^\/])/g, '$1');
};
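// A sketch of the un-escaping behaviour described above:
//   unescape$1('\\*.js');                                 //=> '*.js'
//   unescape$1('[*].js');                                 //=> '*.js'
//   unescape$1('[*].js', { windowsPathsNoEscape: true }); //=> '*.js'
//   unescape$1('\\*.js', { windowsPathsNoEscape: true }); //=> '\\*.js' (backslash kept: it is a path separator)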
// parse a single path portion
const types$1 = new Set(['!', '?', '+', '*', '@']);
const isExtglobType = (c) => types$1.has(c);
// Patterns that get prepended to bind to the start of either the
// entire string, or just a single path portion, to prevent dots
// and/or traversal patterns, when needed.
// Exts don't need the ^ or / bit, because the root binds that already.
const startNoTraversal = '(?!(?:^|/)\\.\\.?(?:$|/))';
const startNoDot = '(?!\\.)';
// characters that indicate a start of pattern needs the "no dots" bit,
// because a dot *might* be matched. ( is not in the list, because in
// the case of a child extglob, it will handle the prevention itself.
const addPatternStart = new Set(['[', '.']);
// cases where traversal is A-OK, no dot prevention needed
const justDots = new Set(['..', '.']);
const reSpecials = new Set('().*{}+?[]^$\\!');
const regExpEscape$1 = (s) => s.replace(/[-[\]{}()*+?.,\\^$|#\s]/g, '\\$&');
// any single thing other than /
const qmark$1 = '[^/]';
// * => any number of characters
const star$1 = qmark$1 + '*?';
// use + when we need to ensure that *something* matches, because the * is
// the only thing in the path portion.
const starNoEmpty = qmark$1 + '+?';
// remove the \ chars that we added if we end up doing a nonmagic compare
// const deslash = (s: string) => s.replace(/\\(.)/g, '$1')
class AST {
type;
#root;
#hasMagic;
#uflag = false;
#parts = [];
#parent;
#parentIndex;
#negs;
#filledNegs = false;
#options;
#toString;
// set to true if it's an extglob with no children
// (which really means one child of '')
#emptyExt = false;
constructor(type, parent, options = {}) {
this.type = type;
// extglobs are inherently magical
if (type)
this.#hasMagic = true;
this.#parent = parent;
this.#root = this.#parent ? this.#parent.#root : this;
this.#options = this.#root === this ? options : this.#root.#options;
this.#negs = this.#root === this ? [] : this.#root.#negs;
if (type === '!' && !this.#root.#filledNegs)
this.#negs.push(this);
this.#parentIndex = this.#parent ? this.#parent.#parts.length : 0;
}
get hasMagic() {
/* c8 ignore start */
if (this.#hasMagic !== undefined)
return this.#hasMagic;
/* c8 ignore stop */
for (const p of this.#parts) {
if (typeof p === 'string')
continue;
if (p.type || p.hasMagic)
return (this.#hasMagic = true);
}
// note: will be undefined until we generate the regexp src and find out
return this.#hasMagic;
}
// reconstructs the pattern
toString() {
if (this.#toString !== undefined)
return this.#toString;
if (!this.type) {
return (this.#toString = this.#parts.map(p => String(p)).join(''));
}
else {
return (this.#toString =
this.type + '(' + this.#parts.map(p => String(p)).join('|') + ')');
}
}
#fillNegs() {
/* c8 ignore start */
if (this !== this.#root)
throw new Error('should only call on root');
if (this.#filledNegs)
return this;
/* c8 ignore stop */
// call toString() once to fill this out
this.toString();
this.#filledNegs = true;
let n;
while ((n = this.#negs.pop())) {
if (n.type !== '!')
continue;
            // walk up the tree, appending everything that comes AFTER parentIndex
let p = n;
let pp = p.#parent;
while (pp) {
for (let i = p.#parentIndex + 1; !pp.type && i < pp.#parts.length; i++) {
for (const part of n.#parts) {
/* c8 ignore start */
if (typeof part === 'string') {
throw new Error('string part in extglob AST??');
}
/* c8 ignore stop */
part.copyIn(pp.#parts[i]);
}
}
p = pp;
pp = p.#parent;
}
}
return this;
}
push(...parts) {
for (const p of parts) {
if (p === '')
continue;
/* c8 ignore start */
if (typeof p !== 'string' && !(p instanceof AST && p.#parent === this)) {
throw new Error('invalid part: ' + p);
}
/* c8 ignore stop */
this.#parts.push(p);
}
}
toJSON() {
const ret = this.type === null
? this.#parts.slice().map(p => (typeof p === 'string' ? p : p.toJSON()))
: [this.type, ...this.#parts.map(p => p.toJSON())];
if (this.isStart() && !this.type)
ret.unshift([]);
if (this.isEnd() &&
(this === this.#root ||
(this.#root.#filledNegs && this.#parent?.type === '!'))) {
ret.push({});
}
return ret;
}
isStart() {
if (this.#root === this)
return true;
// if (this.type) return !!this.#parent?.isStart()
if (!this.#parent?.isStart())
return false;
if (this.#parentIndex === 0)
return true;
// if everything AHEAD of this is a negation, then it's still the "start"
const p = this.#parent;
for (let i = 0; i < this.#parentIndex; i++) {
const pp = p.#parts[i];
if (!(pp instanceof AST && pp.type === '!')) {
return false;
}
}
return true;
}
isEnd() {
if (this.#root === this)
return true;
if (this.#parent?.type === '!')
return true;
if (!this.#parent?.isEnd())
return false;
if (!this.type)
return this.#parent?.isEnd();
// if not root, it'll always have a parent
/* c8 ignore start */
const pl = this.#parent ? this.#parent.#parts.length : 0;
/* c8 ignore stop */
return this.#parentIndex === pl - 1;
}
copyIn(part) {
if (typeof part === 'string')
this.push(part);
else
this.push(part.clone(this));
}
clone(parent) {
const c = new AST(this.type, parent);
for (const p of this.#parts) {
c.copyIn(p);
}
return c;
}
static #parseAST(str, ast, pos, opt) {
let escaping = false;
let inBrace = false;
let braceStart = -1;
let braceNeg = false;
if (ast.type === null) {
            // outside of an extglob, append until we find a start
let i = pos;
let acc = '';
while (i < str.length) {
const c = str.charAt(i++);
// still accumulate escapes at this point, but we do ignore
// starts that are escaped
if (escaping || c === '\\') {
escaping = !escaping;
acc += c;
continue;
}
if (inBrace) {
if (i === braceStart + 1) {
if (c === '^' || c === '!') {
braceNeg = true;
}
}
else if (c === ']' && !(i === braceStart + 2 && braceNeg)) {
inBrace = false;
}
acc += c;
continue;
}
else if (c === '[') {
inBrace = true;
braceStart = i;
braceNeg = false;
acc += c;
continue;
}
if (!opt.noext && isExtglobType(c) && str.charAt(i) === '(') {
ast.push(acc);
acc = '';
const ext = new AST(c, ast);
i = AST.#parseAST(str, ext, i, opt);
ast.push(ext);
continue;
}
acc += c;
}
ast.push(acc);
return i;
}
// some kind of extglob, pos is at the (
// find the next | or )
let i = pos + 1;
let part = new AST(null, ast);
const parts = [];
let acc = '';
while (i < str.length) {
const c = str.charAt(i++);
// still accumulate escapes at this point, but we do ignore
// starts that are escaped
if (escaping || c === '\\') {
escaping = !escaping;
acc += c;
continue;
}
if (inBrace) {
if (i === braceStart + 1) {
if (c === '^' || c === '!') {
braceNeg = true;
}
}
else if (c === ']' && !(i === braceStart + 2 && braceNeg)) {
inBrace = false;
}
acc += c;
continue;
}
else if (c === '[') {
inBrace = true;
braceStart = i;
braceNeg = false;
acc += c;
continue;
}
if (isExtglobType(c) && str.charAt(i) === '(') {
part.push(acc);
acc = '';
const ext = new AST(c, part);
part.push(ext);
i = AST.#parseAST(str, ext, i, opt);
continue;
}
if (c === '|') {
part.push(acc);
acc = '';
parts.push(part);
part = new AST(null, ast);
continue;
}
if (c === ')') {
if (acc === '' && ast.#parts.length === 0) {
ast.#emptyExt = true;
}
part.push(acc);
acc = '';
ast.push(...parts, part);
return i;
}
acc += c;
}
// unfinished extglob
// if we got here, it was a malformed extglob! not an extglob, but
// maybe something else in there.
ast.type = null;
ast.#hasMagic = undefined;
ast.#parts = [str.substring(pos - 1)];
return i;
}
static fromGlob(pattern, options = {}) {
const ast = new AST(null, undefined, options);
AST.#parseAST(pattern, ast, 0, options);
return ast;
}
// returns the regular expression if there's magic, or the unescaped
// string if not.
toMMPattern() {
// should only be called on root
/* c8 ignore start */
if (this !== this.#root)
return this.#root.toMMPattern();
/* c8 ignore stop */
const glob = this.toString();
const [re, body, hasMagic, uflag] = this.toRegExpSource();
// if we're in nocase mode, and not nocaseMagicOnly, then we do
// still need a regular expression if we have to case-insensitively
// match capital/lowercase characters.
const anyMagic = hasMagic ||
this.#hasMagic ||
(this.#options.nocase &&
!this.#options.nocaseMagicOnly &&
glob.toUpperCase() !== glob.toLowerCase());
if (!anyMagic) {
return body;
}
const flags = (this.#options.nocase ? 'i' : '') + (uflag ? 'u' : '');
return Object.assign(new RegExp(`^${re}$`, flags), {
_src: re,
_glob: glob,
});
}
get options() {
return this.#options;
}
// returns the string match, the regexp source, whether there's magic
// in the regexp (so a regular expression is required) and whether or
// not the uflag is needed for the regular expression (for posix classes)
// TODO: instead of injecting the start/end at this point, just return
// the BODY of the regexp, along with the start/end portions suitable
// for binding the start/end in either a joined full-path makeRe context
// (where we bind to (^|/), or a standalone matchPart context (where
// we bind to ^, and not /). Otherwise slashes get duped!
//
// In part-matching mode, the start is:
// - if not isStart: nothing
// - if traversal possible, but not allowed: ^(?!\.\.?$)
// - if dots allowed or not possible: ^
// - if dots possible and not allowed: ^(?!\.)
// end is:
// - if not isEnd(): nothing
// - else: $
//
// In full-path matching mode, we put the slash at the START of the
// pattern, so start is:
// - if first pattern: same as part-matching mode
// - if not isStart(): nothing
// - if traversal possible, but not allowed: /(?!\.\.?(?:$|/))
// - if dots allowed or not possible: /
// - if dots possible and not allowed: /(?!\.)
// end is:
// - if last pattern, same as part-matching mode
// - else nothing
//
// Always put the (?:$|/) on negated tails, though, because that has to be
// there to bind the end of the negated pattern portion, and it's easier to
// just stick it in now rather than try to inject it later in the middle of
// the pattern.
//
// We can just always return the same end, and leave it up to the caller
// to know whether it's going to be used joined or in parts.
// And, if the start is adjusted slightly, can do the same there:
// - if not isStart: nothing
// - if traversal possible, but not allowed: (?:/|^)(?!\.\.?$)
// - if dots allowed or not possible: (?:/|^)
// - if dots possible and not allowed: (?:/|^)(?!\.)
//
// But it's better to have a simpler binding without a conditional, for
// performance, so probably better to return both start options.
//
// Then the caller just ignores the end if it's not the first pattern,
// and the start always gets applied.
//
// But that's always going to be $ if it's the ending pattern, or nothing,
// so the caller can just attach $ at the end of the pattern when building.
//
// So the todo is:
// - better detect what kind of start is needed
// - return both flavors of starting pattern
// - attach $ at the end of the pattern when creating the actual RegExp
//
// Ah, but wait, no, that all only applies to the root when the first pattern
// is not an extglob. If the first pattern IS an extglob, then we need all
// that dot prevention biz to live in the extglob portions, because eg
// +(*|.x*) can match .xy but not .yx.
//
// So, return the two flavors if it's #root and the first child is not an
// AST, otherwise leave it to the child AST to handle it, and there,
// use the (?:^|/) style of start binding.
//
// Even simplified further:
// - Since the start for a join is eg /(?!\.) and the start for a part
// is ^(?!\.), we can just prepend (?!\.) to the pattern (either root
// or start or whatever) and prepend ^ or / at the Regexp construction.
toRegExpSource(allowDot) {
const dot = allowDot ?? !!this.#options.dot;
if (this.#root === this)
this.#fillNegs();
if (!this.type) {
const noEmpty = this.isStart() && this.isEnd();
const src = this.#parts
.map(p => {
const [re, _, hasMagic, uflag] = typeof p === 'string'
? AST.#parseGlob(p, this.#hasMagic, noEmpty)
: p.toRegExpSource(allowDot);
this.#hasMagic = this.#hasMagic || hasMagic;
this.#uflag = this.#uflag || uflag;
return re;
})
.join('');
let start = '';
if (this.isStart()) {
if (typeof this.#parts[0] === 'string') {
// this is the string that will match the start of the pattern,
// so we need to protect against dots and such.
// '.' and '..' cannot match unless the pattern is that exactly,
// even if it starts with . or dot:true is set.
const dotTravAllowed = this.#parts.length === 1 && justDots.has(this.#parts[0]);
if (!dotTravAllowed) {
const aps = addPatternStart;
// check if we have a possibility of matching . or ..,
// and prevent that.
const needNoTrav =
// dots are allowed, and the pattern starts with [ or .
(dot && aps.has(src.charAt(0))) ||
// the pattern starts with \., and then [ or .
(src.startsWith('\\.') && aps.has(src.charAt(2))) ||
// the pattern starts with \.\., and then [ or .
(src.startsWith('\\.\\.') && aps.has(src.charAt(4)));
// no need to prevent dots if it can't match a dot, or if a
// sub-pattern will be preventing it anyway.
const needNoDot = !dot && !allowDot && aps.has(src.charAt(0));
start = needNoTrav ? startNoTraversal : needNoDot ? startNoDot : '';
}
}
}
// append the "end of path portion" pattern to negation tails
let end = '';
if (this.isEnd() &&
this.#root.#filledNegs &&
this.#parent?.type === '!') {
end = '(?:$|\\/)';
}
const final = start + src + end;
return [
final,
unescape$1(src),
(this.#hasMagic = !!this.#hasMagic),
this.#uflag,
];
}
// We need to calculate the body *twice* if it's a repeat pattern
// at the start, once in nodot mode, then again in dot mode, so a
// pattern like *(?) can match 'x.y'
const repeated = this.type === '*' || this.type === '+';
// some kind of extglob
const start = this.type === '!' ? '(?:(?!(?:' : '(?:';
let body = this.#partsToRegExp(dot);
if (this.isStart() && this.isEnd() && !body && this.type !== '!') {
// invalid extglob, has to at least be *something* present, if it's
// the entire path portion.
const s = this.toString();
this.#parts = [s];
this.type = null;
this.#hasMagic = undefined;
return [s, unescape$1(this.toString()), false, false];
}
// XXX abstract out this map method
let bodyDotAllowed = !repeated || allowDot || dot || !startNoDot
? ''
: this.#partsToRegExp(true);
if (bodyDotAllowed === body) {
bodyDotAllowed = '';
}
if (bodyDotAllowed) {
body = `(?:${body})(?:${bodyDotAllowed})*?`;
}
// an empty !() is exactly equivalent to a starNoEmpty
let final = '';
if (this.type === '!' && this.#emptyExt) {
final = (this.isStart() && !dot ? startNoDot : '') + starNoEmpty;
}
else {
const close = this.type === '!'
                ? // !() must match something, but !(x) can match ''
'))' +
(this.isStart() && !dot && !allowDot ? startNoDot : '') +
star$1 +
')'
: this.type === '@'
? ')'
: this.type === '?'
? ')?'
: this.type === '+' && bodyDotAllowed
? ')'
: this.type === '*' && bodyDotAllowed
? `)?`
: `)${this.type}`;
final = start + body + close;
}
return [
final,
unescape$1(body),
(this.#hasMagic = !!this.#hasMagic),
this.#uflag,
];
}
#partsToRegExp(dot) {
return this.#parts
.map(p => {
            // extglob ASTs should only contain child ASTs, never raw strings
/* c8 ignore start */
if (typeof p === 'string') {
throw new Error('string type in extglob ast??');
}
/* c8 ignore stop */
// can ignore hasMagic, because extglobs are already always magic
const [re, _, _hasMagic, uflag] = p.toRegExpSource(dot);
this.#uflag = this.#uflag || uflag;
return re;
})
.filter(p => !(this.isStart() && this.isEnd()) || !!p)
.join('|');
}
static #parseGlob(glob, hasMagic, noEmpty = false) {
let escaping = false;
let re = '';
let uflag = false;
for (let i = 0; i < glob.length; i++) {
const c = glob.charAt(i);
if (escaping) {
escaping = false;
re += (reSpecials.has(c) ? '\\' : '') + c;
continue;
}
if (c === '\\') {
if (i === glob.length - 1) {
re += '\\\\';
}
else {
escaping = true;
}
continue;
}
if (c === '[') {
const [src, needUflag, consumed, magic] = parseClass(glob, i);
if (consumed) {
re += src;
uflag = uflag || needUflag;
i += consumed - 1;
hasMagic = hasMagic || magic;
continue;
}
}
if (c === '*') {
if (noEmpty && glob === '*')
re += starNoEmpty;
else
re += star$1;
hasMagic = true;
continue;
}
if (c === '?') {
re += qmark$1;
hasMagic = true;
continue;
}
re += regExpEscape$1(c);
}
return [re, unescape$1(glob), !!hasMagic, uflag];
}
}
/**
* Escape all magic characters in a glob pattern.
*
* If the {@link windowsPathsNoEscape | GlobOptions.windowsPathsNoEscape}
* option is used, then characters are escaped by wrapping in `[]`, because
* a magic character wrapped in a character class can only be satisfied by
* that exact character. In this mode, `\` is _not_ escaped, because it is
* not interpreted as a magic character, but instead as a path separator.
*/
const escape$2 = (s, { windowsPathsNoEscape = false, } = {}) => {
// don't need to escape +@! because we escape the parens
// that make those magic, and escaping ! as [!] isn't valid,
// because [!]] is a valid glob class meaning not ']'.
return windowsPathsNoEscape
? s.replace(/[?*()[\]]/g, '[$&]')
: s.replace(/[?*()[\]\\]/g, '\\$&');
};
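// A sketch of the escaping behaviour (the local form of minimatch's `escape`):
//   escape$2('a*b?(c)');                                 //=> 'a\\*b\\?\\(c\\)'
//   escape$2('a*b?(c)', { windowsPathsNoEscape: true }); //=> 'a[*]b[?][(]c[)]'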
const minimatch = (p, pattern, options = {}) => {
assertValidPattern(pattern);
// shortcut: comments match nothing.
if (!options.nocomment && pattern.charAt(0) === '#') {
return false;
}
return new Minimatch(pattern, options).match(p);
};
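// A minimal usage sketch:
//   minimatch('bar.foo', '*.foo');       //=> true
//   minimatch('.hidden.foo', '*.foo');   //=> false (dotfiles require { dot: true })
//   minimatch('# comment', '# comment'); //=> false (a leading '#' marks the pattern as a comment)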
// Optimized checking for the most common glob patterns.
const starDotExtRE = /^\*+([^+@!?\*\[\(]*)$/;
const starDotExtTest = (ext) => (f) => !f.startsWith('.') && f.endsWith(ext);
const starDotExtTestDot = (ext) => (f) => f.endsWith(ext);
const starDotExtTestNocase = (ext) => {
ext = ext.toLowerCase();
return (f) => !f.startsWith('.') && f.toLowerCase().endsWith(ext);
};
const starDotExtTestNocaseDot = (ext) => {
ext = ext.toLowerCase();
return (f) => f.toLowerCase().endsWith(ext);
};
const starDotStarRE = /^\*+\.\*+$/;
const starDotStarTest = (f) => !f.startsWith('.') && f.includes('.');
const starDotStarTestDot = (f) => f !== '.' && f !== '..' && f.includes('.');
const dotStarRE = /^\.\*+$/;
const dotStarTest = (f) => f !== '.' && f !== '..' && f.startsWith('.');
const starRE = /^\*+$/;
const starTest = (f) => f.length !== 0 && !f.startsWith('.');
const starTestDot = (f) => f.length !== 0 && f !== '.' && f !== '..';
const qmarksRE = /^\?+([^+@!?\*\[\(]*)?$/;
const qmarksTestNocase = ([$0, ext = '']) => {
const noext = qmarksTestNoExt([$0]);
if (!ext)
return noext;
ext = ext.toLowerCase();
return (f) => noext(f) && f.toLowerCase().endsWith(ext);
};
const qmarksTestNocaseDot = ([$0, ext = '']) => {
const noext = qmarksTestNoExtDot([$0]);
if (!ext)
return noext;
ext = ext.toLowerCase();
return (f) => noext(f) && f.toLowerCase().endsWith(ext);
};
const qmarksTestDot = ([$0, ext = '']) => {
const noext = qmarksTestNoExtDot([$0]);
return !ext ? noext : (f) => noext(f) && f.endsWith(ext);
};
const qmarksTest = ([$0, ext = '']) => {
const noext = qmarksTestNoExt([$0]);
return !ext ? noext : (f) => noext(f) && f.endsWith(ext);
};
const qmarksTestNoExt = ([$0]) => {
const len = $0.length;
return (f) => f.length === len && !f.startsWith('.');
};
const qmarksTestNoExtDot = ([$0]) => {
const len = $0.length;
return (f) => f.length === len && f !== '.' && f !== '..';
};
/* c8 ignore start */
const defaultPlatform$2 = (typeof process === 'object' && process
? (typeof process.env === 'object' &&
process.env &&
process.env.__MINIMATCH_TESTING_PLATFORM__) ||
process.platform
: 'posix');
const path$j = {
win32: { sep: '\\' },
posix: { sep: '/' },
};
/* c8 ignore stop */
const sep = defaultPlatform$2 === 'win32' ? path$j.win32.sep : path$j.posix.sep;
minimatch.sep = sep;
const GLOBSTAR$2 = Symbol('globstar **');
minimatch.GLOBSTAR = GLOBSTAR$2;
// any single thing other than /
// don't need to escape / when using new RegExp()
const qmark = '[^/]';
// * => any number of characters
const star = qmark + '*?';
// ** when dots are allowed. Anything goes, except .. and .
// not (^ or / followed by one or two dots followed by $ or /),
// followed by anything, any number of times.
const twoStarDot = '(?:(?!(?:\\/|^)(?:\\.{1,2})($|\\/)).)*?';
// not a ^ or / followed by a dot,
// followed by anything, any number of times.
const twoStarNoDot = '(?:(?!(?:\\/|^)\\.).)*?';
const filter$1 = (pattern, options = {}) => (p) => minimatch(p, pattern, options);
minimatch.filter = filter$1;
const ext = (a, b = {}) => Object.assign({}, a, b);
const defaults = (def) => {
if (!def || typeof def !== 'object' || !Object.keys(def).length) {
return minimatch;
}
const orig = minimatch;
const m = (p, pattern, options = {}) => orig(p, pattern, ext(def, options));
return Object.assign(m, {
Minimatch: class Minimatch extends orig.Minimatch {
constructor(pattern, options = {}) {
super(pattern, ext(def, options));
}
static defaults(options) {
return orig.defaults(ext(def, options)).Minimatch;
}
},
AST: class AST extends orig.AST {
/* c8 ignore start */
constructor(type, parent, options = {}) {
super(type, parent, ext(def, options));
}
/* c8 ignore stop */
static fromGlob(pattern, options = {}) {
return orig.AST.fromGlob(pattern, ext(def, options));
}
},
unescape: (s, options = {}) => orig.unescape(s, ext(def, options)),
escape: (s, options = {}) => orig.escape(s, ext(def, options)),
filter: (pattern, options = {}) => orig.filter(pattern, ext(def, options)),
defaults: (options) => orig.defaults(ext(def, options)),
makeRe: (pattern, options = {}) => orig.makeRe(pattern, ext(def, options)),
braceExpand: (pattern, options = {}) => orig.braceExpand(pattern, ext(def, options)),
match: (list, pattern, options = {}) => orig.match(list, pattern, ext(def, options)),
sep: orig.sep,
GLOBSTAR: GLOBSTAR$2,
});
};
minimatch.defaults = defaults;
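// Sketch: derive a matcher family with baked-in options and reuse it:
//   const mmNocase = minimatch.defaults({ nocase: true });
//   mmNocase('README.MD', '*.md');                      //=> true
//   new mmNocase.Minimatch('*.md').match('README.MD');  //=> true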
// Brace expansion:
// a{b,c}d -> abd acd
// a{b,}c -> abc ac
// a{0..3}d -> a0d a1d a2d a3d
// a{b,c{d,e}f}g -> abg acdfg acefg
// a{b,c}d{e,f}g -> abdeg abdfg acdeg acdfg
//
// Invalid sets are not expanded.
// a{2..}b -> a{2..}b
// a{b}c -> a{b}c
const braceExpand = (pattern, options = {}) => {
assertValidPattern(pattern);
// Thanks to Yeting Li <https://github.com/yetingli> for
// improving this regexp to avoid a ReDOS vulnerability.
if (options.nobrace || !/\{(?:(?!\{).)*\}/.test(pattern)) {
// shortcut. no need to expand.
return [pattern];
}
return expand$4(pattern);
};
minimatch.braceExpand = braceExpand;
// parse a component of the expanded set.
// At this point, no pattern may contain "/" in it
// so we're going to return a 2d array, where each entry is the full
// pattern, split on '/', and then turned into a regular expression.
// A regexp is made at the end which joins each array with an
// escaped /, and another full one which joins each regexp with |.
//
// Following the lead of Bash 4.1, note that "**" only has special meaning
// when it is the *only* thing in a path portion. Otherwise, any series
// of * is equivalent to a single *. Globstar behavior is enabled by
// default, and can be disabled by setting options.noglobstar.
const makeRe$1 = (pattern, options = {}) => new Minimatch(pattern, options).makeRe();
minimatch.makeRe = makeRe$1;
const match = (list, pattern, options = {}) => {
const mm = new Minimatch(pattern, options);
list = list.filter(f => mm.match(f));
if (mm.options.nonull && !list.length) {
list.push(pattern);
}
return list;
};
minimatch.match = match;
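// For example:
//   match(['a.js', 'b.css', 'c.js'], '*.js');   //=> ['a.js', 'c.js']
//   match(['a.css'], '*.js', { nonull: true }); //=> ['*.js'] (the pattern itself is returned when nothing matches)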
// quick test for glob magic characters in a path portion
const globMagic = /[?*]|[+@!]\(.*?\)|\[|\]/;
const regExpEscape = (s) => s.replace(/[-[\]{}()*+?.,\\^$|#\s]/g, '\\$&');
class Minimatch {
options;
set;
pattern;
windowsPathsNoEscape;
nonegate;
negate;
comment;
empty;
preserveMultipleSlashes;
partial;
globSet;
globParts;
nocase;
isWindows;
platform;
windowsNoMagicRoot;
regexp;
constructor(pattern, options = {}) {
assertValidPattern(pattern);
options = options || {};
this.options = options;
this.pattern = pattern;
this.platform = options.platform || defaultPlatform$2;
this.isWindows = this.platform === 'win32';
this.windowsPathsNoEscape =
!!options.windowsPathsNoEscape || options.allowWindowsEscape === false;
if (this.windowsPathsNoEscape) {
this.pattern = this.pattern.replace(/\\/g, '/');
}
this.preserveMultipleSlashes = !!options.preserveMultipleSlashes;
this.regexp = null;
this.negate = false;
this.nonegate = !!options.nonegate;
this.comment = false;
this.empty = false;
this.partial = !!options.partial;
this.nocase = !!this.options.nocase;
this.windowsNoMagicRoot =
options.windowsNoMagicRoot !== undefined
? options.windowsNoMagicRoot
: !!(this.isWindows && this.nocase);
this.globSet = [];
this.globParts = [];
this.set = [];
// make the set of regexps etc.
this.make();
}
hasMagic() {
if (this.options.magicalBraces && this.set.length > 1) {
return true;
}
for (const pattern of this.set) {
for (const part of pattern) {
if (typeof part !== 'string')
return true;
}
}
return false;
}
debug(..._) { }
make() {
const pattern = this.pattern;
const options = this.options;
// empty patterns and comments match nothing.
if (!options.nocomment && pattern.charAt(0) === '#') {
this.comment = true;
return;
}
if (!pattern) {
this.empty = true;
return;
}
// step 1: figure out negation, etc.
this.parseNegate();
// step 2: expand braces
this.globSet = [...new Set(this.braceExpand())];
if (options.debug) {
this.debug = (...args) => console.error(...args);
}
this.debug(this.pattern, this.globSet);
// step 3: now we have a set, so turn each one into a series of
// path-portion matching patterns.
// These will be regexps, except in the case of "**", which is
// set to the GLOBSTAR object for globstar behavior,
// and will not contain any / characters
//
// First, we preprocess to make the glob pattern sets a bit simpler
// and deduped. There are some perf-killing patterns that can cause
// problems with a glob walk, but we can simplify them down a bit.
const rawGlobParts = this.globSet.map(s => this.slashSplit(s));
this.globParts = this.preprocess(rawGlobParts);
this.debug(this.pattern, this.globParts);
// glob --> regexps
let set = this.globParts.map((s, _, __) => {
if (this.isWindows && this.windowsNoMagicRoot) {
// check if it's a drive or unc path.
const isUNC = s[0] === '' &&
s[1] === '' &&
(s[2] === '?' || !globMagic.test(s[2])) &&
!globMagic.test(s[3]);
const isDrive = /^[a-z]:/i.test(s[0]);
if (isUNC) {
return [...s.slice(0, 4), ...s.slice(4).map(ss => this.parse(ss))];
}
else if (isDrive) {
return [s[0], ...s.slice(1).map(ss => this.parse(ss))];
}
}
return s.map(ss => this.parse(ss));
});
this.debug(this.pattern, set);
// filter out everything that didn't compile properly.
this.set = set.filter(s => s.indexOf(false) === -1);
// do not treat the ? in UNC paths as magic
if (this.isWindows) {
for (let i = 0; i < this.set.length; i++) {
const p = this.set[i];
if (p[0] === '' &&
p[1] === '' &&
this.globParts[i][2] === '?' &&
typeof p[3] === 'string' &&
/^[a-z]:$/i.test(p[3])) {
p[2] = '?';
}
}
}
this.debug(this.pattern, this.set);
}
// various transforms to equivalent pattern sets that are
// faster to process in a filesystem walk. The goal is to
// eliminate what we can, and push all ** patterns as far
// to the right as possible, even if it increases the number
// of patterns that we have to process.
preprocess(globParts) {
// if we're not in globstar mode, then turn all ** into *
if (this.options.noglobstar) {
for (let i = 0; i < globParts.length; i++) {
for (let j = 0; j < globParts[i].length; j++) {
if (globParts[i][j] === '**') {
globParts[i][j] = '*';
}
}
}
}
const { optimizationLevel = 1 } = this.options;
if (optimizationLevel >= 2) {
// aggressive optimization for the purpose of fs walking
globParts = this.firstPhasePreProcess(globParts);
globParts = this.secondPhasePreProcess(globParts);
}
else if (optimizationLevel >= 1) {
// just basic optimizations to remove some .. parts
globParts = this.levelOneOptimize(globParts);
}
else {
// just collapse multiple ** portions into one
globParts = this.adjascentGlobstarOptimize(globParts);
}
return globParts;
}
    // just get rid of adjacent ** portions
adjascentGlobstarOptimize(globParts) {
return globParts.map(parts => {
let gs = -1;
while (-1 !== (gs = parts.indexOf('**', gs + 1))) {
let i = gs;
while (parts[i + 1] === '**') {
i++;
}
if (i !== gs) {
parts.splice(gs, i - gs);
}
}
return parts;
});
}
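    // For example, a pattern set such as [['a', '**', '**', '**', 'b']] collapses
    // to [['a', '**', 'b']]: the three adjacent globstars are equivalent to one.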
    // get rid of adjacent ** and resolve .. portions
levelOneOptimize(globParts) {
return globParts.map(parts => {
parts = parts.reduce((set, part) => {
const prev = set[set.length - 1];
if (part === '**' && prev === '**') {
return set;
}
if (part === '..') {
if (prev && prev !== '..' && prev !== '.' && prev !== '**') {
set.pop();
return set;
}
}
set.push(part);
return set;
}, []);
return parts.length === 0 ? [''] : parts;
});
}
levelTwoFileOptimize(parts) {
if (!Array.isArray(parts)) {
parts = this.slashSplit(parts);
}
let didSomething = false;
do {
didSomething = false;
// <pre>/<e>/<rest> -> <pre>/<rest>
if (!this.preserveMultipleSlashes) {
for (let i = 1; i < parts.length - 1; i++) {
const p = parts[i];
// don't squeeze out UNC patterns
if (i === 1 && p === '' && parts[0] === '')
continue;
if (p === '.' || p === '') {
didSomething = true;
parts.splice(i, 1);
i--;
}
}
if (parts[0] === '.' &&
parts.length === 2 &&
(parts[1] === '.' || parts[1] === '')) {
didSomething = true;
parts.pop();
}
}
// <pre>/<p>/../<rest> -> <pre>/<rest>
let dd = 0;
while (-1 !== (dd = parts.indexOf('..', dd + 1))) {
const p = parts[dd - 1];
if (p && p !== '.' && p !== '..' && p !== '**') {
didSomething = true;
parts.splice(dd - 1, 2);
dd -= 2;
}
}
} while (didSomething);
return parts.length === 0 ? [''] : parts;
}
// First phase: single-pattern processing
// <pre> is 1 or more portions
// <rest> is 1 or more portions
// <p> is any portion other than ., .., '', or **
// <e> is . or ''
//
// **/.. is *brutal* for filesystem walking performance, because
// it effectively resets the recursive walk each time it occurs,
// and ** cannot be reduced out by a .. pattern part like a regexp
// or most strings (other than .., ., and '') can be.
//
// <pre>/**/../<p>/<p>/<rest> -> {<pre>/../<p>/<p>/<rest>,<pre>/**/<p>/<p>/<rest>}
// <pre>/<e>/<rest> -> <pre>/<rest>
// <pre>/<p>/../<rest> -> <pre>/<rest>
// **/**/<rest> -> **/<rest>
//
// **/*/<rest> -> */**/<rest> <== not valid because ** doesn't follow
// this WOULD be allowed if ** did follow symlinks, or * didn't
firstPhasePreProcess(globParts) {
let didSomething = false;
do {
didSomething = false;
// <pre>/**/../<p>/<p>/<rest> -> {<pre>/../<p>/<p>/<rest>,<pre>/**/<p>/<p>/<rest>}
for (let parts of globParts) {
let gs = -1;
while (-1 !== (gs = parts.indexOf('**', gs + 1))) {
let gss = gs;
while (parts[gss + 1] === '**') {
// <pre>/**/**/<rest> -> <pre>/**/<rest>
gss++;
}
// eg, if gs is 2 and gss is 4, that means we have 3 **
// parts, and can remove 2 of them.
if (gss > gs) {
parts.splice(gs + 1, gss - gs);
}
let next = parts[gs + 1];
const p = parts[gs + 2];
const p2 = parts[gs + 3];
if (next !== '..')
continue;
if (!p ||
p === '.' ||
p === '..' ||
!p2 ||
p2 === '.' ||
p2 === '..') {
continue;
}
didSomething = true;
// edit parts in place, and push the new one
parts.splice(gs, 1);
const other = parts.slice(0);
other[gs] = '**';
globParts.push(other);
gs--;
}
// <pre>/<e>/<rest> -> <pre>/<rest>
if (!this.preserveMultipleSlashes) {
for (let i = 1; i < parts.length - 1; i++) {
const p = parts[i];
// don't squeeze out UNC patterns
if (i === 1 && p === '' && parts[0] === '')
continue;
if (p === '.' || p === '') {
didSomething = true;
parts.splice(i, 1);
i--;
}
}
if (parts[0] === '.' &&
parts.length === 2 &&
(parts[1] === '.' || parts[1] === '')) {
didSomething = true;
parts.pop();
}
}
// <pre>/<p>/../<rest> -> <pre>/<rest>
let dd = 0;
while (-1 !== (dd = parts.indexOf('..', dd + 1))) {
const p = parts[dd - 1];
if (p && p !== '.' && p !== '..' && p !== '**') {
didSomething = true;
const needDot = dd === 1 && parts[dd + 1] === '**';
const splin = needDot ? ['.'] : [];
parts.splice(dd - 1, 2, ...splin);
if (parts.length === 0)
parts.push('');
dd -= 2;
}
}
}
} while (didSomething);
return globParts;
}
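// Example (illustrative): the '**/..' expansion above turns one pattern into
// two, since '..' cannot be folded into a globstar, e.g.
//
//   firstPhasePreProcess([['a', '**', '..', 'x', 'y', 'z']])
//   // => [['x', 'y', 'z'], ['a', '**', 'x', 'y', 'z']]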
// second phase: multi-pattern dedupes
// {<pre>/*/<rest>,<pre>/<p>/<rest>} -> <pre>/*/<rest>
// {<pre>/<rest>,<pre>/<rest>} -> <pre>/<rest>
// {<pre>/**/<rest>,<pre>/<rest>} -> <pre>/**/<rest>
//
// {<pre>/**/<rest>,<pre>/**/<p>/<rest>} -> <pre>/**/<rest>
// ^-- not valid because ** doesn't follow symlinks
secondPhasePreProcess(globParts) {
for (let i = 0; i < globParts.length - 1; i++) {
for (let j = i + 1; j < globParts.length; j++) {
const matched = this.partsMatch(globParts[i], globParts[j], !this.preserveMultipleSlashes);
if (matched) {
globParts[i] = [];
globParts[j] = matched;
break;
}
}
}
return globParts.filter(gs => gs.length);
}
partsMatch(a, b, emptyGSMatch = false) {
let ai = 0;
let bi = 0;
let result = [];
let which = '';
while (ai < a.length && bi < b.length) {
if (a[ai] === b[bi]) {
result.push(which === 'b' ? b[bi] : a[ai]);
ai++;
bi++;
}
else if (emptyGSMatch && a[ai] === '**' && b[bi] === a[ai + 1]) {
result.push(a[ai]);
ai++;
}
else if (emptyGSMatch && b[bi] === '**' && a[ai] === b[bi + 1]) {
result.push(b[bi]);
bi++;
}
else if (a[ai] === '*' &&
b[bi] &&
(this.options.dot || !b[bi].startsWith('.')) &&
b[bi] !== '**') {
if (which === 'b')
return false;
which = 'a';
result.push(a[ai]);
ai++;
bi++;
}
else if (b[bi] === '*' &&
a[ai] &&
(this.options.dot || !a[ai].startsWith('.')) &&
a[ai] !== '**') {
if (which === 'a')
return false;
which = 'b';
result.push(b[bi]);
ai++;
bi++;
}
else {
return false;
}
}
// if we fall out of the loop, it means the two are identical
// as long as their lengths match
return a.length === b.length && result;
}
parseNegate() {
if (this.nonegate)
return;
const pattern = this.pattern;
let negate = false;
let negateOffset = 0;
for (let i = 0; i < pattern.length && pattern.charAt(i) === '!'; i++) {
negate = !negate;
negateOffset++;
}
if (negateOffset)
this.pattern = pattern.slice(negateOffset);
this.negate = negate;
}
// set partial to true to test if, for example,
// "/a/b" matches the start of "/*/b/*/d"
// Partial means, if you run out of file before you run
// out of pattern, then that's fine, as long as all
// the parts match.
matchOne(file, pattern, partial = false) {
const options = this.options;
// UNC paths like //?/X:/... can match X:/... and vice versa
// Drive letters in absolute drive or unc paths are always compared
// case-insensitively.
if (this.isWindows) {
const fileDrive = typeof file[0] === 'string' && /^[a-z]:$/i.test(file[0]);
const fileUNC = !fileDrive &&
file[0] === '' &&
file[1] === '' &&
file[2] === '?' &&
/^[a-z]:$/i.test(file[3]);
const patternDrive = typeof pattern[0] === 'string' && /^[a-z]:$/i.test(pattern[0]);
const patternUNC = !patternDrive &&
pattern[0] === '' &&
pattern[1] === '' &&
pattern[2] === '?' &&
typeof pattern[3] === 'string' &&
/^[a-z]:$/i.test(pattern[3]);
const fdi = fileUNC ? 3 : fileDrive ? 0 : undefined;
const pdi = patternUNC ? 3 : patternDrive ? 0 : undefined;
if (typeof fdi === 'number' && typeof pdi === 'number') {
const [fd, pd] = [file[fdi], pattern[pdi]];
if (fd.toLowerCase() === pd.toLowerCase()) {
pattern[pdi] = fd;
if (pdi > fdi) {
pattern = pattern.slice(pdi);
}
else if (fdi > pdi) {
file = file.slice(fdi);
}
}
}
}
// resolve and reduce . and .. portions in the file as well.
// don't need to do the second phase, because it's only one string[]
const { optimizationLevel = 1 } = this.options;
if (optimizationLevel >= 2) {
file = this.levelTwoFileOptimize(file);
}
this.debug('matchOne', this, { file, pattern });
this.debug('matchOne', file.length, pattern.length);
for (var fi = 0, pi = 0, fl = file.length, pl = pattern.length; fi < fl && pi < pl; fi++, pi++) {
this.debug('matchOne loop');
var p = pattern[pi];
var f = file[fi];
this.debug(pattern, p, f);
// should be impossible.
// some invalid regexp stuff in the set.
/* c8 ignore start */
if (p === false) {
return false;
}
/* c8 ignore stop */
if (p === GLOBSTAR$2) {
this.debug('GLOBSTAR', [pattern, p, f]);
// "**"
// a/**/b/**/c would match the following:
// a/b/x/y/z/c
// a/x/y/z/b/c
// a/b/x/b/x/c
// a/b/c
// To do this, take the rest of the pattern after
// the **, and see if it would match the file remainder.
// If so, return success.
// If not, the ** "swallows" a segment, and try again.
// This is recursively awful.
//
// a/**/b/**/c matching a/b/x/y/z/c
// - a matches a
// - doublestar
// - matchOne(b/x/y/z/c, b/**/c)
// - b matches b
// - doublestar
// - matchOne(x/y/z/c, c) -> no
// - matchOne(y/z/c, c) -> no
// - matchOne(z/c, c) -> no
// - matchOne(c, c) yes, hit
var fr = fi;
var pr = pi + 1;
if (pr === pl) {
this.debug('** at the end');
// a ** at the end will just swallow the rest.
// We have found a match.
// however, it will not swallow /.x, unless
// options.dot is set.
// . and .. are *never* matched by **, for explosively
// exponential reasons.
for (; fi < fl; fi++) {
if (file[fi] === '.' ||
file[fi] === '..' ||
(!options.dot && file[fi].charAt(0) === '.'))
return false;
}
return true;
}
// ok, let's see if we can swallow whatever we can.
while (fr < fl) {
var swallowee = file[fr];
this.debug('\nglobstar while', file, fr, pattern, pr, swallowee);
// XXX remove this slice. Just pass the start index.
if (this.matchOne(file.slice(fr), pattern.slice(pr), partial)) {
this.debug('globstar found match!', fr, fl, swallowee);
// found a match.
return true;
}
else {
// can't swallow "." or ".." ever.
// can only swallow ".foo" when explicitly asked.
if (swallowee === '.' ||
swallowee === '..' ||
(!options.dot && swallowee.charAt(0) === '.')) {
this.debug('dot detected!', file, fr, pattern, pr);
break;
}
// ** swallows a segment, and continue.
this.debug('globstar swallow a segment, and continue');
fr++;
}
}
// no match was found.
// However, in partial mode, we can't say this is necessarily over.
/* c8 ignore start */
if (partial) {
// ran out of file
this.debug('\n>>> no match, partial?', file, fr, pattern, pr);
if (fr === fl) {
return true;
}
}
/* c8 ignore stop */
return false;
}
// something other than **
// non-magic patterns just have to match exactly
// patterns with magic have been turned into regexps.
let hit;
if (typeof p === 'string') {
hit = f === p;
this.debug('string match', p, f, hit);
}
else {
hit = p.test(f);
this.debug('pattern match', p, f, hit);
}
if (!hit)
return false;
}
// Note: ending in / means that we'll get a final ""
// at the end of the pattern. This can only match a
// corresponding "" at the end of the file.
// If the file ends in /, then it can only match a
// pattern that ends in /, unless the pattern just
// doesn't have any more for it. But, a/b/ should *not*
// match "a/b/*", even though "" matches against the
// [^/]*? pattern, except in partial mode, where it might
// simply not be reached yet.
// However, a/b/ should still satisfy a/*
// now either we fell off the end of the pattern, or we're done.
if (fi === fl && pi === pl) {
// ran out of pattern and filename at the same time.
// an exact hit!
return true;
}
else if (fi === fl) {
// ran out of file, but still had pattern left.
// this is ok if we're doing the match as part of
// a glob fs traversal.
return partial;
}
else if (pi === pl) {
// ran out of pattern, still have file left.
// this is only acceptable if we're on the very last
// empty segment of a file with a trailing slash.
// a/* should match a/b/
return fi === fl - 1 && file[fi] === '';
/* c8 ignore start */
}
else {
// should be unreachable.
throw new Error('wtf?');
}
/* c8 ignore stop */
}
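// Example (illustrative, using the internal parsed set): matchOne walks file
// segments against pattern segments, and with partial=true it accepts a file
// that runs out before the pattern does:
//
//   const mm = new Minimatch('a/**/c');
//   mm.matchOne(['a', 'b', 'x', 'c'], mm.set[0])    // => true
//   mm.matchOne(['a', 'b'], mm.set[0], true)        // => true (valid prefix)
//   mm.matchOne(['a', 'b'], mm.set[0])              // => false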
braceExpand() {
return braceExpand(this.pattern, this.options);
}
parse(pattern) {
assertValidPattern(pattern);
const options = this.options;
// shortcuts
if (pattern === '**')
return GLOBSTAR$2;
if (pattern === '')
return '';
// far and away, the most common glob pattern parts are
// *, *.*, and *.<ext> Add a fast check method for those.
let m;
let fastTest = null;
if ((m = pattern.match(starRE))) {
fastTest = options.dot ? starTestDot : starTest;
}
else if ((m = pattern.match(starDotExtRE))) {
fastTest = (options.nocase
? options.dot
? starDotExtTestNocaseDot
: starDotExtTestNocase
: options.dot
? starDotExtTestDot
: starDotExtTest)(m[1]);
}
else if ((m = pattern.match(qmarksRE))) {
fastTest = (options.nocase
? options.dot
? qmarksTestNocaseDot
: qmarksTestNocase
: options.dot
? qmarksTestDot
: qmarksTest)(m);
}
else if ((m = pattern.match(starDotStarRE))) {
fastTest = options.dot ? starDotStarTestDot : starDotStarTest;
}
else if ((m = pattern.match(dotStarRE))) {
fastTest = dotStarTest;
}
const re = AST.fromGlob(pattern, this.options).toMMPattern();
if (fastTest && typeof re === 'object') {
// Avoids overriding in frozen environments
Reflect.defineProperty(re, 'test', { value: fastTest });
}
return re;
}
makeRe() {
if (this.regexp || this.regexp === false)
return this.regexp;
// at this point, this.set is a 2d array of partial
// pattern strings, or "**".
//
// It's better to use .match(). This function shouldn't
// be used, really, but it's pretty convenient sometimes,
// when you just want to work with a regex.
const set = this.set;
if (!set.length) {
this.regexp = false;
return this.regexp;
}
const options = this.options;
const twoStar = options.noglobstar
? star
: options.dot
? twoStarDot
: twoStarNoDot;
const flags = new Set(options.nocase ? ['i'] : []);
// regexpify non-globstar patterns
// if ** is only item, then we just do one twoStar
// if ** is first, and there are more, prepend (\/|twoStar\/)? to next
// if ** is last, append (\/twoStar|) to previous
// if ** is in the middle, append (\/|\/twoStar\/) to previous
// then filter out GLOBSTAR symbols
let re = set
.map(pattern => {
const pp = pattern.map(p => {
if (p instanceof RegExp) {
for (const f of p.flags.split(''))
flags.add(f);
}
return typeof p === 'string'
? regExpEscape(p)
: p === GLOBSTAR$2
? GLOBSTAR$2
: p._src;
});
pp.forEach((p, i) => {
const next = pp[i + 1];
const prev = pp[i - 1];
if (p !== GLOBSTAR$2 || prev === GLOBSTAR$2) {
return;
}
if (prev === undefined) {
if (next !== undefined && next !== GLOBSTAR$2) {
pp[i + 1] = '(?:\\/|' + twoStar + '\\/)?' + next;
}
else {
pp[i] = twoStar;
}
}
else if (next === undefined) {
pp[i - 1] = prev + '(?:\\/|' + twoStar + ')?';
}
else if (next !== GLOBSTAR$2) {
pp[i - 1] = prev + '(?:\\/|\\/' + twoStar + '\\/)' + next;
pp[i + 1] = GLOBSTAR$2;
}
});
return pp.filter(p => p !== GLOBSTAR$2).join('/');
})
.join('|');
// need to wrap in parens if we had more than one thing with |,
// otherwise only the first will be anchored to ^ and the last to $
const [open, close] = set.length > 1 ? ['(?:', ')'] : ['', ''];
// must match entire pattern
// ending in a * or ** will make it less strict.
re = '^' + open + re + close + '$';
// can match anything, as long as it's not this.
if (this.negate)
re = '^(?!' + re + ').+$';
try {
this.regexp = new RegExp(re, [...flags].join(''));
/* c8 ignore start */
}
catch (ex) {
// should be impossible
this.regexp = false;
}
/* c8 ignore stop */
return this.regexp;
}
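// Example (illustrative): makeRe() compiles the whole pattern set into a
// single anchored RegExp; match() is generally preferable, but the regexp is
// handy when one is required:
//
//   const re = new Minimatch('a/**/*.js').makeRe();
//   re.test('a/b/c.js')    // => true
//   re.test('a/b/c.css')   // => false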
slashSplit(p) {
// if p starts with // on windows, we preserve that
// so that UNC paths aren't broken. Otherwise, any number of
// / characters are coalesced into one, unless
// preserveMultipleSlashes is set to true.
if (this.preserveMultipleSlashes) {
return p.split('/');
}
else if (this.isWindows && /^\/\/[^\/]+/.test(p)) {
// add an extra '' for the one we lose
return ['', ...p.split(/\/+/)];
}
else {
return p.split(/\/+/);
}
}
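// Example (illustrative): multiple slashes collapse unless they introduce a
// Windows UNC path, in which case the leading '//' is preserved:
//
//   slashSplit('a//b/c')           // => ['a', 'b', 'c']
//   slashSplit('//host/share/x')   // => ['', '', 'host', 'share', 'x']
//                                  //    (on Windows, UNC prefix kept)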
match(f, partial = this.partial) {
this.debug('match', f, this.pattern);
// short-circuit in the case of busted things.
// comments, etc.
if (this.comment) {
return false;
}
if (this.empty) {
return f === '';
}
if (f === '/' && partial) {
return true;
}
const options = this.options;
// windows: need to use /, not \
if (this.isWindows) {
f = f.split('\\').join('/');
}
// treat the test path as a set of pathparts.
const ff = this.slashSplit(f);
this.debug(this.pattern, 'split', ff);
// just ONE of the pattern sets in this.set needs to match
// in order for it to be valid. If negating, then just one
// match means that we have failed.
// Either way, return on the first hit.
const set = this.set;
this.debug(this.pattern, 'set', set);
// Find the basename of the path by looking for the last non-empty segment
let filename = ff[ff.length - 1];
if (!filename) {
for (let i = ff.length - 2; !filename && i >= 0; i--) {
filename = ff[i];
}
}
for (let i = 0; i < set.length; i++) {
const pattern = set[i];
let file = ff;
if (options.matchBase && pattern.length === 1) {
file = [filename];
}
const hit = this.matchOne(file, pattern, partial);
if (hit) {
if (options.flipNegate) {
return true;
}
return !this.negate;
}
}
// didn't get any hits. this is success if it's a negative
// pattern, failure otherwise.
if (options.flipNegate) {
return false;
}
return this.negate;
}
static defaults(def) {
return minimatch.defaults(def).Minimatch;
}
}
/* c8 ignore stop */
minimatch.AST = AST;
minimatch.Minimatch = Minimatch;
minimatch.escape = escape$2;
minimatch.unescape = unescape$1;
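// Example (illustrative): typical use of the minimatch function and the
// Minimatch class defined above; negation patterns invert the result:
//
//   minimatch('src/app/index.js', 'src/**/*.js')    // => true
//   minimatch('readme.md', 'src/**/*.js')           // => false
//   new Minimatch('!*.test.js').match('foo.js')     // => true (negated)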
/**
* @module LRUCache
*/
const perf = typeof performance === 'object' &&
performance &&
typeof performance.now === 'function'
? performance
: Date;
const warned$1 = new Set();
/* c8 ignore start */
const PROCESS = (typeof process === 'object' && !!process ? process : {});
/* c8 ignore start */
const emitWarning = (msg, type, code, fn) => {
typeof PROCESS.emitWarning === 'function'
? PROCESS.emitWarning(msg, type, code, fn)
: console.error(`[${code}] ${type}: ${msg}`);
};
let AC = globalThis.AbortController;
let AS = globalThis.AbortSignal;
/* c8 ignore start */
if (typeof AC === 'undefined') {
//@ts-ignore
AS = class AbortSignal {
onabort;
_onabort = [];
reason;
aborted = false;
addEventListener(_, fn) {
this._onabort.push(fn);
}
};
//@ts-ignore
AC = class AbortController {
constructor() {
warnACPolyfill();
}
signal = new AS();
abort(reason) {
if (this.signal.aborted)
return;
//@ts-ignore
this.signal.reason = reason;
//@ts-ignore
this.signal.aborted = true;
//@ts-ignore
for (const fn of this.signal._onabort) {
fn(reason);
}
this.signal.onabort?.(reason);
}
};
let printACPolyfillWarning = PROCESS.env?.LRU_CACHE_IGNORE_AC_WARNING !== '1';
const warnACPolyfill = () => {
if (!printACPolyfillWarning)
return;
printACPolyfillWarning = false;
emitWarning('AbortController is not defined. If using lru-cache in ' +
'node 14, load an AbortController polyfill from the ' +
'`node-abort-controller` package. A minimal polyfill is ' +
'provided for use by LRUCache.fetch(), but it should not be ' +
'relied upon in other contexts (eg, passing it to other APIs that ' +
'use AbortController/AbortSignal might have undesirable effects). ' +
'You may disable this with LRU_CACHE_IGNORE_AC_WARNING=1 in the env.', 'NO_ABORT_CONTROLLER', 'ENOTSUP', warnACPolyfill);
};
}
/* c8 ignore stop */
const shouldWarn = (code) => !warned$1.has(code);
const isPosInt = (n) => n && n === Math.floor(n) && n > 0 && isFinite(n);
/* c8 ignore start */
// This is a little bit ridiculous, tbh.
// The maximum array length is 2^32-1 or thereabouts on most JS impls.
// And well before that point, you're caching the entire world, I mean,
// that's ~32GB of just integers for the next/prev links, plus whatever
// else to hold that many keys and values. Just filling the memory with
// zeroes at init time is brutal when you get that big.
// But why not be complete?
// Maybe in the future, these limits will have expanded.
const getUintArray = (max) => !isPosInt(max)
? null
: max <= Math.pow(2, 8)
? Uint8Array
: max <= Math.pow(2, 16)
? Uint16Array
: max <= Math.pow(2, 32)
? Uint32Array
: max <= Number.MAX_SAFE_INTEGER
? ZeroArray
: null;
/* c8 ignore stop */
class ZeroArray extends Array {
constructor(size) {
super(size);
this.fill(0);
}
}
class Stack {
heap;
length;
// private constructor
static #constructing = false;
static create(max) {
const HeapCls = getUintArray(max);
if (!HeapCls)
return [];
Stack.#constructing = true;
const s = new Stack(max, HeapCls);
Stack.#constructing = false;
return s;
}
constructor(max, HeapCls) {
/* c8 ignore start */
if (!Stack.#constructing) {
throw new TypeError('instantiate Stack using Stack.create(n)');
}
/* c8 ignore stop */
this.heap = new HeapCls(max);
this.length = 0;
}
push(n) {
this.heap[this.length++] = n;
}
pop() {
return this.heap[--this.length];
}
}
/**
* Default export, the thing you're using this module to get.
*
* The `K` and `V` types define the key and value types, respectively. The
* optional `FC` type defines the type of the `context` object passed to
* `cache.fetch()` and `cache.memo()`.
*
* Keys and values **must not** be `null` or `undefined`.
*
* All properties from the options object (with the exception of `max`,
* `maxSize`, `fetchMethod`, `memoMethod`, `dispose` and `disposeAfter`) are
* added as normal public members. (The listed options are read-only getters.)
*
* Changing any of these will alter the defaults for subsequent method calls.
*/
class LRUCache {
// options that cannot be changed without disaster
#max;
#maxSize;
#dispose;
#disposeAfter;
#fetchMethod;
#memoMethod;
/**
* {@link LRUCache.OptionsBase.ttl}
*/
ttl;
/**
* {@link LRUCache.OptionsBase.ttlResolution}
*/
ttlResolution;
/**
* {@link LRUCache.OptionsBase.ttlAutopurge}
*/
ttlAutopurge;
/**
* {@link LRUCache.OptionsBase.updateAgeOnGet}
*/
updateAgeOnGet;
/**
* {@link LRUCache.OptionsBase.updateAgeOnHas}
*/
updateAgeOnHas;
/**
* {@link LRUCache.OptionsBase.allowStale}
*/
allowStale;
/**
* {@link LRUCache.OptionsBase.noDisposeOnSet}
*/
noDisposeOnSet;
/**
* {@link LRUCache.OptionsBase.noUpdateTTL}
*/
noUpdateTTL;
/**
* {@link LRUCache.OptionsBase.maxEntrySize}
*/
maxEntrySize;
/**
* {@link LRUCache.OptionsBase.sizeCalculation}
*/
sizeCalculation;
/**
* {@link LRUCache.OptionsBase.noDeleteOnFetchRejection}
*/
noDeleteOnFetchRejection;
/**
* {@link LRUCache.OptionsBase.noDeleteOnStaleGet}
*/
noDeleteOnStaleGet;
/**
* {@link LRUCache.OptionsBase.allowStaleOnFetchAbort}
*/
allowStaleOnFetchAbort;
/**
* {@link LRUCache.OptionsBase.allowStaleOnFetchRejection}
*/
allowStaleOnFetchRejection;
/**
* {@link LRUCache.OptionsBase.ignoreFetchAbort}
*/
ignoreFetchAbort;
// computed properties
#size;
#calculatedSize;
#keyMap;
#keyList;
#valList;
#next;
#prev;
#head;
#tail;
#free;
#disposed;
#sizes;
#starts;
#ttls;
#hasDispose;
#hasFetchMethod;
#hasDisposeAfter;
/**
* Do not call this method unless you need to inspect the
* inner workings of the cache. If anything returned by this
* object is modified in any way, strange breakage may occur.
*
* These fields are private for a reason!
*
* @internal
*/
static unsafeExposeInternals(c) {
return {
// properties
starts: c.#starts,
ttls: c.#ttls,
sizes: c.#sizes,
keyMap: c.#keyMap,
keyList: c.#keyList,
valList: c.#valList,
next: c.#next,
prev: c.#prev,
get head() {
return c.#head;
},
get tail() {
return c.#tail;
},
free: c.#free,
// methods
isBackgroundFetch: (p) => c.#isBackgroundFetch(p),
backgroundFetch: (k, index, options, context) => c.#backgroundFetch(k, index, options, context),
moveToTail: (index) => c.#moveToTail(index),
indexes: (options) => c.#indexes(options),
rindexes: (options) => c.#rindexes(options),
isStale: (index) => c.#isStale(index),
};
}
// Protected read-only members
/**
* {@link LRUCache.OptionsBase.max} (read-only)
*/
get max() {
return this.#max;
}
/**
* {@link LRUCache.OptionsBase.maxSize} (read-only)
*/
get maxSize() {
return this.#maxSize;
}
/**
* The total computed size of items in the cache (read-only)
*/
get calculatedSize() {
return this.#calculatedSize;
}
/**
* The number of items stored in the cache (read-only)
*/
get size() {
return this.#size;
}
/**
* {@link LRUCache.OptionsBase.fetchMethod} (read-only)
*/
get fetchMethod() {
return this.#fetchMethod;
}
get memoMethod() {
return this.#memoMethod;
}
/**
* {@link LRUCache.OptionsBase.dispose} (read-only)
*/
get dispose() {
return this.#dispose;
}
/**
* {@link LRUCache.OptionsBase.disposeAfter} (read-only)
*/
get disposeAfter() {
return this.#disposeAfter;
}
constructor(options) {
const { max = 0, ttl, ttlResolution = 1, ttlAutopurge, updateAgeOnGet, updateAgeOnHas, allowStale, dispose, disposeAfter, noDisposeOnSet, noUpdateTTL, maxSize = 0, maxEntrySize = 0, sizeCalculation, fetchMethod, memoMethod, noDeleteOnFetchRejection, noDeleteOnStaleGet, allowStaleOnFetchRejection, allowStaleOnFetchAbort, ignoreFetchAbort, } = options;
if (max !== 0 && !isPosInt(max)) {
throw new TypeError('max option must be a nonnegative integer');
}
const UintArray = max ? getUintArray(max) : Array;
if (!UintArray) {
throw new Error('invalid max value: ' + max);
}
this.#max = max;
this.#maxSize = maxSize;
this.maxEntrySize = maxEntrySize || this.#maxSize;
this.sizeCalculation = sizeCalculation;
if (this.sizeCalculation) {
if (!this.#maxSize && !this.maxEntrySize) {
throw new TypeError('cannot set sizeCalculation without setting maxSize or maxEntrySize');
}
if (typeof this.sizeCalculation !== 'function') {
throw new TypeError('sizeCalculation set to non-function');
}
}
if (memoMethod !== undefined &&
typeof memoMethod !== 'function') {
throw new TypeError('memoMethod must be a function if defined');
}
this.#memoMethod = memoMethod;
if (fetchMethod !== undefined &&
typeof fetchMethod !== 'function') {
throw new TypeError('fetchMethod must be a function if specified');
}
this.#fetchMethod = fetchMethod;
this.#hasFetchMethod = !!fetchMethod;
this.#keyMap = new Map();
this.#keyList = new Array(max).fill(undefined);
this.#valList = new Array(max).fill(undefined);
this.#next = new UintArray(max);
this.#prev = new UintArray(max);
this.#head = 0;
this.#tail = 0;
this.#free = Stack.create(max);
this.#size = 0;
this.#calculatedSize = 0;
if (typeof dispose === 'function') {
this.#dispose = dispose;
}
if (typeof disposeAfter === 'function') {
this.#disposeAfter = disposeAfter;
this.#disposed = [];
}
else {
this.#disposeAfter = undefined;
this.#disposed = undefined;
}
this.#hasDispose = !!this.#dispose;
this.#hasDisposeAfter = !!this.#disposeAfter;
this.noDisposeOnSet = !!noDisposeOnSet;
this.noUpdateTTL = !!noUpdateTTL;
this.noDeleteOnFetchRejection = !!noDeleteOnFetchRejection;
this.allowStaleOnFetchRejection = !!allowStaleOnFetchRejection;
this.allowStaleOnFetchAbort = !!allowStaleOnFetchAbort;
this.ignoreFetchAbort = !!ignoreFetchAbort;
// NB: maxEntrySize is set to maxSize if it's set
if (this.maxEntrySize !== 0) {
if (this.#maxSize !== 0) {
if (!isPosInt(this.#maxSize)) {
throw new TypeError('maxSize must be a positive integer if specified');
}
}
if (!isPosInt(this.maxEntrySize)) {
throw new TypeError('maxEntrySize must be a positive integer if specified');
}
this.#initializeSizeTracking();
}
this.allowStale = !!allowStale;
this.noDeleteOnStaleGet = !!noDeleteOnStaleGet;
this.updateAgeOnGet = !!updateAgeOnGet;
this.updateAgeOnHas = !!updateAgeOnHas;
this.ttlResolution =
isPosInt(ttlResolution) || ttlResolution === 0
? ttlResolution
: 1;
this.ttlAutopurge = !!ttlAutopurge;
this.ttl = ttl || 0;
if (this.ttl) {
if (!isPosInt(this.ttl)) {
throw new TypeError('ttl must be a positive integer if specified');
}
this.#initializeTTLTracking();
}
// do not allow completely unbounded caches
if (this.#max === 0 && this.ttl === 0 && this.#maxSize === 0) {
throw new TypeError('At least one of max, maxSize, or ttl is required');
}
if (!this.ttlAutopurge && !this.#max && !this.#maxSize) {
const code = 'LRU_CACHE_UNBOUNDED';
if (shouldWarn(code)) {
warned$1.add(code);
const msg = 'TTL caching without ttlAutopurge, max, or maxSize can ' +
'result in unbounded memory consumption.';
emitWarning(msg, 'UnboundedCacheWarning', code, LRUCache);
}
}
}
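// Example (illustrative): the constructor requires at least one bound —
// max, maxSize (with a sizeCalculation or per-set size), or ttl:
//
//   const cache = new LRUCache({ max: 500, ttl: 60_000, allowStale: false });
//   cache.set('a', 1);
//   cache.getRemainingTTL('a');   // ~60000 right after the set, counting down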
/**
* Return the number of ms left in the item's TTL. If item is not in cache,
* returns `0`. Returns `Infinity` if item is in cache without a defined TTL.
*/
getRemainingTTL(key) {
return this.#keyMap.has(key) ? Infinity : 0;
}
#initializeTTLTracking() {
const ttls = new ZeroArray(this.#max);
const starts = new ZeroArray(this.#max);
this.#ttls = ttls;
this.#starts = starts;
this.#setItemTTL = (index, ttl, start = perf.now()) => {
starts[index] = ttl !== 0 ? start : 0;
ttls[index] = ttl;
if (ttl !== 0 && this.ttlAutopurge) {
const t = setTimeout(() => {
if (this.#isStale(index)) {
this.#delete(this.#keyList[index], 'expire');
}
}, ttl + 1);
// unref() not supported on all platforms
/* c8 ignore start */
if (t.unref) {
t.unref();
}
/* c8 ignore stop */
}
};
this.#updateItemAge = index => {
starts[index] = ttls[index] !== 0 ? perf.now() : 0;
};
this.#statusTTL = (status, index) => {
if (ttls[index]) {
const ttl = ttls[index];
const start = starts[index];
/* c8 ignore next */
if (!ttl || !start)
return;
status.ttl = ttl;
status.start = start;
status.now = cachedNow || getNow();
const age = status.now - start;
status.remainingTTL = ttl - age;
}
};
// debounce calls to perf.now() to the ttlResolution interval so we're
// not hitting that costly call repeatedly.
let cachedNow = 0;
const getNow = () => {
const n = perf.now();
if (this.ttlResolution > 0) {
cachedNow = n;
const t = setTimeout(() => (cachedNow = 0), this.ttlResolution);
// not available on all platforms
/* c8 ignore start */
if (t.unref) {
t.unref();
}
/* c8 ignore stop */
}
return n;
};
this.getRemainingTTL = key => {
const index = this.#keyMap.get(key);
if (index === undefined) {
return 0;
}
const ttl = ttls[index];
const start = starts[index];
if (!ttl || !start) {
return Infinity;
}
const age = (cachedNow || getNow()) - start;
return ttl - age;
};
this.#isStale = index => {
const s = starts[index];
const t = ttls[index];
return !!t && !!s && (cachedNow || getNow()) - s > t;
};
}
// conditionally set private methods related to TTL
#updateItemAge = () => { };
#statusTTL = () => { };
#setItemTTL = () => { };
/* c8 ignore stop */
#isStale = () => false;
#initializeSizeTracking() {
const sizes = new ZeroArray(this.#max);
this.#calculatedSize = 0;
this.#sizes = sizes;
this.#removeItemSize = index => {
this.#calculatedSize -= sizes[index];
sizes[index] = 0;
};
this.#requireSize = (k, v, size, sizeCalculation) => {
// provisionally accept background fetches.
// actual value size will be checked when they return.
if (this.#isBackgroundFetch(v)) {
return 0;
}
if (!isPosInt(size)) {
if (sizeCalculation) {
if (typeof sizeCalculation !== 'function') {
throw new TypeError('sizeCalculation must be a function');
}
size = sizeCalculation(v, k);
if (!isPosInt(size)) {
throw new TypeError('sizeCalculation return invalid (expect positive integer)');
}
}
else {
throw new TypeError('invalid size value (must be positive integer). ' +
'When maxSize or maxEntrySize is used, sizeCalculation ' +
'or size must be set.');
}
}
return size;
};
this.#addItemSize = (index, size, status) => {
sizes[index] = size;
if (this.#maxSize) {
const maxSize = this.#maxSize - sizes[index];
while (this.#calculatedSize > maxSize) {
this.#evict(true);
}
}
this.#calculatedSize += sizes[index];
if (status) {
status.entrySize = size;
status.totalCalculatedSize = this.#calculatedSize;
}
};
}
#removeItemSize = _i => { };
#addItemSize = (_i, _s, _st) => { };
#requireSize = (_k, _v, size, sizeCalculation) => {
if (size || sizeCalculation) {
throw new TypeError('cannot set size without setting maxSize or maxEntrySize on cache');
}
return 0;
};
*#indexes({ allowStale = this.allowStale } = {}) {
if (this.#size) {
for (let i = this.#tail; true;) {
if (!this.#isValidIndex(i)) {
break;
}
if (allowStale || !this.#isStale(i)) {
yield i;
}
if (i === this.#head) {
break;
}
else {
i = this.#prev[i];
}
}
}
}
*#rindexes({ allowStale = this.allowStale } = {}) {
if (this.#size) {
for (let i = this.#head; true;) {
if (!this.#isValidIndex(i)) {
break;
}
if (allowStale || !this.#isStale(i)) {
yield i;
}
if (i === this.#tail) {
break;
}
else {
i = this.#next[i];
}
}
}
}
#isValidIndex(index) {
return (index !== undefined &&
this.#keyMap.get(this.#keyList[index]) === index);
}
/**
* Return a generator yielding `[key, value]` pairs,
* in order from most recently used to least recently used.
*/
*entries() {
for (const i of this.#indexes()) {
if (this.#valList[i] !== undefined &&
this.#keyList[i] !== undefined &&
!this.#isBackgroundFetch(this.#valList[i])) {
yield [this.#keyList[i], this.#valList[i]];
}
}
}
/**
* Inverse order version of {@link LRUCache.entries}
*
* Return a generator yielding `[key, value]` pairs,
* in order from least recently used to most recently used.
*/
*rentries() {
for (const i of this.#rindexes()) {
if (this.#valList[i] !== undefined &&
this.#keyList[i] !== undefined &&
!this.#isBackgroundFetch(this.#valList[i])) {
yield [this.#keyList[i], this.#valList[i]];
}
}
}
/**
* Return a generator yielding the keys in the cache,
* in order from most recently used to least recently used.
*/
*keys() {
for (const i of this.#indexes()) {
const k = this.#keyList[i];
if (k !== undefined &&
!this.#isBackgroundFetch(this.#valList[i])) {
yield k;
}
}
}
/**
* Inverse order version of {@link LRUCache.keys}
*
* Return a generator yielding the keys in the cache,
* in order from least recently used to most recently used.
*/
*rkeys() {
for (const i of this.#rindexes()) {
const k = this.#keyList[i];
if (k !== undefined &&
!this.#isBackgroundFetch(this.#valList[i])) {
yield k;
}
}
}
/**
* Return a generator yielding the values in the cache,
* in order from most recently used to least recently used.
*/
*values() {
for (const i of this.#indexes()) {
const v = this.#valList[i];
if (v !== undefined &&
!this.#isBackgroundFetch(this.#valList[i])) {
yield this.#valList[i];
}
}
}
/**
* Inverse order version of {@link LRUCache.values}
*
* Return a generator yielding the values in the cache,
* in order from least recently used to most recently used.
*/
*rvalues() {
for (const i of this.#rindexes()) {
const v = this.#valList[i];
if (v !== undefined &&
!this.#isBackgroundFetch(this.#valList[i])) {
yield this.#valList[i];
}
}
}
/**
* Iterating over the cache itself yields the same results as
* {@link LRUCache.entries}
*/
[Symbol.iterator]() {
return this.entries();
}
/**
* A String value that is used in the creation of the default string
* description of an object. Called by the built-in method
* `Object.prototype.toString`.
*/
[Symbol.toStringTag] = 'LRUCache';
/**
* Find a value for which the supplied fn method returns a truthy value,
* similar to `Array.find()`. fn is called as `fn(value, key, cache)`.
*/
find(fn, getOptions = {}) {
for (const i of this.#indexes()) {
const v = this.#valList[i];
const value = this.#isBackgroundFetch(v)
? v.__staleWhileFetching
: v;
if (value === undefined)
continue;
if (fn(value, this.#keyList[i], this)) {
return this.get(this.#keyList[i], getOptions);
}
}
}
/**
* Call the supplied function on each item in the cache, in order from most
* recently used to least recently used.
*
* `fn` is called as `fn(value, key, cache)`.
*
* If `thisp` is provided, function will be called in the `this`-context of
* the provided object, or the cache if no `thisp` object is provided.
*
* Does not update age or recency of use, or iterate over stale values.
*/
forEach(fn, thisp = this) {
for (const i of this.#indexes()) {
const v = this.#valList[i];
const value = this.#isBackgroundFetch(v)
? v.__staleWhileFetching
: v;
if (value === undefined)
continue;
fn.call(thisp, value, this.#keyList[i], this);
}
}
/**
* The same as {@link LRUCache.forEach} but items are iterated over in
* reverse order. (ie, less recently used items are iterated over first.)
*/
rforEach(fn, thisp = this) {
for (const i of this.#rindexes()) {
const v = this.#valList[i];
const value = this.#isBackgroundFetch(v)
? v.__staleWhileFetching
: v;
if (value === undefined)
continue;
fn.call(thisp, value, this.#keyList[i], this);
}
}
/**
* Delete any stale entries. Returns true if anything was removed,
* false otherwise.
*/
purgeStale() {
let deleted = false;
for (const i of this.#rindexes({ allowStale: true })) {
if (this.#isStale(i)) {
this.#delete(this.#keyList[i], 'expire');
deleted = true;
}
}
return deleted;
}
/**
* Get the extended info about a given entry, to get its value, size, and
* TTL info simultaneously. Returns `undefined` if the key is not present.
*
* Unlike {@link LRUCache#dump}, which is designed to be portable and survive
* serialization, the `start` value is always the current timestamp, and the
* `ttl` is a calculated remaining time to live (negative if expired).
*
* Always returns stale values, if their info is found in the cache, so be
* sure to check for expirations (ie, a negative {@link LRUCache.Entry#ttl})
* if relevant.
*/
info(key) {
const i = this.#keyMap.get(key);
if (i === undefined)
return undefined;
const v = this.#valList[i];
const value = this.#isBackgroundFetch(v)
? v.__staleWhileFetching
: v;
if (value === undefined)
return undefined;
const entry = { value };
if (this.#ttls && this.#starts) {
const ttl = this.#ttls[i];
const start = this.#starts[i];
if (ttl && start) {
const remain = ttl - (perf.now() - start);
entry.ttl = remain;
entry.start = Date.now();
}
}
if (this.#sizes) {
entry.size = this.#sizes[i];
}
return entry;
}
/**
* Return an array of [key, {@link LRUCache.Entry}] tuples which can be
* passed to {@link LRUCache#load}.
*
* The `start` fields are calculated relative to a portable `Date.now()`
* timestamp, even if `performance.now()` is available.
*
* Stale entries are always included in the `dump`, even if
* {@link LRUCache.OptionsBase.allowStale} is false.
*
* Note: this returns an actual array, not a generator, so it can be more
* easily passed around.
*/
dump() {
const arr = [];
for (const i of this.#indexes({ allowStale: true })) {
const key = this.#keyList[i];
const v = this.#valList[i];
const value = this.#isBackgroundFetch(v)
? v.__staleWhileFetching
: v;
if (value === undefined || key === undefined)
continue;
const entry = { value };
if (this.#ttls && this.#starts) {
entry.ttl = this.#ttls[i];
// always dump the start relative to a portable timestamp
// it's ok for this to be a bit slow, it's a rare operation.
const age = perf.now() - this.#starts[i];
entry.start = Math.floor(Date.now() - age);
}
if (this.#sizes) {
entry.size = this.#sizes[i];
}
arr.unshift([key, entry]);
}
return arr;
}
/**
* Reset the cache and load in the items in entries in the order listed.
*
* The shape of the resulting cache may be different if the same options are
* not used in both caches.
*
* The `start` fields are assumed to be calculated relative to a portable
* `Date.now()` timestamp, even if `performance.now()` is available.
*/
load(arr) {
this.clear();
for (const [key, entry] of arr) {
if (entry.start) {
// entry.start is a portable timestamp, but we may be using
// node's performance.now(), so calculate the offset, so that
// we get the intended remaining TTL, no matter how long it's
// been on ice.
//
// it's ok for this to be a bit slow, it's a rare operation.
const age = Date.now() - entry.start;
entry.start = perf.now() - age;
}
this.set(key, entry.value, entry);
}
}
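// Example (illustrative): dump() and load() round-trip a cache through a
// plain array of [key, entry] tuples, with 'start' normalized to Date.now()
// so a snapshot survives serialization. Assuming `original` is an existing
// LRUCache instance:
//
//   const snapshot = original.dump();
//   const restored = new LRUCache({ max: original.max, ttl: original.ttl });
//   restored.load(snapshot);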
/**
* Add a value to the cache.
*
* Note: if `undefined` is specified as a value, this is an alias for
* {@link LRUCache#delete}
*
* Fields on the {@link LRUCache.SetOptions} options param will override
* their corresponding values in the constructor options for the scope
* of this single `set()` operation.
*
* If `start` is provided, then that will set the effective start
* time for the TTL calculation. Note that this must be a previous
* value of `performance.now()` if supported, or a previous value of
* `Date.now()` if not.
*
* Options object may also include `size`, which will prevent
* calling the `sizeCalculation` function and just use the specified
* number if it is a positive integer, and `noDisposeOnSet` which
* will prevent calling a `dispose` function in the case of
* overwrites.
*
* If the `size` (or return value of `sizeCalculation`) for a given
* entry is greater than `maxEntrySize`, then the item will not be
* added to the cache.
*
* Will update the recency of the entry.
*
* If the value is `undefined`, then this is an alias for
* `cache.delete(key)`. `undefined` is never stored in the cache.
*/
set(k, v, setOptions = {}) {
if (v === undefined) {
this.delete(k);
return this;
}
const { ttl = this.ttl, start, noDisposeOnSet = this.noDisposeOnSet, sizeCalculation = this.sizeCalculation, status, } = setOptions;
let { noUpdateTTL = this.noUpdateTTL } = setOptions;
const size = this.#requireSize(k, v, setOptions.size || 0, sizeCalculation);
// if the item doesn't fit, don't do anything
// NB: maxEntrySize set to maxSize by default
if (this.maxEntrySize && size > this.maxEntrySize) {
if (status) {
status.set = 'miss';
status.maxEntrySizeExceeded = true;
}
// have to delete, in case something is there already.
this.#delete(k, 'set');
return this;
}
let index = this.#size === 0 ? undefined : this.#keyMap.get(k);
if (index === undefined) {
// addition
index = (this.#size === 0
? this.#tail
: this.#free.length !== 0
? this.#free.pop()
: this.#size === this.#max
? this.#evict(false)
: this.#size);
this.#keyList[index] = k;
this.#valList[index] = v;
this.#keyMap.set(k, index);
this.#next[this.#tail] = index;
this.#prev[index] = this.#tail;
this.#tail = index;
this.#size++;
this.#addItemSize(index, size, status);
if (status)
status.set = 'add';
noUpdateTTL = false;
}
else {
// update
this.#moveToTail(index);
const oldVal = this.#valList[index];
if (v !== oldVal) {
if (this.#hasFetchMethod && this.#isBackgroundFetch(oldVal)) {
oldVal.__abortController.abort(new Error('replaced'));
const { __staleWhileFetching: s } = oldVal;
if (s !== undefined && !noDisposeOnSet) {
if (this.#hasDispose) {
this.#dispose?.(s, k, 'set');
}
if (this.#hasDisposeAfter) {
this.#disposed?.push([s, k, 'set']);
}
}
}
else if (!noDisposeOnSet) {
if (this.#hasDispose) {
this.#dispose?.(oldVal, k, 'set');
}
if (this.#hasDisposeAfter) {
this.#disposed?.push([oldVal, k, 'set']);
}
}
this.#removeItemSize(index);
this.#addItemSize(index, size, status);
this.#valList[index] = v;
if (status) {
status.set = 'replace';
const oldValue = oldVal && this.#isBackgroundFetch(oldVal)
? oldVal.__staleWhileFetching
: oldVal;
if (oldValue !== undefined)
status.oldValue = oldValue;
}
}
else if (status) {
status.set = 'update';
}
}
if (ttl !== 0 && !this.#ttls) {
this.#initializeTTLTracking();
}
if (this.#ttls) {
if (!noUpdateTTL) {
this.#setItemTTL(index, ttl, start);
}
if (status)
this.#statusTTL(status, index);
}
if (!noDisposeOnSet && this.#hasDisposeAfter && this.#disposed) {
const dt = this.#disposed;
let task;
while ((task = dt?.shift())) {
this.#disposeAfter?.(...task);
}
}
return this;
}
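// Example (illustrative): per-call options override the constructor defaults
// for a single set(); an explicit size is only meaningful when maxSize or
// maxEntrySize is configured. Here cache, sized, payload and buf stand in
// for caller-provided values:
//
//   cache.set('session:42', payload, { ttl: 5 * 60_000 });
//   sized.set('blob', buf, { size: buf.length });   // sized has maxSize set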
/**
* Evict the least recently used item, returning its value or
* `undefined` if cache is empty.
*/
pop() {
try {
while (this.#size) {
const val = this.#valList[this.#head];
this.#evict(true);
if (this.#isBackgroundFetch(val)) {
if (val.__staleWhileFetching) {
return val.__staleWhileFetching;
}
}
else if (val !== undefined) {
return val;
}
}
}
finally {
if (this.#hasDisposeAfter && this.#disposed) {
const dt = this.#disposed;
let task;
while ((task = dt?.shift())) {
this.#disposeAfter?.(...task);
}
}
}
}
#evict(free) {
const head = this.#head;
const k = this.#keyList[head];
const v = this.#valList[head];
if (this.#hasFetchMethod && this.#isBackgroundFetch(v)) {
v.__abortController.abort(new Error('evicted'));
}
else if (this.#hasDispose || this.#hasDisposeAfter) {
if (this.#hasDispose) {
this.#dispose?.(v, k, 'evict');
}
if (this.#hasDisposeAfter) {
this.#disposed?.push([v, k, 'evict']);
}
}
this.#removeItemSize(head);
// if we aren't about to use the index, then null these out
if (free) {
this.#keyList[head] = undefined;
this.#valList[head] = undefined;
this.#free.push(head);
}
if (this.#size === 1) {
this.#head = this.#tail = 0;
this.#free.length = 0;
}
else {
this.#head = this.#next[head];
}
this.#keyMap.delete(k);
this.#size--;
return head;
}
/**
* Check if a key is in the cache, without updating the recency of use.
* Age is updated only if {@link LRUCache.OptionsBase.updateAgeOnHas} is set
* to `true` in either the options or the constructor.
*
* Will return `false` if the item is stale, even though it is technically in
* the cache. The difference can be determined (if it matters) by using a
* `status` argument, and inspecting the `has` field.
*/
has(k, hasOptions = {}) {
const { updateAgeOnHas = this.updateAgeOnHas, status } = hasOptions;
const index = this.#keyMap.get(k);
if (index !== undefined) {
const v = this.#valList[index];
if (this.#isBackgroundFetch(v) &&
v.__staleWhileFetching === undefined) {
return false;
}
if (!this.#isStale(index)) {
if (updateAgeOnHas) {
this.#updateItemAge(index);
}
if (status) {
status.has = 'hit';
this.#statusTTL(status, index);
}
return true;
}
else if (status) {
status.has = 'stale';
this.#statusTTL(status, index);
}
}
else if (status) {
status.has = 'miss';
}
return false;
}
/**
* Like {@link LRUCache#get} but doesn't update recency or delete stale
* items.
*
* Returns `undefined` if the item is stale, unless
* {@link LRUCache.OptionsBase.allowStale} is set.
*/
peek(k, peekOptions = {}) {
const { allowStale = this.allowStale } = peekOptions;
const index = this.#keyMap.get(k);
if (index === undefined ||
(!allowStale && this.#isStale(index))) {
return;
}
const v = this.#valList[index];
// either stale and allowed, or forcing a refresh of non-stale value
return this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v;
}
#backgroundFetch(k, index, options, context) {
const v = index === undefined ? undefined : this.#valList[index];
if (this.#isBackgroundFetch(v)) {
return v;
}
const ac = new AC();
const { signal } = options;
// when/if our AC signals, then stop listening to theirs.
signal?.addEventListener('abort', () => ac.abort(signal.reason), {
signal: ac.signal,
});
const fetchOpts = {
signal: ac.signal,
options,
context,
};
const cb = (v, updateCache = false) => {
const { aborted } = ac.signal;
const ignoreAbort = options.ignoreFetchAbort && v !== undefined;
if (options.status) {
if (aborted && !updateCache) {
options.status.fetchAborted = true;
options.status.fetchError = ac.signal.reason;
if (ignoreAbort)
options.status.fetchAbortIgnored = true;
}
else {
options.status.fetchResolved = true;
}
}
if (aborted && !ignoreAbort && !updateCache) {
return fetchFail(ac.signal.reason);
}
// either we didn't abort, and are still here, or we did, and ignored
const bf = p;
if (this.#valList[index] === p) {
if (v === undefined) {
if (bf.__staleWhileFetching) {
this.#valList[index] = bf.__staleWhileFetching;
}
else {
this.#delete(k, 'fetch');
}
}
else {
if (options.status)
options.status.fetchUpdated = true;
this.set(k, v, fetchOpts.options);
}
}
return v;
};
const eb = (er) => {
if (options.status) {
options.status.fetchRejected = true;
options.status.fetchError = er;
}
return fetchFail(er);
};
const fetchFail = (er) => {
const { aborted } = ac.signal;
const allowStaleAborted = aborted && options.allowStaleOnFetchAbort;
const allowStale = allowStaleAborted || options.allowStaleOnFetchRejection;
const noDelete = allowStale || options.noDeleteOnFetchRejection;
const bf = p;
if (this.#valList[index] === p) {
// if we allow stale on fetch rejections, then we need to ensure that
// the stale value is not removed from the cache when the fetch fails.
const del = !noDelete || bf.__staleWhileFetching === undefined;
if (del) {
this.#delete(k, 'fetch');
}
else if (!allowStaleAborted) {
// still replace the *promise* with the stale value,
// since we are done with the promise at this point.
// leave it untouched if we're still waiting for an
// aborted background fetch that hasn't yet returned.
this.#valList[index] = bf.__staleWhileFetching;
}
}
if (allowStale) {
if (options.status && bf.__staleWhileFetching !== undefined) {
options.status.returnedStale = true;
}
return bf.__staleWhileFetching;
}
else if (bf.__returned === bf) {
throw er;
}
};
const pcall = (res, rej) => {
const fmp = this.#fetchMethod?.(k, v, fetchOpts);
if (fmp && fmp instanceof Promise) {
fmp.then(v => res(v === undefined ? undefined : v), rej);
}
// ignored, we go until we finish, regardless.
// defer check until we are actually aborting,
// so fetchMethod can override.
ac.signal.addEventListener('abort', () => {
if (!options.ignoreFetchAbort ||
options.allowStaleOnFetchAbort) {
res(undefined);
// when it eventually resolves, update the cache.
if (options.allowStaleOnFetchAbort) {
res = v => cb(v, true);
}
}
});
};
if (options.status)
options.status.fetchDispatched = true;
const p = new Promise(pcall).then(cb, eb);
const bf = Object.assign(p, {
__abortController: ac,
__staleWhileFetching: v,
__returned: undefined,
});
if (index === undefined) {
// internal, don't expose status.
this.set(k, bf, { ...fetchOpts.options, status: undefined });
index = this.#keyMap.get(k);
}
else {
this.#valList[index] = bf;
}
return bf;
}
#isBackgroundFetch(p) {
if (!this.#hasFetchMethod)
return false;
const b = p;
return (!!b &&
b instanceof Promise &&
b.hasOwnProperty('__staleWhileFetching') &&
b.__abortController instanceof AC);
}
async fetch(k, fetchOptions = {}) {
const {
// get options
allowStale = this.allowStale, updateAgeOnGet = this.updateAgeOnGet, noDeleteOnStaleGet = this.noDeleteOnStaleGet,
// set options
ttl = this.ttl, noDisposeOnSet = this.noDisposeOnSet, size = 0, sizeCalculation = this.sizeCalculation, noUpdateTTL = this.noUpdateTTL,
// fetch exclusive options
noDeleteOnFetchRejection = this.noDeleteOnFetchRejection, allowStaleOnFetchRejection = this.allowStaleOnFetchRejection, ignoreFetchAbort = this.ignoreFetchAbort, allowStaleOnFetchAbort = this.allowStaleOnFetchAbort, context, forceRefresh = false, status, signal, } = fetchOptions;
if (!this.#hasFetchMethod) {
if (status)
status.fetch = 'get';
return this.get(k, {
allowStale,
updateAgeOnGet,
noDeleteOnStaleGet,
status,
});
}
const options = {
allowStale,
updateAgeOnGet,
noDeleteOnStaleGet,
ttl,
noDisposeOnSet,
size,
sizeCalculation,
noUpdateTTL,
noDeleteOnFetchRejection,
allowStaleOnFetchRejection,
allowStaleOnFetchAbort,
ignoreFetchAbort,
status,
signal,
};
let index = this.#keyMap.get(k);
if (index === undefined) {
if (status)
status.fetch = 'miss';
const p = this.#backgroundFetch(k, index, options, context);
return (p.__returned = p);
}
else {
// in cache, maybe already fetching
const v = this.#valList[index];
if (this.#isBackgroundFetch(v)) {
const stale = allowStale && v.__staleWhileFetching !== undefined;
if (status) {
status.fetch = 'inflight';
if (stale)
status.returnedStale = true;
}
return stale ? v.__staleWhileFetching : (v.__returned = v);
}
// if we force a refresh, that means do NOT serve the cached value,
// unless we are already in the process of refreshing the cache.
const isStale = this.#isStale(index);
if (!forceRefresh && !isStale) {
if (status)
status.fetch = 'hit';
this.#moveToTail(index);
if (updateAgeOnGet) {
this.#updateItemAge(index);
}
if (status)
this.#statusTTL(status, index);
return v;
}
// ok, it is stale or a forced refresh, and not already fetching.
// refresh the cache.
const p = this.#backgroundFetch(k, index, options, context);
const hasStale = p.__staleWhileFetching !== undefined;
const staleVal = hasStale && allowStale;
if (status) {
status.fetch = isStale ? 'stale' : 'refresh';
if (staleVal && isStale)
status.returnedStale = true;
}
return staleVal ? p.__staleWhileFetching : (p.__returned = p);
}
}
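// Example (illustrative): fetch() only does async work when a fetchMethod
// was provided; otherwise it degrades to a plain get(). A sketch, assuming a
// caller-supplied loadUser() helper:
//
//   const users = new LRUCache({
//     max: 100,
//     ttl: 30_000,
//     fetchMethod: async (key, staleValue, { signal }) => loadUser(key, { signal }),
//   });
//   const u = await users.fetch('user:1');   // cached on subsequent calls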
async forceFetch(k, fetchOptions = {}) {
const v = await this.fetch(k, fetchOptions);
if (v === undefined)
throw new Error('fetch() returned undefined');
return v;
}
memo(k, memoOptions = {}) {
const memoMethod = this.#memoMethod;
if (!memoMethod) {
throw new Error('no memoMethod provided to constructor');
}
const { context, forceRefresh, ...options } = memoOptions;
const v = this.get(k, options);
if (!forceRefresh && v !== undefined)
return v;
const vv = memoMethod(k, v, {
options,
context,
});
this.set(k, vv, options);
return vv;
}
/**
* Return a value from the cache. Will update the recency of the cache
* entry found.
*
* If the key is not found, get() will return `undefined`.
*/
get(k, getOptions = {}) {
const { allowStale = this.allowStale, updateAgeOnGet = this.updateAgeOnGet, noDeleteOnStaleGet = this.noDeleteOnStaleGet, status, } = getOptions;
const index = this.#keyMap.get(k);
if (index !== undefined) {
const value = this.#valList[index];
const fetching = this.#isBackgroundFetch(value);
if (status)
this.#statusTTL(status, index);
if (this.#isStale(index)) {
if (status)
status.get = 'stale';
// delete only if not an in-flight background fetch
if (!fetching) {
if (!noDeleteOnStaleGet) {
this.#delete(k, 'expire');
}
if (status && allowStale)
status.returnedStale = true;
return allowStale ? value : undefined;
}
else {
if (status &&
allowStale &&
value.__staleWhileFetching !== undefined) {
status.returnedStale = true;
}
return allowStale ? value.__staleWhileFetching : undefined;
}
}
else {
if (status)
status.get = 'hit';
// if we're currently fetching it, we don't actually have it yet
// it's not stale, which means this isn't a staleWhileRefetching.
// If it's not stale, and fetching, AND has a __staleWhileFetching
// value, then that means the user fetched with {forceRefresh:true},
// so it's safe to return that value.
if (fetching) {
return value.__staleWhileFetching;
}
this.#moveToTail(index);
if (updateAgeOnGet) {
this.#updateItemAge(index);
}
return value;
}
}
else if (status) {
status.get = 'miss';
}
}
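// Example (illustrative): a status object passed to get() reports how the
// lookup resolved alongside TTL details:
//
//   const status = {};
//   cache.get('a', { status });
//   status.get            // 'hit' | 'stale' | 'miss'
//   status.remainingTTL   // present when the entry has a ttl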
#connect(p, n) {
this.#prev[n] = p;
this.#next[p] = n;
}
#moveToTail(index) {
// if tail already, nothing to do
// if head, move head to next[index]
// else
// move next[prev[index]] to next[index] (head has no prev)
// move prev[next[index]] to prev[index]
// prev[index] = tail
// next[tail] = index
// tail = index
if (index !== this.#tail) {
if (index === this.#head) {
this.#head = this.#next[index];
}
else {
this.#connect(this.#prev[index], this.#next[index]);
}
this.#connect(this.#tail, index);
this.#tail = index;
}
}
/**
* Deletes a key out of the cache.
*
* Returns true if the key was deleted, false otherwise.
*/
delete(k) {
return this.#delete(k, 'delete');
}
#delete(k, reason) {
let deleted = false;
if (this.#size !== 0) {
const index = this.#keyMap.get(k);
if (index !== undefined) {
deleted = true;
if (this.#size === 1) {
this.#clear(reason);
}
else {
this.#removeItemSize(index);
const v = this.#valList[index];
if (this.#isBackgroundFetch(v)) {
v.__abortController.abort(new Error('deleted'));
}
else if (this.#hasDispose || this.#hasDisposeAfter) {
if (this.#hasDispose) {
this.#dispose?.(v, k, reason);
}
if (this.#hasDisposeAfter) {
this.#disposed?.push([v, k, reason]);
}
}
this.#keyMap.delete(k);
this.#keyList[index] = undefined;
this.#valList[index] = undefined;
if (index === this.#tail) {
this.#tail = this.#prev[index];
}
else if (index === this.#head) {
this.#head = this.#next[index];
}
else {
const pi = this.#prev[index];
this.#next[pi] = this.#next[index];
const ni = this.#next[index];
this.#prev[ni] = this.#prev[index];
}
this.#size--;
this.#free.push(index);
}
}
}
if (this.#hasDisposeAfter && this.#disposed?.length) {
const dt = this.#disposed;
let task;
while ((task = dt?.shift())) {
this.#disposeAfter?.(...task);
}
}
return deleted;
}
/**
* Clear the cache entirely, throwing away all values.
*/
clear() {
return this.#clear('delete');
}
#clear(reason) {
for (const index of this.#rindexes({ allowStale: true })) {
const v = this.#valList[index];
if (this.#isBackgroundFetch(v)) {
v.__abortController.abort(new Error('deleted'));
}
else {
const k = this.#keyList[index];
if (this.#hasDispose) {
this.#dispose?.(v, k, reason);
}
if (this.#hasDisposeAfter) {
this.#disposed?.push([v, k, reason]);
}
}
}
this.#keyMap.clear();
this.#valList.fill(undefined);
this.#keyList.fill(undefined);
if (this.#ttls && this.#starts) {
this.#ttls.fill(0);
this.#starts.fill(0);
}
if (this.#sizes) {
this.#sizes.fill(0);
}
this.#head = 0;
this.#tail = 0;
this.#free.length = 0;
this.#calculatedSize = 0;
this.#size = 0;
if (this.#hasDisposeAfter && this.#disposed) {
const dt = this.#disposed;
let task;
while ((task = dt?.shift())) {
this.#disposeAfter?.(...task);
}
}
}
}
const proc = typeof process === 'object' && process
? process
: {
stdout: null,
stderr: null,
};
/**
* Return true if the argument is a Minipass stream, Node stream, or something
* else that Minipass can interact with.
*/
const isStream = (s) => !!s &&
typeof s === 'object' &&
(s instanceof Minipass ||
s instanceof Stream$1 ||
isReadable(s) ||
isWritable(s));
/**
* Return true if the argument is a valid {@link Minipass.Readable}
*/
const isReadable = (s) => !!s &&
typeof s === 'object' &&
s instanceof EventEmitter$4 &&
typeof s.pipe === 'function' &&
// node core Writable streams have a pipe() method, but it throws
s.pipe !== Stream$1.Writable.prototype.pipe;
/**
* Return true if the argument is a valid {@link Minipass.Writable}
*/
const isWritable = (s) => !!s &&
typeof s === 'object' &&
s instanceof EventEmitter$4 &&
typeof s.write === 'function' &&
typeof s.end === 'function';
const EOF = Symbol('EOF');
const MAYBE_EMIT_END = Symbol('maybeEmitEnd');
const EMITTED_END = Symbol('emittedEnd');
const EMITTING_END = Symbol('emittingEnd');
const EMITTED_ERROR = Symbol('emittedError');
const CLOSED$1 = Symbol('closed');
const READ = Symbol('read');
const FLUSH = Symbol('flush');
const FLUSHCHUNK = Symbol('flushChunk');
const ENCODING$1 = Symbol('encoding');
const DECODER = Symbol('decoder');
const FLOWING = Symbol('flowing');
const PAUSED = Symbol('paused');
const RESUME = Symbol('resume');
const BUFFER = Symbol('buffer');
const PIPES = Symbol('pipes');
const BUFFERLENGTH = Symbol('bufferLength');
const BUFFERPUSH = Symbol('bufferPush');
const BUFFERSHIFT = Symbol('bufferShift');
const OBJECTMODE = Symbol('objectMode');
// internal event when stream is destroyed
const DESTROYED = Symbol('destroyed');
// internal event when stream has an error
const ERROR = Symbol('error');
const EMITDATA = Symbol('emitData');
const EMITEND = Symbol('emitEnd');
const EMITEND2 = Symbol('emitEnd2');
const ASYNC = Symbol('async');
const ABORT = Symbol('abort');
const ABORTED = Symbol('aborted');
const SIGNAL = Symbol('signal');
const DATALISTENERS = Symbol('dataListeners');
const DISCARDED = Symbol('discarded');
const defer$3 = (fn) => Promise.resolve().then(fn);
const nodefer = (fn) => fn();
const isEndish = (ev) => ev === 'end' || ev === 'finish' || ev === 'prefinish';
const isArrayBufferLike = (b) => b instanceof ArrayBuffer ||
(!!b &&
typeof b === 'object' &&
b.constructor &&
b.constructor.name === 'ArrayBuffer' &&
b.byteLength >= 0);
const isArrayBufferView = (b) => !Buffer.isBuffer(b) && ArrayBuffer.isView(b);
/**
* Internal class representing a pipe to a destination stream.
*
* @internal
*/
class Pipe {
src;
dest;
opts;
ondrain;
constructor(src, dest, opts) {
this.src = src;
this.dest = dest;
this.opts = opts;
this.ondrain = () => src[RESUME]();
this.dest.on('drain', this.ondrain);
}
unpipe() {
this.dest.removeListener('drain', this.ondrain);
}
// only here for the prototype
/* c8 ignore start */
proxyErrors(_er) { }
/* c8 ignore stop */
end() {
this.unpipe();
if (this.opts.end)
this.dest.end();
}
}
/**
* Internal class representing a pipe to a destination stream where
* errors are proxied.
*
* @internal
*/
class PipeProxyErrors extends Pipe {
unpipe() {
this.src.removeListener('error', this.proxyErrors);
super.unpipe();
}
constructor(src, dest, opts) {
super(src, dest, opts);
this.proxyErrors = er => dest.emit('error', er);
src.on('error', this.proxyErrors);
}
}
const isObjectModeOptions = (o) => !!o.objectMode;
const isEncodingOptions = (o) => !o.objectMode && !!o.encoding && o.encoding !== 'buffer';
/**
* Main export, the Minipass class
*
* `RType` is the type of data emitted, defaults to Buffer
*
* `WType` is the type of data to be written, if RType is buffer or string,
* then any {@link Minipass.ContiguousData} is allowed.
*
* `Events` is the set of event handler signatures that this object
* will emit, see {@link Minipass.Events}
*/
class Minipass extends EventEmitter$4 {
[FLOWING] = false;
[PAUSED] = false;
[PIPES] = [];
[BUFFER] = [];
[OBJECTMODE];
[ENCODING$1];
[ASYNC];
[DECODER];
[EOF] = false;
[EMITTED_END] = false;
[EMITTING_END] = false;
[CLOSED$1] = false;
[EMITTED_ERROR] = null;
[BUFFERLENGTH] = 0;
[DESTROYED] = false;
[SIGNAL];
[ABORTED] = false;
[DATALISTENERS] = 0;
[DISCARDED] = false;
/**
* true if the stream can be written
*/
writable = true;
/**
* true if the stream can be read
*/
readable = true;
/**
* If `RType` is Buffer, then options do not need to be provided.
* Otherwise, an options object must be provided to specify either
* {@link Minipass.SharedOptions.objectMode} or
* {@link Minipass.SharedOptions.encoding}, as appropriate.
*/
constructor(...args) {
const options = (args[0] ||
{});
super();
if (options.objectMode && typeof options.encoding === 'string') {
throw new TypeError('Encoding and objectMode may not be used together');
}
if (isObjectModeOptions(options)) {
this[OBJECTMODE] = true;
this[ENCODING$1] = null;
}
else if (isEncodingOptions(options)) {
this[ENCODING$1] = options.encoding;
this[OBJECTMODE] = false;
}
else {
this[OBJECTMODE] = false;
this[ENCODING$1] = null;
}
this[ASYNC] = !!options.async;
this[DECODER] = this[ENCODING$1]
? new StringDecoder(this[ENCODING$1])
: null;
//@ts-ignore - private option for debugging and testing
if (options && options.debugExposeBuffer === true) {
Object.defineProperty(this, 'buffer', { get: () => this[BUFFER] });
}
//@ts-ignore - private option for debugging and testing
if (options && options.debugExposePipes === true) {
Object.defineProperty(this, 'pipes', { get: () => this[PIPES] });
}
const { signal } = options;
if (signal) {
this[SIGNAL] = signal;
if (signal.aborted) {
this[ABORT]();
}
else {
signal.addEventListener('abort', () => this[ABORT]());
}
}
}
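// Illustrative sketch (comments only, not executed by this bundle): the three
// supported construction modes. As enforced above, objectMode and a string
// encoding cannot be combined.
//
//   const bufStream = new Minipass();                      // emits Buffers
//   const strStream = new Minipass({ encoding: 'utf8' });  // emits strings
//   const objStream = new Minipass({ objectMode: true });  // emits arbitrary values
//   // new Minipass({ objectMode: true, encoding: 'utf8' }) -> TypeError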
/**
* The amount of data stored in the buffer waiting to be read.
*
* For Buffer streams, this will be the total byte length.
* For string encoding streams, this will be the string character length,
* according to JavaScript's `string.length` logic.
* For objectMode streams, this is a count of the items waiting to be
* emitted.
*/
get bufferLength() {
return this[BUFFERLENGTH];
}
/**
* The `BufferEncoding` currently in use, or `null`
*/
get encoding() {
return this[ENCODING$1];
}
/**
* @deprecated - This is a read-only property
*/
set encoding(_enc) {
throw new Error('Encoding must be set at instantiation time');
}
/**
* @deprecated - Encoding may only be set at instantiation time
*/
setEncoding(_enc) {
throw new Error('Encoding must be set at instantiation time');
}
/**
* True if this is an objectMode stream
*/
get objectMode() {
return this[OBJECTMODE];
}
/**
* @deprecated - This is a read-only property
*/
set objectMode(_om) {
throw new Error('objectMode must be set at instantiation time');
}
/**
* true if this is an async stream
*/
get ['async']() {
return this[ASYNC];
}
/**
* Set to true to make this stream async.
*
* Once set, it cannot be unset, as this would potentially cause incorrect
* behavior. That is, a sync stream can be made async, but an async stream
* cannot be safely made sync.
*/
set ['async'](a) {
this[ASYNC] = this[ASYNC] || !!a;
}
// drop everything and get out of the flow completely
[ABORT]() {
this[ABORTED] = true;
this.emit('abort', this[SIGNAL]?.reason);
this.destroy(this[SIGNAL]?.reason);
}
/**
* True if the stream has been aborted.
*/
get aborted() {
return this[ABORTED];
}
/**
* No-op setter. Stream aborted status is set via the AbortSignal provided
* in the constructor options.
*/
set aborted(_) { }
write(chunk, encoding, cb) {
if (this[ABORTED])
return false;
if (this[EOF])
throw new Error('write after end');
if (this[DESTROYED]) {
this.emit('error', Object.assign(new Error('Cannot call write after a stream was destroyed'), { code: 'ERR_STREAM_DESTROYED' }));
return true;
}
if (typeof encoding === 'function') {
cb = encoding;
encoding = 'utf8';
}
if (!encoding)
encoding = 'utf8';
const fn = this[ASYNC] ? defer$3 : nodefer;
// convert array buffers and typed array views into buffers
// at some point in the future, we may want to do the opposite!
// leave strings and buffers as-is
// anything else is only allowed in objectMode, so throw
if (!this[OBJECTMODE] && !Buffer.isBuffer(chunk)) {
if (isArrayBufferView(chunk)) {
//@ts-ignore - sinful unsafe type changing
chunk = Buffer.from(chunk.buffer, chunk.byteOffset, chunk.byteLength);
}
else if (isArrayBufferLike(chunk)) {
//@ts-ignore - sinful unsafe type changing
chunk = Buffer.from(chunk);
}
else if (typeof chunk !== 'string') {
throw new Error('Non-contiguous data written to non-objectMode stream');
}
}
// handle object mode up front, since it's simpler
// this yields better performance, fewer checks later.
if (this[OBJECTMODE]) {
// maybe impossible?
/* c8 ignore start */
if (this[FLOWING] && this[BUFFERLENGTH] !== 0)
this[FLUSH](true);
/* c8 ignore stop */
if (this[FLOWING])
this.emit('data', chunk);
else
this[BUFFERPUSH](chunk);
if (this[BUFFERLENGTH] !== 0)
this.emit('readable');
if (cb)
fn(cb);
return this[FLOWING];
}
// at this point the chunk is a buffer or string
// don't buffer it up or send it to the decoder
if (!chunk.length) {
if (this[BUFFERLENGTH] !== 0)
this.emit('readable');
if (cb)
fn(cb);
return this[FLOWING];
}
// fast-path writing strings of same encoding to a stream with
// an empty buffer, skipping the buffer/decoder dance
if (typeof chunk === 'string' &&
// unless it is a string already ready for us to use
!(encoding === this[ENCODING$1] && !this[DECODER]?.lastNeed)) {
//@ts-ignore - sinful unsafe type change
chunk = Buffer.from(chunk, encoding);
}
if (Buffer.isBuffer(chunk) && this[ENCODING$1]) {
//@ts-ignore - sinful unsafe type change
chunk = this[DECODER].write(chunk);
}
// Note: flushing CAN potentially switch us into not-flowing mode
if (this[FLOWING] && this[BUFFERLENGTH] !== 0)
this[FLUSH](true);
if (this[FLOWING])
this.emit('data', chunk);
else
this[BUFFERPUSH](chunk);
if (this[BUFFERLENGTH] !== 0)
this.emit('readable');
if (cb)
fn(cb);
return this[FLOWING];
}
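// Illustrative sketch (comments only): write() accepts strings, Buffers,
// typed-array views, and ArrayBuffers (converted to Buffers above), and its
// return value reflects whether the stream is currently flowing.
//
//   const mp = new Minipass({ encoding: 'utf8' });
//   const flowing = mp.write('hello, ', 'utf8', () => console.log('chunk handled'));
//   mp.write(Buffer.from('world'));
//   mp.end();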
/**
* Low-level explicit read method.
*
* In objectMode, the argument is ignored, and one item is returned if
* available.
*
* `n` is the number of bytes (or in the case of encoding streams,
* characters) to consume. If `n` is not provided, then the entire buffer
* is returned, or `null` is returned if no data is available.
*
* If `n` is greater than the amount of data in the internal buffer,
* then `null` is returned.
*/
read(n) {
if (this[DESTROYED])
return null;
this[DISCARDED] = false;
if (this[BUFFERLENGTH] === 0 ||
n === 0 ||
(n && n > this[BUFFERLENGTH])) {
this[MAYBE_EMIT_END]();
return null;
}
if (this[OBJECTMODE])
n = null;
if (this[BUFFER].length > 1 && !this[OBJECTMODE]) {
// not object mode, so if we have an encoding, then RType is string
// otherwise, must be Buffer
this[BUFFER] = [
(this[ENCODING$1]
? this[BUFFER].join('')
: Buffer.concat(this[BUFFER], this[BUFFERLENGTH])),
];
}
const ret = this[READ](n || null, this[BUFFER][0]);
this[MAYBE_EMIT_END]();
return ret;
}
[READ](n, chunk) {
if (this[OBJECTMODE])
this[BUFFERSHIFT]();
else {
const c = chunk;
if (n === c.length || n === null)
this[BUFFERSHIFT]();
else if (typeof c === 'string') {
this[BUFFER][0] = c.slice(n);
chunk = c.slice(0, n);
this[BUFFERLENGTH] -= n;
}
else {
this[BUFFER][0] = c.subarray(n);
chunk = c.subarray(0, n);
this[BUFFERLENGTH] -= n;
}
}
this.emit('data', chunk);
if (!this[BUFFER].length && !this[EOF])
this.emit('drain');
return chunk;
}
end(chunk, encoding, cb) {
if (typeof chunk === 'function') {
cb = chunk;
chunk = undefined;
}
if (typeof encoding === 'function') {
cb = encoding;
encoding = 'utf8';
}
if (chunk !== undefined)
this.write(chunk, encoding);
if (cb)
this.once('end', cb);
this[EOF] = true;
this.writable = false;
// if we haven't written anything, then go ahead and emit,
// even if we're not reading.
// we'll re-emit if a new 'end' listener is added anyway.
// This makes MP more suitable to write-only use cases.
if (this[FLOWING] || !this[PAUSED])
this[MAYBE_EMIT_END]();
return this;
}
// don't let the internal resume be overwritten
[RESUME]() {
if (this[DESTROYED])
return;
if (!this[DATALISTENERS] && !this[PIPES].length) {
this[DISCARDED] = true;
}
this[PAUSED] = false;
this[FLOWING] = true;
this.emit('resume');
if (this[BUFFER].length)
this[FLUSH]();
else if (this[EOF])
this[MAYBE_EMIT_END]();
else
this.emit('drain');
}
/**
* Resume the stream if it is currently in a paused state
*
* If called when there are no pipe destinations or `data` event listeners,
* this will place the stream in a "discarded" state, where all data will
* be thrown away. The discarded state is removed if a pipe destination or
* data handler is added, if pause() is called, or if any synchronous or
* asynchronous iteration is started.
*/
resume() {
return this[RESUME]();
}
/**
* Pause the stream
*/
pause() {
this[FLOWING] = false;
this[PAUSED] = true;
this[DISCARDED] = false;
}
/**
* true if the stream has been forcibly destroyed
*/
get destroyed() {
return this[DESTROYED];
}
/**
* true if the stream is currently in a flowing state, meaning that
* any writes will be immediately emitted.
*/
get flowing() {
return this[FLOWING];
}
/**
* true if the stream is currently in a paused state
*/
get paused() {
return this[PAUSED];
}
[BUFFERPUSH](chunk) {
if (this[OBJECTMODE])
this[BUFFERLENGTH] += 1;
else
this[BUFFERLENGTH] += chunk.length;
this[BUFFER].push(chunk);
}
[BUFFERSHIFT]() {
if (this[OBJECTMODE])
this[BUFFERLENGTH] -= 1;
else
this[BUFFERLENGTH] -= this[BUFFER][0].length;
return this[BUFFER].shift();
}
[FLUSH](noDrain = false) {
do { } while (this[FLUSHCHUNK](this[BUFFERSHIFT]()) &&
this[BUFFER].length);
if (!noDrain && !this[BUFFER].length && !this[EOF])
this.emit('drain');
}
[FLUSHCHUNK](chunk) {
this.emit('data', chunk);
return this[FLOWING];
}
/**
* Pipe all data emitted by this stream into the destination provided.
*
* Triggers the flow of data.
*/
pipe(dest, opts) {
if (this[DESTROYED])
return dest;
this[DISCARDED] = false;
const ended = this[EMITTED_END];
opts = opts || {};
if (dest === proc.stdout || dest === proc.stderr)
opts.end = false;
else
opts.end = opts.end !== false;
opts.proxyErrors = !!opts.proxyErrors;
// piping an ended stream ends immediately
if (ended) {
if (opts.end)
dest.end();
}
else {
// "as" here just ignores the WType, which pipes don't care about,
// since they're only consuming from us, and writing to the dest
this[PIPES].push(!opts.proxyErrors
? new Pipe(this, dest, opts)
: new PipeProxyErrors(this, dest, opts));
if (this[ASYNC])
defer$3(() => this[RESUME]());
else
this[RESUME]();
}
return dest;
}
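// Illustrative sketch (comments only): piping starts the flow of data. As
// handled above, piping to the process stdout/stderr never ends the
// destination; for other writables, pass { end: false } to keep them open.
//
//   const src = new Minipass({ encoding: 'utf8' });
//   src.pipe(process.stdout);
//   src.write('written straight through\n');
//   src.end();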
/**
* Fully unhook a piped destination stream.
*
* If the destination stream was the only consumer of this stream (ie,
* there are no other piped destinations or `'data'` event listeners)
* then the flow of data will stop until there is another consumer or
* {@link Minipass#resume} is explicitly called.
*/
unpipe(dest) {
const p = this[PIPES].find(p => p.dest === dest);
if (p) {
if (this[PIPES].length === 1) {
if (this[FLOWING] && this[DATALISTENERS] === 0) {
this[FLOWING] = false;
}
this[PIPES] = [];
}
else
this[PIPES].splice(this[PIPES].indexOf(p), 1);
p.unpipe();
}
}
/**
* Alias for {@link Minipass#on}
*/
addListener(ev, handler) {
return this.on(ev, handler);
}
/**
* Mostly identical to `EventEmitter.on`, with the following
* behavior differences to prevent data loss and unnecessary hangs:
*
* - Adding a 'data' event handler will trigger the flow of data
*
* - Adding a 'readable' event handler when there is data waiting to be read
* will cause 'readable' to be emitted immediately.
*
* - Adding an 'endish' event handler ('end', 'finish', etc.) which has
* already passed will cause the event to be emitted immediately and all
* handlers removed.
*
* - Adding an 'error' event handler after an error has been emitted will
* cause the event to be re-emitted immediately with the error previously
* raised.
*/
on(ev, handler) {
const ret = super.on(ev, handler);
if (ev === 'data') {
this[DISCARDED] = false;
this[DATALISTENERS]++;
if (!this[PIPES].length && !this[FLOWING]) {
this[RESUME]();
}
}
else if (ev === 'readable' && this[BUFFERLENGTH] !== 0) {
super.emit('readable');
}
else if (isEndish(ev) && this[EMITTED_END]) {
super.emit(ev);
this.removeAllListeners(ev);
}
else if (ev === 'error' && this[EMITTED_ERROR]) {
const h = handler;
if (this[ASYNC])
defer$3(() => h.call(this, this[EMITTED_ERROR]));
else
h.call(this, this[EMITTED_ERROR]);
}
return ret;
}
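// Illustrative sketch (comments only): adding a 'data' listener resumes the
// flow, and 'end' listeners added after the fact still fire, per the
// behavior documented above.
//
//   const mp = new Minipass({ encoding: 'utf8' });
//   mp.end('already buffered');
//   mp.on('data', c => console.log('chunk:', c)); // triggers the flush + 'end'
//   mp.on('end', () => console.log('late listener, emitted immediately'));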
/**
* Alias for {@link Minipass#off}
*/
removeListener(ev, handler) {
return this.off(ev, handler);
}
/**
* Mostly identical to `EventEmitter.off`
*
* If a 'data' event handler is removed, and it was the last consumer
* (ie, there are no pipe destinations or other 'data' event listeners),
* then the flow of data will stop until there is another consumer or
* {@link Minipass#resume} is explicitly called.
*/
off(ev, handler) {
const ret = super.off(ev, handler);
// if we previously had listeners, and now we don't, and we don't
// have any pipes, then stop the flow, unless it's been explicitly
// put in a discarded flowing state via stream.resume().
if (ev === 'data') {
this[DATALISTENERS] = this.listeners('data').length;
if (this[DATALISTENERS] === 0 &&
!this[DISCARDED] &&
!this[PIPES].length) {
this[FLOWING] = false;
}
}
return ret;
}
/**
* Mostly identical to `EventEmitter.removeAllListeners`
*
* If all 'data' event handlers are removed, and they were the last consumer
* (ie, there are no pipe destinations), then the flow of data will stop
* until there is another consumer or {@link Minipass#resume} is explicitly
* called.
*/
removeAllListeners(ev) {
const ret = super.removeAllListeners(ev);
if (ev === 'data' || ev === undefined) {
this[DATALISTENERS] = 0;
if (!this[DISCARDED] && !this[PIPES].length) {
this[FLOWING] = false;
}
}
return ret;
}
/**
* true if the 'end' event has been emitted
*/
get emittedEnd() {
return this[EMITTED_END];
}
[MAYBE_EMIT_END]() {
if (!this[EMITTING_END] &&
!this[EMITTED_END] &&
!this[DESTROYED] &&
this[BUFFER].length === 0 &&
this[EOF]) {
this[EMITTING_END] = true;
this.emit('end');
this.emit('prefinish');
this.emit('finish');
if (this[CLOSED$1])
this.emit('close');
this[EMITTING_END] = false;
}
}
/**
* Mostly identical to `EventEmitter.emit`, with the following
* behavior differences to prevent data loss and unnecessary hangs:
*
* If the stream has been destroyed, and the event is something other
* than 'close' or 'error', then `false` is returned and no handlers
* are called.
*
* If the event is 'end', and has already been emitted, then the event
* is ignored. If the stream is in a paused or non-flowing state, then
* the event will be deferred until data flow resumes. If the stream is
* async, then handlers will be called on the next tick rather than
* immediately.
*
* If the event is 'close', and 'end' has not yet been emitted, then
* the event will be deferred until after 'end' is emitted.
*
* If the event is 'error', and an AbortSignal was provided for the stream,
* and there are no listeners, then the event is ignored, matching the
* behavior of node core streams in the presence of an AbortSignal.
*
* If the event is 'finish' or 'prefinish', then all listeners will be
* removed after emitting the event, to prevent double-firing.
*/
emit(ev, ...args) {
const data = args[0];
// error and close are only events allowed after calling destroy()
if (ev !== 'error' &&
ev !== 'close' &&
ev !== DESTROYED &&
this[DESTROYED]) {
return false;
}
else if (ev === 'data') {
return !this[OBJECTMODE] && !data
? false
: this[ASYNC]
? (defer$3(() => this[EMITDATA](data)), true)
: this[EMITDATA](data);
}
else if (ev === 'end') {
return this[EMITEND]();
}
else if (ev === 'close') {
this[CLOSED$1] = true;
// don't emit close before 'end' and 'finish'
if (!this[EMITTED_END] && !this[DESTROYED])
return false;
const ret = super.emit('close');
this.removeAllListeners('close');
return ret;
}
else if (ev === 'error') {
this[EMITTED_ERROR] = data;
super.emit(ERROR, data);
const ret = !this[SIGNAL] || this.listeners('error').length
? super.emit('error', data)
: false;
this[MAYBE_EMIT_END]();
return ret;
}
else if (ev === 'resume') {
const ret = super.emit('resume');
this[MAYBE_EMIT_END]();
return ret;
}
else if (ev === 'finish' || ev === 'prefinish') {
const ret = super.emit(ev);
this.removeAllListeners(ev);
return ret;
}
// Some other unknown event
const ret = super.emit(ev, ...args);
this[MAYBE_EMIT_END]();
return ret;
}
[EMITDATA](data) {
for (const p of this[PIPES]) {
if (p.dest.write(data) === false)
this.pause();
}
const ret = this[DISCARDED] ? false : super.emit('data', data);
this[MAYBE_EMIT_END]();
return ret;
}
[EMITEND]() {
if (this[EMITTED_END])
return false;
this[EMITTED_END] = true;
this.readable = false;
return this[ASYNC]
? (defer$3(() => this[EMITEND2]()), true)
: this[EMITEND2]();
}
[EMITEND2]() {
if (this[DECODER]) {
const data = this[DECODER].end();
if (data) {
for (const p of this[PIPES]) {
p.dest.write(data);
}
if (!this[DISCARDED])
super.emit('data', data);
}
}
for (const p of this[PIPES]) {
p.end();
}
const ret = super.emit('end');
this.removeAllListeners('end');
return ret;
}
/**
* Return a Promise that resolves to an array of all emitted data once
* the stream ends.
*/
async collect() {
const buf = Object.assign([], {
dataLength: 0,
});
if (!this[OBJECTMODE])
buf.dataLength = 0;
// set the promise first, in case an error is raised
// by triggering the flow here.
const p = this.promise();
this.on('data', c => {
buf.push(c);
if (!this[OBJECTMODE])
buf.dataLength += c.length;
});
await p;
return buf;
}
/**
* Return a Promise that resolves to the concatenation of all emitted data
* once the stream ends.
*
* Not allowed on objectMode streams.
*/
async concat() {
if (this[OBJECTMODE]) {
throw new Error('cannot concat in objectMode');
}
const buf = await this.collect();
return (this[ENCODING$1]
? buf.join('')
: Buffer.concat(buf, buf.dataLength));
}
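// Illustrative sketch (comments only): concat() resolves to one string (when
// an encoding is set) or one Buffer once the stream ends.
//
//   const mp = new Minipass({ encoding: 'utf8' });
//   mp.write('hello, ');
//   mp.end('world');
//   mp.concat().then(all => console.log(all)); // 'hello, world'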
/**
* Return a void Promise that resolves once the stream ends.
*/
async promise() {
return new Promise((resolve, reject) => {
this.on(DESTROYED, () => reject(new Error('stream destroyed')));
this.on('error', er => reject(er));
this.on('end', () => resolve());
});
}
/**
* Asynchronous `for await of` iteration.
*
* This will continue emitting all chunks until the stream terminates.
*/
[Symbol.asyncIterator]() {
// set this up front, in case the consumer doesn't call next()
// right away.
this[DISCARDED] = false;
let stopped = false;
const stop = async () => {
this.pause();
stopped = true;
return { value: undefined, done: true };
};
const next = () => {
if (stopped)
return stop();
const res = this.read();
if (res !== null)
return Promise.resolve({ done: false, value: res });
if (this[EOF])
return stop();
let resolve;
let reject;
const onerr = (er) => {
this.off('data', ondata);
this.off('end', onend);
this.off(DESTROYED, ondestroy);
stop();
reject(er);
};
const ondata = (value) => {
this.off('error', onerr);
this.off('end', onend);
this.off(DESTROYED, ondestroy);
this.pause();
resolve({ value, done: !!this[EOF] });
};
const onend = () => {
this.off('error', onerr);
this.off('data', ondata);
this.off(DESTROYED, ondestroy);
stop();
resolve({ done: true, value: undefined });
};
const ondestroy = () => onerr(new Error('stream destroyed'));
return new Promise((res, rej) => {
reject = rej;
resolve = res;
this.once(DESTROYED, ondestroy);
this.once('error', onerr);
this.once('end', onend);
this.once('data', ondata);
});
};
return {
next,
throw: stop,
return: stop,
[Symbol.asyncIterator]() {
return this;
},
};
}
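// Illustrative sketch (comments only): async iteration consumes chunks as
// they arrive and completes when the stream ends.
//
//   const mp = new Minipass({ objectMode: true });
//   (async () => {
//     for await (const item of mp) console.log(item);
//   })();
//   mp.write({ a: 1 });
//   mp.write({ b: 2 });
//   mp.end();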
/**
* Synchronous `for of` iteration.
*
* The iteration will terminate when the internal buffer runs out, even
* if the stream has not yet terminated.
*/
[Symbol.iterator]() {
// set this up front, in case the consumer doesn't call next()
// right away.
this[DISCARDED] = false;
let stopped = false;
const stop = () => {
this.pause();
this.off(ERROR, stop);
this.off(DESTROYED, stop);
this.off('end', stop);
stopped = true;
return { done: true, value: undefined };
};
const next = () => {
if (stopped)
return stop();
const value = this.read();
return value === null ? stop() : { done: false, value };
};
this.once('end', stop);
this.once(ERROR, stop);
this.once(DESTROYED, stop);
return {
next,
throw: stop,
return: stop,
[Symbol.iterator]() {
return this;
},
};
}
/**
* Destroy a stream, preventing it from being used for any further purpose.
*
* If the stream has a `close()` method, then it will be called on
* destruction.
*
* After destruction, any attempt to write data, read data, or emit most
* events will be ignored.
*
* If an error argument is provided, then it will be emitted in an
* 'error' event.
*/
destroy(er) {
if (this[DESTROYED]) {
if (er)
this.emit('error', er);
else
this.emit(DESTROYED);
return this;
}
this[DESTROYED] = true;
this[DISCARDED] = true;
// throw away all buffered data, it's never coming out
this[BUFFER].length = 0;
this[BUFFERLENGTH] = 0;
const wc = this;
if (typeof wc.close === 'function' && !this[CLOSED$1])
wc.close();
if (er)
this.emit('error', er);
// if no error to emit, still reject pending promises
else
this.emit(DESTROYED);
return this;
}
/**
* Alias for {@link isStream}
*
* Former export location, maintained for backwards compatibility.
*
* @deprecated
*/
static get isStream() {
return isStream;
}
}
const realpathSync = realpathSync$1.native;
const defaultFS = {
lstatSync,
readdir: readdir$4,
readdirSync,
readlinkSync,
realpathSync,
promises: {
lstat: lstat$3,
readdir: readdir$5,
readlink,
realpath: realpath$2,
},
};
// if they just gave us require('fs') then use our default
const fsFromOption = (fsOption) => !fsOption || fsOption === defaultFS || fsOption === fs$j ?
defaultFS
: {
...defaultFS,
...fsOption,
promises: {
...defaultFS.promises,
...(fsOption.promises || {}),
},
};
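// Illustrative sketch (comments only, hypothetical override, assuming the
// platform PathScurry class defined later in this bundle): only the methods
// supplied are swapped in; everything else falls back to node's fs, per the
// merge above.
//
//   const scurry = new PathScurry(process.cwd(), {
//     fs: { readdirSync: myInstrumentedReaddirSync }, // hypothetical function
//   });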
// turn something like //?/c:/ into c:\
const uncDriveRegexp = /^\\\\\?\\([a-z]:)\\?$/i;
const uncToDrive = (rootPath) => rootPath.replace(/\//g, '\\').replace(uncDriveRegexp, '$1\\');
// windows paths are separated by either / or \
const eitherSep = /[\\\/]/;
const UNKNOWN = 0; // may not even exist, for all we know
const IFIFO = 0b0001;
const IFCHR = 0b0010;
const IFDIR = 0b0100;
const IFBLK = 0b0110;
const IFREG = 0b1000;
const IFLNK = 0b1010;
const IFSOCK = 0b1100;
const IFMT = 0b1111;
// mask to unset low 4 bits
const IFMT_UNKNOWN = ~IFMT;
// set after successfully calling readdir() and getting entries.
const READDIR_CALLED = 0b0000_0001_0000;
// set after a successful lstat()
const LSTAT_CALLED = 0b0000_0010_0000;
// set if an entry (or one of its parents) is definitely not a dir
const ENOTDIR = 0b0000_0100_0000;
// set if an entry (or one of its parents) does not exist
// (can also be set on lstat errors like EACCES or ENAMETOOLONG)
const ENOENT = 0b0000_1000_0000;
// set if we fail to readlink
const ENOREADLINK = 0b0001_0000_0000;
// set if we know realpath() will fail
const ENOREALPATH = 0b0010_0000_0000;
// cannot have child entries -- also verify &IFMT is either IFDIR or IFLNK
const ENOCHILD = ENOTDIR | ENOENT | ENOREALPATH;
const TYPEMASK = 0b0011_1111_1111;
const entToType = (s) => s.isFile() ? IFREG
: s.isDirectory() ? IFDIR
: s.isSymbolicLink() ? IFLNK
: s.isCharacterDevice() ? IFCHR
: s.isBlockDevice() ? IFBLK
: s.isSocket() ? IFSOCK
: s.isFIFO() ? IFIFO
: UNKNOWN;
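// Illustrative sketch (comments only): the low four bits carry the IFMT file
// type, and higher bits carry bookkeeping flags; masking with IFMT recovers
// the type regardless of which flags are set.
//
//   const t = IFDIR | READDIR_CALLED | LSTAT_CALLED;
//   (t & IFMT) === IFDIR;            // true -> still "is a directory"
//   (t & READDIR_CALLED) !== 0;      // true -> readdir has succeeded
//   (t & IFMT_UNKNOWN & IFMT) === 0; // true -> IFMT_UNKNOWN clears the type bits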
// normalize unicode path names
const normalizeCache = new Map();
const normalize = (s) => {
const c = normalizeCache.get(s);
if (c)
return c;
const n = s.normalize('NFKD');
normalizeCache.set(s, n);
return n;
};
const normalizeNocaseCache = new Map();
const normalizeNocase = (s) => {
const c = normalizeNocaseCache.get(s);
if (c)
return c;
const n = normalize(s.toLowerCase());
normalizeNocaseCache.set(s, n);
return n;
};
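// Illustrative sketch (comments only): NFKD normalization (plus lowercasing
// in the nocase variant) makes visually identical names compare equal.
//
//   normalize('caf\u00e9') === normalize('cafe\u0301');             // true
//   normalizeNocase('CAF\u00c9') === normalizeNocase('cafe\u0301'); // true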
/**
* An LRUCache for storing resolved path strings or Path objects.
* @internal
*/
class ResolveCache extends LRUCache {
constructor() {
super({ max: 256 });
}
}
// In order to prevent blowing out the js heap by allocating hundreds of
// thousands of Path entries when walking extremely large trees, the "children"
// in this tree are represented by storing an array of Path entries in an
// LRUCache, indexed by the parent. At any time, Path.children() may return an
// empty array, indicating that it doesn't know about any of its children, and
// thus has to rebuild that cache. This is fine, it just means that we don't
// benefit as much from having the cached entries, but huge directory walks
// don't blow out the stack, and smaller ones are still as fast as possible.
//
// It does impose some complexity when building up the readdir data, because we
// need to pass a reference to the children array that we started with.
/**
* an LRUCache for storing child entries.
* @internal
*/
class ChildrenCache extends LRUCache {
constructor(maxSize = 16 * 1024) {
super({
maxSize,
// parent + children
sizeCalculation: a => a.length + 1,
});
}
}
const setAsCwd = Symbol('PathScurry setAsCwd');
/**
* Path objects are sort of like a super-powered
* {@link https://nodejs.org/docs/latest/api/fs.html#class-fsdirent fs.Dirent}
*
* Each one represents a single filesystem entry on disk, which may or may not
* exist. It includes methods for reading various types of information via
* lstat, readlink, and readdir, and caches all information to the greatest
* degree possible.
*
* Note that fs operations that would normally throw will instead return an
* "empty" value. This is in order to prevent excessive overhead from error
* stack traces.
*/
class PathBase {
/**
* the basename of this path
*
* **Important**: *always* test the path name against any test string
* using the {@link isNamed} method, and not by directly comparing this
* string. Otherwise, unicode path strings that the system sees as identical
* will not be properly treated as the same path, leading to incorrect
* behavior and possible security issues.
*/
name;
/**
* the Path entry corresponding to the path root.
*
* @internal
*/
root;
/**
* All roots found within the current PathScurry family
*
* @internal
*/
roots;
/**
* a reference to the parent path, or undefined in the case of root entries
*
* @internal
*/
parent;
/**
* boolean indicating whether paths are compared case-insensitively
* @internal
*/
nocase;
/**
* boolean indicating that this path is the current working directory
* of the PathScurry collection that contains it.
*/
isCWD = false;
// potential default fs override
#fs;
// Stats fields
#dev;
get dev() {
return this.#dev;
}
#mode;
get mode() {
return this.#mode;
}
#nlink;
get nlink() {
return this.#nlink;
}
#uid;
get uid() {
return this.#uid;
}
#gid;
get gid() {
return this.#gid;
}
#rdev;
get rdev() {
return this.#rdev;
}
#blksize;
get blksize() {
return this.#blksize;
}
#ino;
get ino() {
return this.#ino;
}
#size;
get size() {
return this.#size;
}
#blocks;
get blocks() {
return this.#blocks;
}
#atimeMs;
get atimeMs() {
return this.#atimeMs;
}
#mtimeMs;
get mtimeMs() {
return this.#mtimeMs;
}
#ctimeMs;
get ctimeMs() {
return this.#ctimeMs;
}
#birthtimeMs;
get birthtimeMs() {
return this.#birthtimeMs;
}
#atime;
get atime() {
return this.#atime;
}
#mtime;
get mtime() {
return this.#mtime;
}
#ctime;
get ctime() {
return this.#ctime;
}
#birthtime;
get birthtime() {
return this.#birthtime;
}
#matchName;
#depth;
#fullpath;
#fullpathPosix;
#relative;
#relativePosix;
#type;
#children;
#linkTarget;
#realpath;
/**
* This property is for compatibility with the Dirent class as of
* Node v20, where Dirent['parentPath'] refers to the path of the
* directory that was passed to readdir. For root entries, it's the path
* to the entry itself.
*/
get parentPath() {
return (this.parent || this).fullpath();
}
/**
* Deprecated alias for Dirent['parentPath']. Somewhat counterintuitively,
* this property refers to the *parent* path, not the path object itself.
*/
get path() {
return this.parentPath;
}
/**
* Do not create new Path objects directly. They should always be accessed
* via the PathScurry class or other methods on the Path class.
*
* @internal
*/
constructor(name, type = UNKNOWN, root, roots, nocase, children, opts) {
this.name = name;
this.#matchName = nocase ? normalizeNocase(name) : normalize(name);
this.#type = type & TYPEMASK;
this.nocase = nocase;
this.roots = roots;
this.root = root || this;
this.#children = children;
this.#fullpath = opts.fullpath;
this.#relative = opts.relative;
this.#relativePosix = opts.relativePosix;
this.parent = opts.parent;
if (this.parent) {
this.#fs = this.parent.#fs;
}
else {
this.#fs = fsFromOption(opts.fs);
}
}
/**
* Returns the depth of the Path object from its root.
*
* For example, a path at `/foo/bar` would have a depth of 2.
*/
depth() {
if (this.#depth !== undefined)
return this.#depth;
if (!this.parent)
return (this.#depth = 0);
return (this.#depth = this.parent.depth() + 1);
}
/**
* @internal
*/
childrenCache() {
return this.#children;
}
/**
* Get the Path object referenced by the string path, resolved from this Path
*/
resolve(path) {
if (!path) {
return this;
}
const rootPath = this.getRootString(path);
const dir = path.substring(rootPath.length);
const dirParts = dir.split(this.splitSep);
const result = rootPath ?
this.getRoot(rootPath).#resolveParts(dirParts)
: this.#resolveParts(dirParts);
return result;
}
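// Illustrative sketch (comments only, `scurry` is a hypothetical PathScurry
// instance): resolve() walks path portions through child() without touching
// the filesystem.
//
//   const src = scurry.cwd.resolve('./src');     // Path for <cwd>/src
//   const pkg = src.resolve('../package.json');  // sibling of src
//   pkg.fullpath();                               // absolute path string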
#resolveParts(dirParts) {
let p = this;
for (const part of dirParts) {
p = p.child(part);
}
return p;
}
/**
* Returns the cached children Path objects, if still available. If they
* have fallen out of the cache, then returns an empty array, and resets the
* READDIR_CALLED bit, so that future calls to readdir() will require an fs
* lookup.
*
* @internal
*/
children() {
const cached = this.#children.get(this);
if (cached) {
return cached;
}
const children = Object.assign([], { provisional: 0 });
this.#children.set(this, children);
this.#type &= ~READDIR_CALLED;
return children;
}
/**
* Resolves a path portion and returns or creates the child Path.
*
* Returns `this` if pathPart is `''` or `'.'`, or `parent` if pathPart is
* `'..'`.
*
* This should not be called directly. If `pathPart` contains any path
* separators, it will lead to unsafe undefined behavior.
*
* Use `Path.resolve()` instead.
*
* @internal
*/
child(pathPart, opts) {
if (pathPart === '' || pathPart === '.') {
return this;
}
if (pathPart === '..') {
return this.parent || this;
}
// find the child
const children = this.children();
const name = this.nocase ? normalizeNocase(pathPart) : normalize(pathPart);
for (const p of children) {
if (p.#matchName === name) {
return p;
}
}
// didn't find it, create provisional child, since it might not
// actually exist. If we know the parent isn't a dir, then
// in fact it CAN'T exist.
const s = this.parent ? this.sep : '';
const fullpath = this.#fullpath ? this.#fullpath + s + pathPart : undefined;
const pchild = this.newChild(pathPart, UNKNOWN, {
...opts,
parent: this,
fullpath,
});
if (!this.canReaddir()) {
pchild.#type |= ENOENT;
}
// don't have to update provisional, because if we have real children,
// then provisional is set to children.length, otherwise a lower number
children.push(pchild);
return pchild;
}
/**
* The relative path from the cwd. If it does not share an ancestor with
* the cwd, then this ends up being equivalent to the fullpath()
*/
relative() {
if (this.isCWD)
return '';
if (this.#relative !== undefined) {
return this.#relative;
}
const name = this.name;
const p = this.parent;
if (!p) {
return (this.#relative = this.name);
}
const pv = p.relative();
return pv + (!pv || !p.parent ? '' : this.sep) + name;
}
/**
* The relative path from the cwd, using / as the path separator.
* If it does not share an ancestor with
* the cwd, then this ends up being equivalent to the fullpathPosix()
* On posix systems, this is identical to relative().
*/
relativePosix() {
if (this.sep === '/')
return this.relative();
if (this.isCWD)
return '';
if (this.#relativePosix !== undefined)
return this.#relativePosix;
const name = this.name;
const p = this.parent;
if (!p) {
return (this.#relativePosix = this.fullpathPosix());
}
const pv = p.relativePosix();
return pv + (!pv || !p.parent ? '' : '/') + name;
}
/**
* The fully resolved path string for this Path entry
*/
fullpath() {
if (this.#fullpath !== undefined) {
return this.#fullpath;
}
const name = this.name;
const p = this.parent;
if (!p) {
return (this.#fullpath = this.name);
}
const pv = p.fullpath();
const fp = pv + (!p.parent ? '' : this.sep) + name;
return (this.#fullpath = fp);
}
/**
* On platforms other than windows, this is identical to fullpath.
*
* On windows, this is overridden to return the forward-slash form of the
* full UNC path.
*/
fullpathPosix() {
if (this.#fullpathPosix !== undefined)
return this.#fullpathPosix;
if (this.sep === '/')
return (this.#fullpathPosix = this.fullpath());
if (!this.parent) {
const p = this.fullpath().replace(/\\/g, '/');
if (/^[a-z]:\//i.test(p)) {
return (this.#fullpathPosix = `//?/${p}`);
}
else {
return (this.#fullpathPosix = p);
}
}
const p = this.parent;
const pfpp = p.fullpathPosix();
const fpp = pfpp + (!pfpp || !p.parent ? '' : '/') + this.name;
return (this.#fullpathPosix = fpp);
}
/**
* Is the Path of an unknown type?
*
* Note that we might know *something* about it if there has been a previous
* filesystem operation, for example that it does not exist, or is not a
* link, or whether it has child entries.
*/
isUnknown() {
return (this.#type & IFMT) === UNKNOWN;
}
isType(type) {
return this[`is${type}`]();
}
getType() {
return (this.isUnknown() ? 'Unknown'
: this.isDirectory() ? 'Directory'
: this.isFile() ? 'File'
: this.isSymbolicLink() ? 'SymbolicLink'
: this.isFIFO() ? 'FIFO'
: this.isCharacterDevice() ? 'CharacterDevice'
: this.isBlockDevice() ? 'BlockDevice'
: /* c8 ignore start */ this.isSocket() ? 'Socket'
: 'Unknown');
/* c8 ignore stop */
}
/**
* Is the Path a regular file?
*/
isFile() {
return (this.#type & IFMT) === IFREG;
}
/**
* Is the Path a directory?
*/
isDirectory() {
return (this.#type & IFMT) === IFDIR;
}
/**
* Is the path a character device?
*/
isCharacterDevice() {
return (this.#type & IFMT) === IFCHR;
}
/**
* Is the path a block device?
*/
isBlockDevice() {
return (this.#type & IFMT) === IFBLK;
}
/**
* Is the path a FIFO pipe?
*/
isFIFO() {
return (this.#type & IFMT) === IFIFO;
}
/**
* Is the path a socket?
*/
isSocket() {
return (this.#type & IFMT) === IFSOCK;
}
/**
* Is the path a symbolic link?
*/
isSymbolicLink() {
return (this.#type & IFLNK) === IFLNK;
}
/**
* Return the entry if it has been subject of a successful lstat, or
* undefined otherwise.
*
* Does not read the filesystem, so an undefined result *could* simply
* mean that we haven't called lstat on it.
*/
lstatCached() {
return this.#type & LSTAT_CALLED ? this : undefined;
}
/**
* Return the cached link target if the entry has been the subject of a
* successful readlink, or undefined otherwise.
*
* Does not read the filesystem, so an undefined result *could* just mean we
* don't have any cached data. Only use it if you are very sure that a
* readlink() has been called at some point.
*/
readlinkCached() {
return this.#linkTarget;
}
/**
* Returns the cached realpath target if the entry has been the subject
* of a successful realpath, or undefined otherwise.
*
* Does not read the filesystem, so an undefined result *could* just mean we
* don't have any cached data. Only use it if you are very sure that a
* realpath() has been called at some point.
*/
realpathCached() {
return this.#realpath;
}
/**
* Returns the cached child Path entries array if the entry has been the
* subject of a successful readdir(), or [] otherwise.
*
* Does not read the filesystem, so an empty array *could* just mean we
* don't have any cached data. Only use it if you are very sure that a
* readdir() has been called recently enough to still be valid.
*/
readdirCached() {
const children = this.children();
return children.slice(0, children.provisional);
}
/**
* Return true if it's worth trying to readlink. Ie, we don't (yet) have
* any indication that readlink will definitely fail.
*
* Returns false if the path is known to not be a symlink, if a previous
* readlink failed, or if the entry does not exist.
*/
canReadlink() {
if (this.#linkTarget)
return true;
if (!this.parent)
return false;
// cases where it cannot possibly succeed
const ifmt = this.#type & IFMT;
return !((ifmt !== UNKNOWN && ifmt !== IFLNK) ||
this.#type & ENOREADLINK ||
this.#type & ENOENT);
}
/**
* Return true if readdir has previously been successfully called on this
* path, indicating that cachedReaddir() is likely valid.
*/
calledReaddir() {
return !!(this.#type & READDIR_CALLED);
}
/**
* Returns true if the path is known to not exist. That is, a previous lstat
* or readdir failed to verify its existence when that would have been
* expected, or a parent entry was marked either enoent or enotdir.
*/
isENOENT() {
return !!(this.#type & ENOENT);
}
/**
* Return true if the path is a match for the given path name. This handles
* case sensitivity and unicode normalization.
*
* Note: even on case-sensitive systems, it is **not** safe to test the
* equality of the `.name` property to determine whether a given pathname
* matches, due to unicode normalization mismatches.
*
* Always use this method instead of testing the `path.name` property
* directly.
*/
isNamed(n) {
return !this.nocase ?
this.#matchName === normalize(n)
: this.#matchName === normalizeNocase(n);
}
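// Illustrative sketch (comments only): compare names via isNamed(), not the
// raw .name property, so that unicode-equivalent spellings match.
//
//   entry.isNamed('caf\u00e9') === entry.isNamed('cafe\u0301'); // always equal
//   // whereas entry.name === 'caf\u00e9' may be false for the same on-disk file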
/**
* Return the Path object corresponding to the target of a symbolic link.
*
* If the Path is not a symbolic link, or if the readlink call fails for any
* reason, `undefined` is returned.
*
* Result is cached, and thus may be outdated if the filesystem is mutated.
*/
async readlink() {
const target = this.#linkTarget;
if (target) {
return target;
}
if (!this.canReadlink()) {
return undefined;
}
/* c8 ignore start */
// already covered by the canReadlink test, here for ts grumples
if (!this.parent) {
return undefined;
}
/* c8 ignore stop */
try {
const read = await this.#fs.promises.readlink(this.fullpath());
const linkTarget = (await this.parent.realpath())?.resolve(read);
if (linkTarget) {
return (this.#linkTarget = linkTarget);
}
}
catch (er) {
this.#readlinkFail(er.code);
return undefined;
}
}
/**
* Synchronous {@link PathBase.readlink}
*/
readlinkSync() {
const target = this.#linkTarget;
if (target) {
return target;
}
if (!this.canReadlink()) {
return undefined;
}
/* c8 ignore start */
// already covered by the canReadlink test, here for ts grumples
if (!this.parent) {
return undefined;
}
/* c8 ignore stop */
try {
const read = this.#fs.readlinkSync(this.fullpath());
const linkTarget = this.parent.realpathSync()?.resolve(read);
if (linkTarget) {
return (this.#linkTarget = linkTarget);
}
}
catch (er) {
this.#readlinkFail(er.code);
return undefined;
}
}
#readdirSuccess(children) {
// succeeded, mark readdir called bit
this.#type |= READDIR_CALLED;
// mark all remaining provisional children as ENOENT
for (let p = children.provisional; p < children.length; p++) {
const c = children[p];
if (c)
c.#markENOENT();
}
}
#markENOENT() {
// mark as UNKNOWN and ENOENT
if (this.#type & ENOENT)
return;
this.#type = (this.#type | ENOENT) & IFMT_UNKNOWN;
this.#markChildrenENOENT();
}
#markChildrenENOENT() {
// all children are provisional and do not exist
const children = this.children();
children.provisional = 0;
for (const p of children) {
p.#markENOENT();
}
}
#markENOREALPATH() {
this.#type |= ENOREALPATH;
this.#markENOTDIR();
}
// save the information when we know the entry is not a dir
#markENOTDIR() {
// entry is not a directory, so any children can't exist.
// this *should* be impossible, since any children created
// after it's been marked ENOTDIR should be marked ENOENT,
// so it won't even get to this point.
/* c8 ignore start */
if (this.#type & ENOTDIR)
return;
/* c8 ignore stop */
let t = this.#type;
// this could happen if we stat a dir, then delete it,
// then try to read it or one of its children.
if ((t & IFMT) === IFDIR)
t &= IFMT_UNKNOWN;
this.#type = t | ENOTDIR;
this.#markChildrenENOENT();
}
#readdirFail(code = '') {
// markENOTDIR and markENOENT also set provisional=0
if (code === 'ENOTDIR' || code === 'EPERM') {
this.#markENOTDIR();
}
else if (code === 'ENOENT') {
this.#markENOENT();
}
else {
this.children().provisional = 0;
}
}
#lstatFail(code = '') {
// Windows just raises ENOENT in this case, disable for win CI
/* c8 ignore start */
if (code === 'ENOTDIR') {
// already know it has a parent by this point
const p = this.parent;
p.#markENOTDIR();
}
else if (code === 'ENOENT') {
/* c8 ignore stop */
this.#markENOENT();
}
}
#readlinkFail(code = '') {
let ter = this.#type;
ter |= ENOREADLINK;
if (code === 'ENOENT')
ter |= ENOENT;
// windows gets a weird error when you try to readlink a file
if (code === 'EINVAL' || code === 'UNKNOWN') {
// exists, but not a symlink, we don't know WHAT it is, so remove
// all IFMT bits.
ter &= IFMT_UNKNOWN;
}
this.#type = ter;
// windows just gets ENOENT in this case. We do cover the case,
// just disabled because it's impossible on Windows CI
/* c8 ignore start */
if (code === 'ENOTDIR' && this.parent) {
this.parent.#markENOTDIR();
}
/* c8 ignore stop */
}
#readdirAddChild(e, c) {
return (this.#readdirMaybePromoteChild(e, c) ||
this.#readdirAddNewChild(e, c));
}
#readdirAddNewChild(e, c) {
// alloc new entry at head, so it's never provisional
const type = entToType(e);
const child = this.newChild(e.name, type, { parent: this });
const ifmt = child.#type & IFMT;
if (ifmt !== IFDIR && ifmt !== IFLNK && ifmt !== UNKNOWN) {
child.#type |= ENOTDIR;
}
c.unshift(child);
c.provisional++;
return child;
}
#readdirMaybePromoteChild(e, c) {
for (let p = c.provisional; p < c.length; p++) {
const pchild = c[p];
const name = this.nocase ? normalizeNocase(e.name) : normalize(e.name);
if (name !== pchild.#matchName) {
continue;
}
return this.#readdirPromoteChild(e, pchild, p, c);
}
}
#readdirPromoteChild(e, p, index, c) {
const v = p.name;
// retain any other flags, but set ifmt from dirent
p.#type = (p.#type & IFMT_UNKNOWN) | entToType(e);
// case sensitivity fixing when we learn the true name.
if (v !== e.name)
p.name = e.name;
// just advance provisional index (potentially off the list),
// otherwise we have to splice/pop it out and re-insert at head
if (index !== c.provisional) {
if (index === c.length - 1)
c.pop();
else
c.splice(index, 1);
c.unshift(p);
}
c.provisional++;
return p;
}
/**
* Call lstat() on this Path, and update all known information that can be
* determined.
*
* Note that unlike `fs.lstat()`, the returned value does not contain some
* information, such as `mode`, `dev`, `nlink`, and `ino`. If that
* information is required, you will need to call `fs.lstat` yourself.
*
* If the Path refers to a nonexistent file, or if the lstat call fails for
* any reason, `undefined` is returned. Otherwise the updated Path object is
* returned.
*
* Results are cached, and thus may be out of date if the filesystem is
* mutated.
*/
async lstat() {
if ((this.#type & ENOENT) === 0) {
try {
this.#applyStat(await this.#fs.promises.lstat(this.fullpath()));
return this;
}
catch (er) {
this.#lstatFail(er.code);
}
}
}
/**
* synchronous {@link PathBase.lstat}
*/
lstatSync() {
if ((this.#type & ENOENT) === 0) {
try {
this.#applyStat(this.#fs.lstatSync(this.fullpath()));
return this;
}
catch (er) {
this.#lstatFail(er.code);
}
}
}
#applyStat(st) {
const { atime, atimeMs, birthtime, birthtimeMs, blksize, blocks, ctime, ctimeMs, dev, gid, ino, mode, mtime, mtimeMs, nlink, rdev, size, uid, } = st;
this.#atime = atime;
this.#atimeMs = atimeMs;
this.#birthtime = birthtime;
this.#birthtimeMs = birthtimeMs;
this.#blksize = blksize;
this.#blocks = blocks;
this.#ctime = ctime;
this.#ctimeMs = ctimeMs;
this.#dev = dev;
this.#gid = gid;
this.#ino = ino;
this.#mode = mode;
this.#mtime = mtime;
this.#mtimeMs = mtimeMs;
this.#nlink = nlink;
this.#rdev = rdev;
this.#size = size;
this.#uid = uid;
const ifmt = entToType(st);
// retain any other flags, but set the ifmt
this.#type = (this.#type & IFMT_UNKNOWN) | ifmt | LSTAT_CALLED;
if (ifmt !== UNKNOWN && ifmt !== IFDIR && ifmt !== IFLNK) {
this.#type |= ENOTDIR;
}
}
#onReaddirCB = [];
#readdirCBInFlight = false;
#callOnReaddirCB(children) {
this.#readdirCBInFlight = false;
const cbs = this.#onReaddirCB.slice();
this.#onReaddirCB.length = 0;
cbs.forEach(cb => cb(null, children));
}
/**
* Standard node-style callback interface to get list of directory entries.
*
* If the Path cannot or does not contain any children, then an empty array
* is returned.
*
* Results are cached, and thus may be out of date if the filesystem is
* mutated.
*
* @param cb The callback called with (er, entries). Note that the `er`
* param is somewhat extraneous, as all readdir() errors are handled and
* simply result in an empty set of entries being returned.
* @param allowZalgo Boolean indicating that immediately known results should
* *not* be deferred with `queueMicrotask`. Defaults to `false`. Release
* zalgo at your peril, the dark pony lord is devious and unforgiving.
*/
readdirCB(cb, allowZalgo = false) {
if (!this.canReaddir()) {
if (allowZalgo)
cb(null, []);
else
queueMicrotask(() => cb(null, []));
return;
}
const children = this.children();
if (this.calledReaddir()) {
const c = children.slice(0, children.provisional);
if (allowZalgo)
cb(null, c);
else
queueMicrotask(() => cb(null, c));
return;
}
// don't have to worry about zalgo at this point.
this.#onReaddirCB.push(cb);
if (this.#readdirCBInFlight) {
return;
}
this.#readdirCBInFlight = true;
// else read the directory, fill up children
// de-provisionalize any provisional children.
const fullpath = this.fullpath();
this.#fs.readdir(fullpath, { withFileTypes: true }, (er, entries) => {
if (er) {
this.#readdirFail(er.code);
children.provisional = 0;
}
else {
// if we didn't get an error, we always get entries.
//@ts-ignore
for (const e of entries) {
this.#readdirAddChild(e, children);
}
this.#readdirSuccess(children);
}
this.#callOnReaddirCB(children.slice(0, children.provisional));
return;
});
}
#asyncReaddirInFlight;
/**
* Return an array of known child entries.
*
* If the Path cannot or does not contain any children, then an empty array
* is returned.
*
* Results are cached, and thus may be out of date if the filesystem is
* mutated.
*/
async readdir() {
if (!this.canReaddir()) {
return [];
}
const children = this.children();
if (this.calledReaddir()) {
return children.slice(0, children.provisional);
}
// else read the directory, fill up children
// de-provisionalize any provisional children.
const fullpath = this.fullpath();
if (this.#asyncReaddirInFlight) {
await this.#asyncReaddirInFlight;
}
else {
/* c8 ignore start */
let resolve = () => { };
/* c8 ignore stop */
this.#asyncReaddirInFlight = new Promise(res => (resolve = res));
try {
for (const e of await this.#fs.promises.readdir(fullpath, {
withFileTypes: true,
})) {
this.#readdirAddChild(e, children);
}
this.#readdirSuccess(children);
}
catch (er) {
this.#readdirFail(er.code);
children.provisional = 0;
}
this.#asyncReaddirInFlight = undefined;
resolve();
}
return children.slice(0, children.provisional);
}
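// Illustrative sketch (comments only, inside an async function; `scurry` is a
// hypothetical PathScurry instance): readdir() resolves to cached Path
// entries, so repeated calls avoid hitting the filesystem.
//
//   const entries = await scurry.cwd.readdir(); // Path[] (empty on failure)
//   for (const e of entries) {
//     if (e.isDirectory()) console.log(e.fullpath());
//   }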
/**
* synchronous {@link PathBase.readdir}
*/
readdirSync() {
if (!this.canReaddir()) {
return [];
}
const children = this.children();
if (this.calledReaddir()) {
return children.slice(0, children.provisional);
}
// else read the directory, fill up children
// de-provisionalize any provisional children.
const fullpath = this.fullpath();
try {
for (const e of this.#fs.readdirSync(fullpath, {
withFileTypes: true,
})) {
this.#readdirAddChild(e, children);
}
this.#readdirSuccess(children);
}
catch (er) {
this.#readdirFail(er.code);
children.provisional = 0;
}
return children.slice(0, children.provisional);
}
canReaddir() {
if (this.#type & ENOCHILD)
return false;
const ifmt = IFMT & this.#type;
// we always set ENOTDIR when setting IFMT, so should be impossible
/* c8 ignore start */
if (!(ifmt === UNKNOWN || ifmt === IFDIR || ifmt === IFLNK)) {
return false;
}
/* c8 ignore stop */
return true;
}
shouldWalk(dirs, walkFilter) {
return ((this.#type & IFDIR) === IFDIR &&
!(this.#type & ENOCHILD) &&
!dirs.has(this) &&
(!walkFilter || walkFilter(this)));
}
/**
* Return the Path object corresponding to path as resolved
* by realpath(3).
*
* If the realpath call fails for any reason, `undefined` is returned.
*
* Result is cached, and thus may be outdated if the filesystem is mutated.
* On success, returns a Path object.
*/
async realpath() {
if (this.#realpath)
return this.#realpath;
if ((ENOREALPATH | ENOREADLINK | ENOENT) & this.#type)
return undefined;
try {
const rp = await this.#fs.promises.realpath(this.fullpath());
return (this.#realpath = this.resolve(rp));
}
catch (_) {
this.#markENOREALPATH();
}
}
/**
* Synchronous {@link realpath}
*/
realpathSync() {
if (this.#realpath)
return this.#realpath;
if ((ENOREALPATH | ENOREADLINK | ENOENT) & this.#type)
return undefined;
try {
const rp = this.#fs.realpathSync(this.fullpath());
return (this.#realpath = this.resolve(rp));
}
catch (_) {
this.#markENOREALPATH();
}
}
/**
* Internal method to mark this Path object as the scurry cwd,
* called by {@link PathScurry#chdir}
*
* @internal
*/
[setAsCwd](oldCwd) {
if (oldCwd === this)
return;
oldCwd.isCWD = false;
this.isCWD = true;
const changed = new Set([]);
let rp = [];
let p = this;
while (p && p.parent) {
changed.add(p);
p.#relative = rp.join(this.sep);
p.#relativePosix = rp.join('/');
p = p.parent;
rp.push('..');
}
// now un-memoize parents of old cwd
p = oldCwd;
while (p && p.parent && !changed.has(p)) {
p.#relative = undefined;
p.#relativePosix = undefined;
p = p.parent;
}
}
}
/**
* Path class used on win32 systems
*
* Uses `'\\'` as the path separator for returned paths, either `'\\'` or `'/'`
* as the path separator for parsing paths.
*/
class PathWin32 extends PathBase {
/**
* Separator for generating path strings.
*/
sep = '\\';
/**
* Separator for parsing path strings.
*/
splitSep = eitherSep;
/**
* Do not create new Path objects directly. They should always be accessed
* via the PathScurry class or other methods on the Path class.
*
* @internal
*/
constructor(name, type = UNKNOWN, root, roots, nocase, children, opts) {
super(name, type, root, roots, nocase, children, opts);
}
/**
* @internal
*/
newChild(name, type = UNKNOWN, opts = {}) {
return new PathWin32(name, type, this.root, this.roots, this.nocase, this.childrenCache(), opts);
}
/**
* @internal
*/
getRootString(path) {
return win32$1.parse(path).root;
}
/**
* @internal
*/
getRoot(rootPath) {
rootPath = uncToDrive(rootPath.toUpperCase());
if (rootPath === this.root.name) {
return this.root;
}
// ok, not that one, check if it matches another we know about
for (const [compare, root] of Object.entries(this.roots)) {
if (this.sameRoot(rootPath, compare)) {
return (this.roots[rootPath] = root);
}
}
// otherwise, have to create a new one.
return (this.roots[rootPath] = new PathScurryWin32(rootPath, this).root);
}
/**
* @internal
*/
sameRoot(rootPath, compare = this.root.name) {
// Windows can (rarely) have a case-sensitive filesystem, but
// UNC and drive letters are always case-insensitive, and canonically
// represented uppercase.
rootPath = rootPath
.toUpperCase()
.replace(/\//g, '\\')
.replace(uncDriveRegexp, '$1\\');
return rootPath === compare;
}
}
/**
* Path class used on all posix systems.
*
* Uses `'/'` as the path separator.
*/
class PathPosix extends PathBase {
/**
* separator for parsing path strings
*/
splitSep = '/';
/**
* separator for generating path strings
*/
sep = '/';
/**
* Do not create new Path objects directly. They should always be accessed
* via the PathScurry class or other methods on the Path class.
*
* @internal
*/
constructor(name, type = UNKNOWN, root, roots, nocase, children, opts) {
super(name, type, root, roots, nocase, children, opts);
}
/**
* @internal
*/
getRootString(path) {
return path.startsWith('/') ? '/' : '';
}
/**
* @internal
*/
getRoot(_rootPath) {
return this.root;
}
/**
* @internal
*/
newChild(name, type = UNKNOWN, opts = {}) {
return new PathPosix(name, type, this.root, this.roots, this.nocase, this.childrenCache(), opts);
}
}
/**
* The base class for all PathScurry classes, providing the interface for path
* resolution and filesystem operations.
*
* Typically, you should *not* instantiate this class directly, but rather one
* of the platform-specific classes, or the exported {@link PathScurry} which
* defaults to the current platform.
*/
class PathScurryBase {
/**
* The root Path entry for the current working directory of this Scurry
*/
root;
/**
* The string path for the root of this Scurry's current working directory
*/
rootPath;
/**
* A collection of all roots encountered, referenced by rootPath
*/
roots;
/**
* The Path entry corresponding to this PathScurry's current working directory.
*/
cwd;
#resolveCache;
#resolvePosixCache;
#children;
/**
* Perform path comparisons case-insensitively.
*
* Defaults true on Darwin and Windows systems, false elsewhere.
*/
nocase;
#fs;
/**
* This class should not be instantiated directly.
*
* Use PathScurryWin32, PathScurryDarwin, PathScurryPosix, or PathScurry
*
* @internal
*/
constructor(cwd = process.cwd(), pathImpl, sep, { nocase, childrenCacheSize = 16 * 1024, fs = defaultFS, } = {}) {
this.#fs = fsFromOption(fs);
if (cwd instanceof URL || cwd.startsWith('file://')) {
cwd = fileURLToPath(cwd);
}
// resolve and split root, and then add to the store.
// this is the only time we call path.resolve()
const cwdPath = pathImpl.resolve(cwd);
this.roots = Object.create(null);
this.rootPath = this.parseRootPath(cwdPath);
this.#resolveCache = new ResolveCache();
this.#resolvePosixCache = new ResolveCache();
this.#children = new ChildrenCache(childrenCacheSize);
const split = cwdPath.substring(this.rootPath.length).split(sep);
// resolve('/') leaves '', splits to [''], we don't want that.
if (split.length === 1 && !split[0]) {
split.pop();
}
/* c8 ignore start */
if (nocase === undefined) {
throw new TypeError('must provide nocase setting to PathScurryBase ctor');
}
/* c8 ignore stop */
this.nocase = nocase;
this.root = this.newRoot(this.#fs);
this.roots[this.rootPath] = this.root;
let prev = this.root;
let len = split.length - 1;
const joinSep = pathImpl.sep;
let abs = this.rootPath;
let sawFirst = false;
for (const part of split) {
const l = len--;
prev = prev.child(part, {
relative: new Array(l).fill('..').join(joinSep),
relativePosix: new Array(l).fill('..').join('/'),
fullpath: (abs += (sawFirst ? '' : joinSep) + part),
});
sawFirst = true;
}
this.cwd = prev;
}
/**
* Get the depth of a provided path, string, or the cwd
*/
depth(path = this.cwd) {
if (typeof path === 'string') {
path = this.cwd.resolve(path);
}
return path.depth();
}
/**
* Return the cache of child entries. Exposed so subclasses can create
* child Path objects in a platform-specific way.
*
* @internal
*/
childrenCache() {
return this.#children;
}
/**
* Resolve one or more path strings to a resolved string
*
* Same interface as require('path').resolve.
*
* Much faster than path.resolve() when called multiple times for the same
* path, because the resolved Path objects are cached. Much slower
* otherwise.
*/
resolve(...paths) {
// first figure out the minimum number of paths we have to test
// we always start at cwd, but any absolutes will bump the start
let r = '';
for (let i = paths.length - 1; i >= 0; i--) {
const p = paths[i];
if (!p || p === '.')
continue;
r = r ? `${p}/${r}` : p;
if (this.isAbsolute(p)) {
break;
}
}
const cached = this.#resolveCache.get(r);
if (cached !== undefined) {
return cached;
}
const result = this.cwd.resolve(r).fullpath();
this.#resolveCache.set(r, result);
return result;
}
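// Illustrative sketch (comments only, `scurry` is a hypothetical PathScurry
// instance): repeated resolutions of the same arguments are served from the
// LRU cache above.
//
//   scurry.resolve('src', '../lib', 'index.js'); // -> <cwd>/lib/index.js
//   scurry.resolve('src', '../lib', 'index.js'); // cached string this time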
/**
* Resolve one or more path strings to a resolved string, returning
* the posix path. Identical to .resolve() on posix systems, but on
* windows will return a forward-slash separated UNC path.
*
* Same interface as require('path').resolve.
*
* Much faster than path.resolve() when called multiple times for the same
* path, because the resolved Path objects are cached. Much slower
* otherwise.
*/
resolvePosix(...paths) {
// first figure out the minimum number of paths we have to test
// we always start at cwd, but any absolutes will bump the start
let r = '';
for (let i = paths.length - 1; i >= 0; i--) {
const p = paths[i];
if (!p || p === '.')
continue;
r = r ? `${p}/${r}` : p;
if (this.isAbsolute(p)) {
break;
}
}
const cached = this.#resolvePosixCache.get(r);
if (cached !== undefined) {
return cached;
}
const result = this.cwd.resolve(r).fullpathPosix();
this.#resolvePosixCache.set(r, result);
return result;
}
/**
* find the relative path from the cwd to the supplied path string or entry
*/
relative(entry = this.cwd) {
if (typeof entry === 'string') {
entry = this.cwd.resolve(entry);
}
return entry.relative();
}
/**
* find the relative path from the cwd to the supplied path string or
* entry, using / as the path delimiter, even on Windows.
*/
relativePosix(entry = this.cwd) {
if (typeof entry === 'string') {
entry = this.cwd.resolve(entry);
}
return entry.relativePosix();
}
/**
* Return the basename for the provided string or Path object
*/
basename(entry = this.cwd) {
if (typeof entry === 'string') {
entry = this.cwd.resolve(entry);
}
return entry.name;
}
/**
* Return the dirname for the provided string or Path object
*/
dirname(entry = this.cwd) {
if (typeof entry === 'string') {
entry = this.cwd.resolve(entry);
}
return (entry.parent || entry).fullpath();
}
async readdir(entry = this.cwd, opts = {
withFileTypes: true,
}) {
if (typeof entry === 'string') {
entry = this.cwd.resolve(entry);
}
else if (!(entry instanceof PathBase)) {
opts = entry;
entry = this.cwd;
}
const { withFileTypes } = opts;
if (!entry.canReaddir()) {
return [];
}
else {
const p = await entry.readdir();
return withFileTypes ? p : p.map(e => e.name);
}
}
readdirSync(entry = this.cwd, opts = {
withFileTypes: true,
}) {
if (typeof entry === 'string') {
entry = this.cwd.resolve(entry);
}
else if (!(entry instanceof PathBase)) {
opts = entry;
entry = this.cwd;
}
const { withFileTypes = true } = opts;
if (!entry.canReaddir()) {
return [];
}
else if (withFileTypes) {
return entry.readdirSync();
}
else {
return entry.readdirSync().map(e => e.name);
}
}
/**
* Call lstat() on the string or Path object, and update all known
* information that can be determined.
*
* Note that unlike `fs.lstat()`, the returned value does not contain some
* information, such as `mode`, `dev`, `nlink`, and `ino`. If that
* information is required, you will need to call `fs.lstat` yourself.
*
* If the Path refers to a nonexistent file, or if the lstat call fails for
* any reason, `undefined` is returned. Otherwise the updated Path object is
* returned.
*
* Results are cached, and thus may be out of date if the filesystem is
* mutated.
*/
async lstat(entry = this.cwd) {
if (typeof entry === 'string') {
entry = this.cwd.resolve(entry);
}
return entry.lstat();
}
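// Illustrative sketch of the lstat() contract described above: it resolves to
// the updated Path object, or to undefined for a missing entry. The file name
// below is hypothetical.
//
//   await scurry.lstat('.');                 // -> Path object for the cwd
//   await scurry.lstat('no-such-file.txt');  // -> undefined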
/**
* synchronous {@link PathScurryBase.lstat}
*/
lstatSync(entry = this.cwd) {
if (typeof entry === 'string') {
entry = this.cwd.resolve(entry);
}
return entry.lstatSync();
}
async readlink(entry = this.cwd, { withFileTypes } = {
withFileTypes: false,
}) {
if (typeof entry === 'string') {
entry = this.cwd.resolve(entry);
}
else if (!(entry instanceof PathBase)) {
withFileTypes = entry.withFileTypes;
entry = this.cwd;
}
const e = await entry.readlink();
return withFileTypes ? e : e?.fullpath();
}
readlinkSync(entry = this.cwd, { withFileTypes } = {
withFileTypes: false,
}) {
if (typeof entry === 'string') {
entry = this.cwd.resolve(entry);
}
else if (!(entry instanceof PathBase)) {
withFileTypes = entry.withFileTypes;
entry = this.cwd;
}
const e = entry.readlinkSync();
return withFileTypes ? e : e?.fullpath();
}
async realpath(entry = this.cwd, { withFileTypes } = {
withFileTypes: false,
}) {
if (typeof entry === 'string') {
entry = this.cwd.resolve(entry);
}
else if (!(entry instanceof PathBase)) {
withFileTypes = entry.withFileTypes;
entry = this.cwd;
}
const e = await entry.realpath();
return withFileTypes ? e : e?.fullpath();
}
realpathSync(entry = this.cwd, { withFileTypes } = {
withFileTypes: false,
}) {
if (typeof entry === 'string') {
entry = this.cwd.resolve(entry);
}
else if (!(entry instanceof PathBase)) {
withFileTypes = entry.withFileTypes;
entry = this.cwd;
}
const e = entry.realpathSync();
return withFileTypes ? e : e?.fullpath();
}
async walk(entry = this.cwd, opts = {}) {
if (typeof entry === 'string') {
entry = this.cwd.resolve(entry);
}
else if (!(entry instanceof PathBase)) {
opts = entry;
entry = this.cwd;
}
const { withFileTypes = true, follow = false, filter, walkFilter, } = opts;
const results = [];
if (!filter || filter(entry)) {
results.push(withFileTypes ? entry : entry.fullpath());
}
const dirs = new Set();
const walk = (dir, cb) => {
dirs.add(dir);
dir.readdirCB((er, entries) => {
/* c8 ignore start */
if (er) {
return cb(er);
}
/* c8 ignore stop */
let len = entries.length;
if (!len)
return cb();
const next = () => {
if (--len === 0) {
cb();
}
};
for (const e of entries) {
if (!filter || filter(e)) {
results.push(withFileTypes ? e : e.fullpath());
}
if (follow && e.isSymbolicLink()) {
e.realpath()
.then(r => (r?.isUnknown() ? r.lstat() : r))
.then(r => r?.shouldWalk(dirs, walkFilter) ? walk(r, next) : next());
}
else {
if (e.shouldWalk(dirs, walkFilter)) {
walk(e, next);
}
else {
next();
}
}
}
}, true); // zalgooooooo
};
const start = entry;
return new Promise((res, rej) => {
walk(start, er => {
/* c8 ignore start */
if (er)
return rej(er);
/* c8 ignore stop */
res(results);
});
});
}
walkSync(entry = this.cwd, opts = {}) {
if (typeof entry === 'string') {
entry = this.cwd.resolve(entry);
}
else if (!(entry instanceof PathBase)) {
opts = entry;
entry = this.cwd;
}
const { withFileTypes = true, follow = false, filter, walkFilter, } = opts;
const results = [];
if (!filter || filter(entry)) {
results.push(withFileTypes ? entry : entry.fullpath());
}
const dirs = new Set([entry]);
for (const dir of dirs) {
const entries = dir.readdirSync();
for (const e of entries) {
if (!filter || filter(e)) {
results.push(withFileTypes ? e : e.fullpath());
}
let r = e;
if (e.isSymbolicLink()) {
if (!(follow && (r = e.realpathSync())))
continue;
if (r.isUnknown())
r.lstatSync();
}
if (r.shouldWalk(dirs, walkFilter)) {
dirs.add(r);
}
}
}
return results;
}
/**
* Support for `for await`
*
* Alias for {@link PathScurryBase.iterate}
*
* Note: As of Node 19, this is very slow compared to other methods of
* walking. Consider using {@link PathScurryBase.stream} if memory overhead
* and backpressure are concerns, or {@link PathScurryBase.walk} if not.
*/
[Symbol.asyncIterator]() {
return this.iterate();
}
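// Hedged usage sketch: PathScurry instances are async-iterable, so a walk can
// be consumed with `for await`. The root below is hypothetical.
//
//   const scurry = new PathScurry('/tmp');
//   for await (const entry of scurry) {
//     console.log(entry.fullpath());
//   }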
iterate(entry = this.cwd, options = {}) {
// iterating async over the stream is significantly more performant,
// especially in the warm-cache scenario, because it buffers up directory
// entries in the background instead of waiting for a yield for each one.
if (typeof entry === 'string') {
entry = this.cwd.resolve(entry);
}
else if (!(entry instanceof PathBase)) {
options = entry;
entry = this.cwd;
}
return this.stream(entry, options)[Symbol.asyncIterator]();
}
/**
* Iterating over a PathScurry performs a synchronous walk.
*
* Alias for {@link PathScurryBase.iterateSync}
*/
[Symbol.iterator]() {
return this.iterateSync();
}
*iterateSync(entry = this.cwd, opts = {}) {
if (typeof entry === 'string') {
entry = this.cwd.resolve(entry);
}
else if (!(entry instanceof PathBase)) {
opts = entry;
entry = this.cwd;
}
const { withFileTypes = true, follow = false, filter, walkFilter, } = opts;
if (!filter || filter(entry)) {
yield withFileTypes ? entry : entry.fullpath();
}
const dirs = new Set([entry]);
for (const dir of dirs) {
const entries = dir.readdirSync();
for (const e of entries) {
if (!filter || filter(e)) {
yield withFileTypes ? e : e.fullpath();
}
let r = e;
if (e.isSymbolicLink()) {
if (!(follow && (r = e.realpathSync())))
continue;
if (r.isUnknown())
r.lstatSync();
}
if (r.shouldWalk(dirs, walkFilter)) {
dirs.add(r);
}
}
}
}
stream(entry = this.cwd, opts = {}) {
if (typeof entry === 'string') {
entry = this.cwd.resolve(entry);
}
else if (!(entry instanceof PathBase)) {
opts = entry;
entry = this.cwd;
}
const { withFileTypes = true, follow = false, filter, walkFilter, } = opts;
const results = new Minipass({ objectMode: true });
if (!filter || filter(entry)) {
results.write(withFileTypes ? entry : entry.fullpath());
}
const dirs = new Set();
const queue = [entry];
let processing = 0;
const process = () => {
let paused = false;
while (!paused) {
const dir = queue.shift();
if (!dir) {
if (processing === 0)
results.end();
return;
}
processing++;
dirs.add(dir);
const onReaddir = (er, entries, didRealpaths = false) => {
/* c8 ignore start */
if (er)
return results.emit('error', er);
/* c8 ignore stop */
if (follow && !didRealpaths) {
const promises = [];
for (const e of entries) {
if (e.isSymbolicLink()) {
promises.push(e
.realpath()
.then((r) => r?.isUnknown() ? r.lstat() : r));
}
}
if (promises.length) {
Promise.all(promises).then(() => onReaddir(null, entries, true));
return;
}
}
for (const e of entries) {
if (e && (!filter || filter(e))) {
if (!results.write(withFileTypes ? e : e.fullpath())) {
paused = true;
}
}
}
processing--;
for (const e of entries) {
const r = e.realpathCached() || e;
if (r.shouldWalk(dirs, walkFilter)) {
queue.push(r);
}
}
if (paused && !results.flowing) {
results.once('drain', process);
}
else if (!sync) {
process();
}
};
// zalgo containment
let sync = true;
dir.readdirCB(onReaddir, true);
sync = false;
}
};
process();
return results;
}
streamSync(entry = this.cwd, opts = {}) {
if (typeof entry === 'string') {
entry = this.cwd.resolve(entry);
}
else if (!(entry instanceof PathBase)) {
opts = entry;
entry = this.cwd;
}
const { withFileTypes = true, follow = false, filter, walkFilter, } = opts;
const results = new Minipass({ objectMode: true });
const dirs = new Set();
if (!filter || filter(entry)) {
results.write(withFileTypes ? entry : entry.fullpath());
}
const queue = [entry];
let processing = 0;
const process = () => {
let paused = false;
while (!paused) {
const dir = queue.shift();
if (!dir) {
if (processing === 0)
results.end();
return;
}
processing++;
dirs.add(dir);
const entries = dir.readdirSync();
for (const e of entries) {
if (!filter || filter(e)) {
if (!results.write(withFileTypes ? e : e.fullpath())) {
paused = true;
}
}
}
processing--;
for (const e of entries) {
let r = e;
if (e.isSymbolicLink()) {
if (!(follow && (r = e.realpathSync())))
continue;
if (r.isUnknown())
r.lstatSync();
}
if (r.shouldWalk(dirs, walkFilter)) {
queue.push(r);
}
}
}
if (paused && !results.flowing)
results.once('drain', process);
};
process();
return results;
}
chdir(path = this.cwd) {
const oldCwd = this.cwd;
this.cwd = typeof path === 'string' ? this.cwd.resolve(path) : path;
this.cwd[setAsCwd](oldCwd);
}
}
/**
* Windows implementation of {@link PathScurryBase}
*
* Defaults to case insensitive, uses `'\\'` to generate path strings. Uses
* {@link PathWin32} for Path objects.
*/
class PathScurryWin32 extends PathScurryBase {
/**
* separator for generating path strings
*/
sep = '\\';
constructor(cwd = process.cwd(), opts = {}) {
const { nocase = true } = opts;
super(cwd, win32$1, '\\', { ...opts, nocase });
this.nocase = nocase;
for (let p = this.cwd; p; p = p.parent) {
p.nocase = this.nocase;
}
}
/**
* @internal
*/
parseRootPath(dir) {
// if the path starts with a single separator, it's not a UNC path, and we'll
// just get the separator as the root; driveFromUNC will return \
// In that case, mount \ on the root from the cwd.
return win32$1.parse(dir).root.toUpperCase();
}
/**
* @internal
*/
newRoot(fs) {
return new PathWin32(this.rootPath, IFDIR, undefined, this.roots, this.nocase, this.childrenCache(), { fs });
}
/**
* Return true if the provided path string is an absolute path
*/
isAbsolute(p) {
return (p.startsWith('/') || p.startsWith('\\') || /^[a-z]:(\/|\\)/i.test(p));
}
}
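// Illustrative values for the Windows isAbsolute() check above:
//
//   scurry.isAbsolute('C:\\Users');    // true (drive letter)
//   scurry.isAbsolute('\\\\host\\x');  // true (UNC / leading backslash)
//   scurry.isAbsolute('foo\\bar');     // false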
/**
* {@link PathScurryBase} implementation for all posix systems other than Darwin.
*
* Defaults to case-sensitive matching, uses `'/'` to generate path strings.
*
* Uses {@link PathPosix} for Path objects.
*/
class PathScurryPosix extends PathScurryBase {
/**
* separator for generating path strings
*/
sep = '/';
constructor(cwd = process.cwd(), opts = {}) {
const { nocase = false } = opts;
super(cwd, posix$1, '/', { ...opts, nocase });
this.nocase = nocase;
}
/**
* @internal
*/
parseRootPath(_dir) {
return '/';
}
/**
* @internal
*/
newRoot(fs) {
return new PathPosix(this.rootPath, IFDIR, undefined, this.roots, this.nocase, this.childrenCache(), { fs });
}
/**
* Return true if the provided path string is an absolute path
*/
isAbsolute(p) {
return p.startsWith('/');
}
}
/**
* {@link PathScurryBase} implementation for Darwin (macOS) systems.
*
* Defaults to case-insensitive matching, uses `'/'` for generating path
* strings.
*
* Uses {@link PathPosix} for Path objects.
*/
class PathScurryDarwin extends PathScurryPosix {
constructor(cwd = process.cwd(), opts = {}) {
const { nocase = true } = opts;
super(cwd, { ...opts, nocase });
}
}
/**
* Default {@link PathBase} implementation for the current platform.
*
* {@link PathWin32} on Windows systems, {@link PathPosix} on all others.
*/
process.platform === 'win32' ? PathWin32 : PathPosix;
/**
* Default {@link PathScurryBase} implementation for the current platform.
*
* {@link PathScurryWin32} on Windows systems, {@link PathScurryDarwin} on
* Darwin (macOS) systems, {@link PathScurryPosix} on all others.
*/
const PathScurry = process.platform === 'win32' ? PathScurryWin32
: process.platform === 'darwin' ? PathScurryDarwin
: PathScurryPosix;
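// Minimal sketch of the dispatch above: constructing PathScurry picks the
// platform-appropriate subclass, so callers never branch on process.platform.
//
//   const scurry = new PathScurry();   // PathScurryWin32 / Darwin / Posix
//   scurry.sep;                        // '\\' on Windows, '/' elsewhere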
// this is just a very light wrapper around 2 arrays with an offset index
const isPatternList = (pl) => pl.length >= 1;
const isGlobList = (gl) => gl.length >= 1;
/**
* An immutable-ish view on an array of glob parts and their parsed
* results
*/
class Pattern {
#patternList;
#globList;
#index;
length;
#platform;
#rest;
#globString;
#isDrive;
#isUNC;
#isAbsolute;
#followGlobstar = true;
constructor(patternList, globList, index, platform) {
if (!isPatternList(patternList)) {
throw new TypeError('empty pattern list');
}
if (!isGlobList(globList)) {
throw new TypeError('empty glob list');
}
if (globList.length !== patternList.length) {
throw new TypeError('mismatched pattern list and glob list lengths');
}
this.length = patternList.length;
if (index < 0 || index >= this.length) {
throw new TypeError('index out of range');
}
this.#patternList = patternList;
this.#globList = globList;
this.#index = index;
this.#platform = platform;
// normalize root entries of absolute patterns on initial creation.
if (this.#index === 0) {
// c: => ['c:/']
// C:/ => ['C:/']
// C:/x => ['C:/', 'x']
// //host/share => ['//host/share/']
// //host/share/ => ['//host/share/']
// //host/share/x => ['//host/share/', 'x']
// /etc => ['/', 'etc']
// / => ['/']
if (this.isUNC()) {
// '' / '' / 'host' / 'share'
const [p0, p1, p2, p3, ...prest] = this.#patternList;
const [g0, g1, g2, g3, ...grest] = this.#globList;
if (prest[0] === '') {
// ends in /
prest.shift();
grest.shift();
}
const p = [p0, p1, p2, p3, ''].join('/');
const g = [g0, g1, g2, g3, ''].join('/');
this.#patternList = [p, ...prest];
this.#globList = [g, ...grest];
this.length = this.#patternList.length;
}
else if (this.isDrive() || this.isAbsolute()) {
const [p1, ...prest] = this.#patternList;
const [g1, ...grest] = this.#globList;
if (prest[0] === '') {
// ends in /
prest.shift();
grest.shift();
}
const p = p1 + '/';
const g = g1 + '/';
this.#patternList = [p, ...prest];
this.#globList = [g, ...grest];
this.length = this.#patternList.length;
}
}
}
/**
* The first entry in the parsed list of patterns
*/
pattern() {
return this.#patternList[this.#index];
}
/**
* true if pattern() returns a string
*/
isString() {
return typeof this.#patternList[this.#index] === 'string';
}
/**
* true if pattern() returns GLOBSTAR
*/
isGlobstar() {
return this.#patternList[this.#index] === GLOBSTAR$2;
}
/**
* true if pattern() returns a regexp
*/
isRegExp() {
return this.#patternList[this.#index] instanceof RegExp;
}
/**
* The /-joined set of glob parts that make up this pattern
*/
globString() {
return (this.#globString =
this.#globString ||
(this.#index === 0 ?
this.isAbsolute() ?
this.#globList[0] + this.#globList.slice(1).join('/')
: this.#globList.join('/')
: this.#globList.slice(this.#index).join('/')));
}
/**
* true if there are more pattern parts after this one
*/
hasMore() {
return this.length > this.#index + 1;
}
/**
* The rest of the pattern after this part, or null if this is the end
*/
rest() {
if (this.#rest !== undefined)
return this.#rest;
if (!this.hasMore())
return (this.#rest = null);
this.#rest = new Pattern(this.#patternList, this.#globList, this.#index + 1, this.#platform);
this.#rest.#isAbsolute = this.#isAbsolute;
this.#rest.#isUNC = this.#isUNC;
this.#rest.#isDrive = this.#isDrive;
return this.#rest;
}
/**
* true if the pattern represents a //unc/path/ on windows
*/
isUNC() {
const pl = this.#patternList;
return this.#isUNC !== undefined ?
this.#isUNC
: (this.#isUNC =
this.#platform === 'win32' &&
this.#index === 0 &&
pl[0] === '' &&
pl[1] === '' &&
typeof pl[2] === 'string' &&
!!pl[2] &&
typeof pl[3] === 'string' &&
!!pl[3]);
}
// pattern like C:/...
// split = ['C:', ...]
// XXX: would be nice to handle patterns like `c:*` to test the cwd
// in c: for *, but I don't know of a way to even figure out what that
// cwd is without actually chdir'ing into it?
/**
* True if the pattern starts with a drive letter on Windows
*/
isDrive() {
const pl = this.#patternList;
return this.#isDrive !== undefined ?
this.#isDrive
: (this.#isDrive =
this.#platform === 'win32' &&
this.#index === 0 &&
this.length > 1 &&
typeof pl[0] === 'string' &&
/^[a-z]:$/i.test(pl[0]));
}
// pattern = '/' or '/...' or '/x/...'
// split = ['', ''] or ['', ...] or ['', 'x', ...]
// Drive and UNC both considered absolute on windows
/**
* True if the pattern is rooted on an absolute path
*/
isAbsolute() {
const pl = this.#patternList;
return this.#isAbsolute !== undefined ?
this.#isAbsolute
: (this.#isAbsolute =
(pl[0] === '' && pl.length > 1) ||
this.isDrive() ||
this.isUNC());
}
/**
* consume the root of the pattern, and return it
*/
root() {
const p = this.#patternList[0];
return (typeof p === 'string' && this.isAbsolute() && this.#index === 0) ?
p
: '';
}
/**
* Check to see if the current globstar pattern is allowed to follow
* a symbolic link.
*/
checkFollowGlobstar() {
return !(this.#index === 0 ||
!this.isGlobstar() ||
!this.#followGlobstar);
}
/**
* Mark that the current globstar pattern is following a symbolic link
*/
markFollowGlobstar() {
if (this.#index === 0 || !this.isGlobstar() || !this.#followGlobstar)
return false;
this.#followGlobstar = false;
return true;
}
}
// give it a pattern, and it'll be able to tell you if
// a given path should be ignored.
// Ignoring a path ignores its children if the pattern ends in /**
// Ignores are always parsed in dot:true mode
const defaultPlatform$1 = (typeof process === 'object' &&
process &&
typeof process.platform === 'string') ?
process.platform
: 'linux';
/**
* Class used to process ignored patterns
*/
class Ignore {
relative;
relativeChildren;
absolute;
absoluteChildren;
platform;
mmopts;
constructor(ignored, { nobrace, nocase, noext, noglobstar, platform = defaultPlatform$1, }) {
this.relative = [];
this.absolute = [];
this.relativeChildren = [];
this.absoluteChildren = [];
this.platform = platform;
this.mmopts = {
dot: true,
nobrace,
nocase,
noext,
noglobstar,
optimizationLevel: 2,
platform,
nocomment: true,
nonegate: true,
};
for (const ign of ignored)
this.add(ign);
}
add(ign) {
// this is a little weird, but it gives us a clean set of optimized
// minimatch matchers, without getting tripped up if one of them
// ends in /** inside a brace section, and it's only inefficient at
// the start of the walk, not along it.
// It'd be nice if the Pattern class just had a .test() method, but
// handling globstars is a bit of a pita, and that code already lives
// in minimatch anyway.
// Another way would be if maybe Minimatch could take its set/globParts
// as an option, and then we could at least just use Pattern to test
// for absolute-ness.
// Yet another way, Minimatch could take an array of glob strings, and
// a cwd option, and do the right thing.
const mm = new Minimatch(ign, this.mmopts);
for (let i = 0; i < mm.set.length; i++) {
const parsed = mm.set[i];
const globParts = mm.globParts[i];
/* c8 ignore start */
if (!parsed || !globParts) {
throw new Error('invalid pattern object');
}
// strip off leading ./ portions
// https://github.com/isaacs/node-glob/issues/570
while (parsed[0] === '.' && globParts[0] === '.') {
parsed.shift();
globParts.shift();
}
/* c8 ignore stop */
const p = new Pattern(parsed, globParts, 0, this.platform);
const m = new Minimatch(p.globString(), this.mmopts);
const children = globParts[globParts.length - 1] === '**';
const absolute = p.isAbsolute();
if (absolute)
this.absolute.push(m);
else
this.relative.push(m);
if (children) {
if (absolute)
this.absoluteChildren.push(m);
else
this.relativeChildren.push(m);
}
}
}
ignored(p) {
const fullpath = p.fullpath();
const fullpaths = `${fullpath}/`;
const relative = p.relative() || '.';
const relatives = `${relative}/`;
for (const m of this.relative) {
if (m.match(relative) || m.match(relatives))
return true;
}
for (const m of this.absolute) {
if (m.match(fullpath) || m.match(fullpaths))
return true;
}
return false;
}
childrenIgnored(p) {
const fullpath = p.fullpath() + '/';
const relative = (p.relative() || '.') + '/';
for (const m of this.relativeChildren) {
if (m.match(relative))
return true;
}
for (const m of this.absoluteChildren) {
if (m.match(fullpath))
return true;
}
return false;
}
}
// synchronous utility for filtering entries and calculating subwalks
/**
* A cache of which patterns have been processed for a given Path
*/
class HasWalkedCache {
store;
constructor(store = new Map()) {
this.store = store;
}
copy() {
return new HasWalkedCache(new Map(this.store));
}
hasWalked(target, pattern) {
return this.store.get(target.fullpath())?.has(pattern.globString());
}
storeWalked(target, pattern) {
const fullpath = target.fullpath();
const cached = this.store.get(fullpath);
if (cached)
cached.add(pattern.globString());
else
this.store.set(fullpath, new Set([pattern.globString()]));
}
}
/**
* A record of which paths have been matched in a given walk step,
* and whether they only are considered a match if they are a directory,
* and whether their absolute or relative path should be returned.
*/
class MatchRecord {
store = new Map();
add(target, absolute, ifDir) {
const n = (absolute ? 2 : 0) | (ifDir ? 1 : 0);
const current = this.store.get(target);
this.store.set(target, current === undefined ? n : n & current);
}
// match, absolute, ifdir
entries() {
return [...this.store.entries()].map(([path, n]) => [
path,
!!(n & 2),
!!(n & 1),
]);
}
}
/**
* A collection of patterns that must be processed in a subsequent step
* for a given path.
*/
class SubWalks {
store = new Map();
add(target, pattern) {
if (!target.canReaddir()) {
return;
}
const subs = this.store.get(target);
if (subs) {
if (!subs.find(p => p.globString() === pattern.globString())) {
subs.push(pattern);
}
}
else
this.store.set(target, [pattern]);
}
get(target) {
const subs = this.store.get(target);
/* c8 ignore start */
if (!subs) {
throw new Error('attempting to walk unknown path');
}
/* c8 ignore stop */
return subs;
}
entries() {
return this.keys().map(k => [k, this.store.get(k)]);
}
keys() {
return [...this.store.keys()].filter(t => t.canReaddir());
}
}
/**
* The class that processes patterns for a given path.
*
* Handles child entry filtering, and determining whether a path's
* directory contents must be read.
*/
class Processor {
hasWalkedCache;
matches = new MatchRecord();
subwalks = new SubWalks();
patterns;
follow;
dot;
opts;
constructor(opts, hasWalkedCache) {
this.opts = opts;
this.follow = !!opts.follow;
this.dot = !!opts.dot;
this.hasWalkedCache =
hasWalkedCache ? hasWalkedCache.copy() : new HasWalkedCache();
}
processPatterns(target, patterns) {
this.patterns = patterns;
const processingSet = patterns.map(p => [target, p]);
// map of paths to the magic-starting subwalks they need to walk
// first item in patterns is the filter
for (let [t, pattern] of processingSet) {
this.hasWalkedCache.storeWalked(t, pattern);
const root = pattern.root();
const absolute = pattern.isAbsolute() && this.opts.absolute !== false;
// start absolute patterns at root
if (root) {
t = t.resolve(root === '/' && this.opts.root !== undefined ?
this.opts.root
: root);
const rest = pattern.rest();
if (!rest) {
this.matches.add(t, true, false);
continue;
}
else {
pattern = rest;
}
}
if (t.isENOENT())
continue;
let p;
let rest;
let changed = false;
while (typeof (p = pattern.pattern()) === 'string' &&
(rest = pattern.rest())) {
const c = t.resolve(p);
t = c;
pattern = rest;
changed = true;
}
p = pattern.pattern();
rest = pattern.rest();
if (changed) {
if (this.hasWalkedCache.hasWalked(t, pattern))
continue;
this.hasWalkedCache.storeWalked(t, pattern);
}
// now we have either a final string for a known entry,
// more strings for an unknown entry,
// or a pattern starting with magic, mounted on t.
if (typeof p === 'string') {
// must not be final entry, otherwise we would have
// concatenated it earlier.
const ifDir = p === '..' || p === '' || p === '.';
this.matches.add(t.resolve(p), absolute, ifDir);
continue;
}
else if (p === GLOBSTAR$2) {
// if no rest, match and subwalk pattern
// if rest, process rest and subwalk pattern
// if it's a symlink, but we didn't get here by way of a
// globstar match (meaning it's the first time THIS globstar
// has traversed a symlink), then we follow it. Otherwise, stop.
if (!t.isSymbolicLink() ||
this.follow ||
pattern.checkFollowGlobstar()) {
this.subwalks.add(t, pattern);
}
const rp = rest?.pattern();
const rrest = rest?.rest();
if (!rest || ((rp === '' || rp === '.') && !rrest)) {
// only HAS to be a dir if it ends in **/ or **/.
// but ending in ** will match files as well.
this.matches.add(t, absolute, rp === '' || rp === '.');
}
else {
if (rp === '..') {
// this would mean you're matching **/.. at the fs root,
// and no thanks, I'm not gonna test that specific case.
/* c8 ignore start */
const tp = t.parent || t;
/* c8 ignore stop */
if (!rrest)
this.matches.add(tp, absolute, true);
else if (!this.hasWalkedCache.hasWalked(tp, rrest)) {
this.subwalks.add(tp, rrest);
}
}
}
}
else if (p instanceof RegExp) {
this.subwalks.add(t, pattern);
}
}
return this;
}
subwalkTargets() {
return this.subwalks.keys();
}
child() {
return new Processor(this.opts, this.hasWalkedCache);
}
// return a new Processor containing the subwalks for each
// child entry, and a set of matches, and
// a hasWalkedCache that's a copy of this one;
// the caller then walks each of those subwalks via walkCB2.
filterEntries(parent, entries) {
const patterns = this.subwalks.get(parent);
// put matches and entry walks into the results processor
const results = this.child();
for (const e of entries) {
for (const pattern of patterns) {
const absolute = pattern.isAbsolute();
const p = pattern.pattern();
const rest = pattern.rest();
if (p === GLOBSTAR$2) {
results.testGlobstar(e, pattern, rest, absolute);
}
else if (p instanceof RegExp) {
results.testRegExp(e, p, rest, absolute);
}
else {
results.testString(e, p, rest, absolute);
}
}
}
return results;
}
testGlobstar(e, pattern, rest, absolute) {
if (this.dot || !e.name.startsWith('.')) {
if (!pattern.hasMore()) {
this.matches.add(e, absolute, false);
}
if (e.canReaddir()) {
// if we're in follow mode or it's not a symlink, just keep
// testing the same pattern. If there's more after the globstar,
// then this symlink consumes the globstar. If not, then we can
// follow at most ONE symlink along the way, so we mark it, which
// also checks to ensure that it wasn't already marked.
if (this.follow || !e.isSymbolicLink()) {
this.subwalks.add(e, pattern);
}
else if (e.isSymbolicLink()) {
if (rest && pattern.checkFollowGlobstar()) {
this.subwalks.add(e, rest);
}
else if (pattern.markFollowGlobstar()) {
this.subwalks.add(e, pattern);
}
}
}
}
// if the NEXT thing matches this entry, then also add
// the rest.
if (rest) {
const rp = rest.pattern();
if (typeof rp === 'string' &&
// dots and empty were handled already
rp !== '..' &&
rp !== '' &&
rp !== '.') {
this.testString(e, rp, rest.rest(), absolute);
}
else if (rp === '..') {
/* c8 ignore start */
const ep = e.parent || e;
/* c8 ignore stop */
this.subwalks.add(ep, rest);
}
else if (rp instanceof RegExp) {
this.testRegExp(e, rp, rest.rest(), absolute);
}
}
}
testRegExp(e, p, rest, absolute) {
if (!p.test(e.name))
return;
if (!rest) {
this.matches.add(e, absolute, false);
}
else {
this.subwalks.add(e, rest);
}
}
testString(e, p, rest, absolute) {
// should never happen?
if (!e.isNamed(p))
return;
if (!rest) {
this.matches.add(e, absolute, false);
}
else {
this.subwalks.add(e, rest);
}
}
}
/**
* Single-use utility classes to provide functionality to the {@link Glob}
* methods.
*
* @module
*/
const makeIgnore = (ignore, opts) => typeof ignore === 'string' ? new Ignore([ignore], opts)
: Array.isArray(ignore) ? new Ignore(ignore, opts)
: ignore;
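// Sketch of makeIgnore() above: strings and arrays are wrapped in Ignore,
// anything else (a custom ignore object) is passed through unchanged.
// `opts` and `customIgnore` below are hypothetical.
//
//   makeIgnore('node_modules/**', opts);     // -> new Ignore(['node_modules/**'], opts)
//   makeIgnore(['dist/**', '*.log'], opts);  // -> new Ignore([...], opts)
//   makeIgnore(customIgnore, opts);          // -> customIgnore, as-is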
/**
* basic walking utilities that all the glob walker types use
*/
class GlobUtil {
path;
patterns;
opts;
seen = new Set();
paused = false;
aborted = false;
#onResume = [];
#ignore;
#sep;
signal;
maxDepth;
includeChildMatches;
constructor(patterns, path, opts) {
this.patterns = patterns;
this.path = path;
this.opts = opts;
this.#sep = !opts.posix && opts.platform === 'win32' ? '\\' : '/';
this.includeChildMatches = opts.includeChildMatches !== false;
if (opts.ignore || !this.includeChildMatches) {
this.#ignore = makeIgnore(opts.ignore ?? [], opts);
if (!this.includeChildMatches &&
typeof this.#ignore.add !== 'function') {
const m = 'cannot ignore child matches, ignore lacks add() method.';
throw new Error(m);
}
}
// c8 coverage ignored: Glob always sets maxDepth, but it's optional on the
// GlobOptions type
/* c8 ignore start */
this.maxDepth = opts.maxDepth || Infinity;
/* c8 ignore stop */
if (opts.signal) {
this.signal = opts.signal;
this.signal.addEventListener('abort', () => {
this.#onResume.length = 0;
});
}
}
#ignored(path) {
return this.seen.has(path) || !!this.#ignore?.ignored?.(path);
}
#childrenIgnored(path) {
return !!this.#ignore?.childrenIgnored?.(path);
}
// backpressure mechanism
pause() {
this.paused = true;
}
resume() {
/* c8 ignore start */
if (this.signal?.aborted)
return;
/* c8 ignore stop */
this.paused = false;
let fn = undefined;
while (!this.paused && (fn = this.#onResume.shift())) {
fn();
}
}
onResume(fn) {
if (this.signal?.aborted)
return;
/* c8 ignore start */
if (!this.paused) {
fn();
}
else {
/* c8 ignore stop */
this.#onResume.push(fn);
}
}
// do the requisite realpath/stat checking, and return the path
// to add or undefined to filter it out.
async matchCheck(e, ifDir) {
if (ifDir && this.opts.nodir)
return undefined;
let rpc;
if (this.opts.realpath) {
rpc = e.realpathCached() || (await e.realpath());
if (!rpc)
return undefined;
e = rpc;
}
const needStat = e.isUnknown() || this.opts.stat;
const s = needStat ? await e.lstat() : e;
if (this.opts.follow && this.opts.nodir && s?.isSymbolicLink()) {
const target = await s.realpath();
/* c8 ignore start */
if (target && (target.isUnknown() || this.opts.stat)) {
await target.lstat();
}
/* c8 ignore stop */
}
return this.matchCheckTest(s, ifDir);
}
matchCheckTest(e, ifDir) {
return (e &&
(this.maxDepth === Infinity || e.depth() <= this.maxDepth) &&
(!ifDir || e.canReaddir()) &&
(!this.opts.nodir || !e.isDirectory()) &&
(!this.opts.nodir ||
!this.opts.follow ||
!e.isSymbolicLink() ||
!e.realpathCached()?.isDirectory()) &&
!this.#ignored(e)) ?
e
: undefined;
}
matchCheckSync(e, ifDir) {
if (ifDir && this.opts.nodir)
return undefined;
let rpc;
if (this.opts.realpath) {
rpc = e.realpathCached() || e.realpathSync();
if (!rpc)
return undefined;
e = rpc;
}
const needStat = e.isUnknown() || this.opts.stat;
const s = needStat ? e.lstatSync() : e;
if (this.opts.follow && this.opts.nodir && s?.isSymbolicLink()) {
const target = s.realpathSync();
if (target && (target?.isUnknown() || this.opts.stat)) {
target.lstatSync();
}
}
return this.matchCheckTest(s, ifDir);
}
matchFinish(e, absolute) {
if (this.#ignored(e))
return;
// we know we have an ignore if this is false, but TS doesn't
if (!this.includeChildMatches && this.#ignore?.add) {
const ign = `${e.relativePosix()}/**`;
this.#ignore.add(ign);
}
const abs = this.opts.absolute === undefined ? absolute : this.opts.absolute;
this.seen.add(e);
const mark = this.opts.mark && e.isDirectory() ? this.#sep : '';
// ok, we have what we need!
if (this.opts.withFileTypes) {
this.matchEmit(e);
}
else if (abs) {
const abs = this.opts.posix ? e.fullpathPosix() : e.fullpath();
this.matchEmit(abs + mark);
}
else {
const rel = this.opts.posix ? e.relativePosix() : e.relative();
const pre = this.opts.dotRelative && !rel.startsWith('..' + this.#sep) ?
'.' + this.#sep
: '';
this.matchEmit(!rel ? '.' + mark : pre + rel + mark);
}
}
async match(e, absolute, ifDir) {
const p = await this.matchCheck(e, ifDir);
if (p)
this.matchFinish(p, absolute);
}
matchSync(e, absolute, ifDir) {
const p = this.matchCheckSync(e, ifDir);
if (p)
this.matchFinish(p, absolute);
}
walkCB(target, patterns, cb) {
/* c8 ignore start */
if (this.signal?.aborted)
cb();
/* c8 ignore stop */
this.walkCB2(target, patterns, new Processor(this.opts), cb);
}
walkCB2(target, patterns, processor, cb) {
if (this.#childrenIgnored(target))
return cb();
if (this.signal?.aborted)
cb();
if (this.paused) {
this.onResume(() => this.walkCB2(target, patterns, processor, cb));
return;
}
processor.processPatterns(target, patterns);
// done processing. all of the above is sync, can be abstracted out.
// subwalks is a map of paths to the entry filters they need
// matches is a map of paths to [absolute, ifDir] tuples.
let tasks = 1;
const next = () => {
if (--tasks === 0)
cb();
};
for (const [m, absolute, ifDir] of processor.matches.entries()) {
if (this.#ignored(m))
continue;
tasks++;
this.match(m, absolute, ifDir).then(() => next());
}
for (const t of processor.subwalkTargets()) {
if (this.maxDepth !== Infinity && t.depth() >= this.maxDepth) {
continue;
}
tasks++;
const childrenCached = t.readdirCached();
if (t.calledReaddir())
this.walkCB3(t, childrenCached, processor, next);
else {
t.readdirCB((_, entries) => this.walkCB3(t, entries, processor, next), true);
}
}
next();
}
walkCB3(target, entries, processor, cb) {
processor = processor.filterEntries(target, entries);
let tasks = 1;
const next = () => {
if (--tasks === 0)
cb();
};
for (const [m, absolute, ifDir] of processor.matches.entries()) {
if (this.#ignored(m))
continue;
tasks++;
this.match(m, absolute, ifDir).then(() => next());
}
for (const [target, patterns] of processor.subwalks.entries()) {
tasks++;
this.walkCB2(target, patterns, processor.child(), next);
}
next();
}
walkCBSync(target, patterns, cb) {
/* c8 ignore start */
if (this.signal?.aborted)
cb();
/* c8 ignore stop */
this.walkCB2Sync(target, patterns, new Processor(this.opts), cb);
}
walkCB2Sync(target, patterns, processor, cb) {
if (this.#childrenIgnored(target))
return cb();
if (this.signal?.aborted)
cb();
if (this.paused) {
this.onResume(() => this.walkCB2Sync(target, patterns, processor, cb));
return;
}
processor.processPatterns(target, patterns);
// done processing. all of the above is sync, can be abstracted out.
// subwalks is a map of paths to the entry filters they need
// matches is a map of paths to [absolute, ifDir] tuples.
let tasks = 1;
const next = () => {
if (--tasks === 0)
cb();
};
for (const [m, absolute, ifDir] of processor.matches.entries()) {
if (this.#ignored(m))
continue;
this.matchSync(m, absolute, ifDir);
}
for (const t of processor.subwalkTargets()) {
if (this.maxDepth !== Infinity && t.depth() >= this.maxDepth) {
continue;
}
tasks++;
const children = t.readdirSync();
this.walkCB3Sync(t, children, processor, next);
}
next();
}
walkCB3Sync(target, entries, processor, cb) {
processor = processor.filterEntries(target, entries);
let tasks = 1;
const next = () => {
if (--tasks === 0)
cb();
};
for (const [m, absolute, ifDir] of processor.matches.entries()) {
if (this.#ignored(m))
continue;
this.matchSync(m, absolute, ifDir);
}
for (const [target, patterns] of processor.subwalks.entries()) {
tasks++;
this.walkCB2Sync(target, patterns, processor.child(), next);
}
next();
}
}
class GlobWalker extends GlobUtil {
matches = new Set();
constructor(patterns, path, opts) {
super(patterns, path, opts);
}
matchEmit(e) {
this.matches.add(e);
}
async walk() {
if (this.signal?.aborted)
throw this.signal.reason;
if (this.path.isUnknown()) {
await this.path.lstat();
}
await new Promise((res, rej) => {
this.walkCB(this.path, this.patterns, () => {
if (this.signal?.aborted) {
rej(this.signal.reason);
}
else {
res(this.matches);
}
});
});
return this.matches;
}
walkSync() {
if (this.signal?.aborted)
throw this.signal.reason;
if (this.path.isUnknown()) {
this.path.lstatSync();
}
// nothing for the callback to do, because this never pauses
this.walkCBSync(this.path, this.patterns, () => {
if (this.signal?.aborted)
throw this.signal.reason;
});
return this.matches;
}
}
class GlobStream extends GlobUtil {
results;
constructor(patterns, path, opts) {
super(patterns, path, opts);
this.results = new Minipass({
signal: this.signal,
objectMode: true,
});
this.results.on('drain', () => this.resume());
this.results.on('resume', () => this.resume());
}
matchEmit(e) {
this.results.write(e);
if (!this.results.flowing)
this.pause();
}
stream() {
const target = this.path;
if (target.isUnknown()) {
target.lstat().then(() => {
this.walkCB(target, this.patterns, () => this.results.end());
});
}
else {
this.walkCB(target, this.patterns, () => this.results.end());
}
return this.results;
}
streamSync() {
if (this.path.isUnknown()) {
this.path.lstatSync();
}
this.walkCBSync(this.path, this.patterns, () => this.results.end());
return this.results;
}
}
// if no process global, just call it linux.
// so we default to case-sensitive, / separators
const defaultPlatform = (typeof process === 'object' &&
process &&
typeof process.platform === 'string') ?
process.platform
: 'linux';
/**
* An object that can perform glob pattern traversals.
*/
class Glob {
absolute;
cwd;
root;
dot;
dotRelative;
follow;
ignore;
magicalBraces;
mark;
matchBase;
maxDepth;
nobrace;
nocase;
nodir;
noext;
noglobstar;
pattern;
platform;
realpath;
scurry;
stat;
signal;
windowsPathsNoEscape;
withFileTypes;
includeChildMatches;
/**
* The options provided to the constructor.
*/
opts;
/**
* An array of parsed immutable {@link Pattern} objects.
*/
patterns;
/**
* All options are stored as properties on the `Glob` object.
*
* See {@link GlobOptions} for full options descriptions.
*
* Note that a previous `Glob` object can be passed as the
* `GlobOptions` to another `Glob` instantiation to re-use settings
* and caches with a new pattern.
*
* Traversal functions can be called multiple times to run the walk
* again.
*/
constructor(pattern, opts) {
/* c8 ignore start */
if (!opts)
throw new TypeError('glob options required');
/* c8 ignore stop */
this.withFileTypes = !!opts.withFileTypes;
this.signal = opts.signal;
this.follow = !!opts.follow;
this.dot = !!opts.dot;
this.dotRelative = !!opts.dotRelative;
this.nodir = !!opts.nodir;
this.mark = !!opts.mark;
if (!opts.cwd) {
this.cwd = '';
}
else if (opts.cwd instanceof URL || opts.cwd.startsWith('file://')) {
opts.cwd = fileURLToPath(opts.cwd);
}
this.cwd = opts.cwd || '';
this.root = opts.root;
this.magicalBraces = !!opts.magicalBraces;
this.nobrace = !!opts.nobrace;
this.noext = !!opts.noext;
this.realpath = !!opts.realpath;
this.absolute = opts.absolute;
this.includeChildMatches = opts.includeChildMatches !== false;
this.noglobstar = !!opts.noglobstar;
this.matchBase = !!opts.matchBase;
this.maxDepth =
typeof opts.maxDepth === 'number' ? opts.maxDepth : Infinity;
this.stat = !!opts.stat;
this.ignore = opts.ignore;
if (this.withFileTypes && this.absolute !== undefined) {
throw new Error('cannot set absolute and withFileTypes:true');
}
if (typeof pattern === 'string') {
pattern = [pattern];
}
this.windowsPathsNoEscape =
!!opts.windowsPathsNoEscape ||
opts.allowWindowsEscape ===
false;
if (this.windowsPathsNoEscape) {
pattern = pattern.map(p => p.replace(/\\/g, '/'));
}
if (this.matchBase) {
if (opts.noglobstar) {
throw new TypeError('base matching requires globstar');
}
pattern = pattern.map(p => (p.includes('/') ? p : `./**/${p}`));
}
this.pattern = pattern;
this.platform = opts.platform || defaultPlatform;
this.opts = { ...opts, platform: this.platform };
if (opts.scurry) {
this.scurry = opts.scurry;
if (opts.nocase !== undefined &&
opts.nocase !== opts.scurry.nocase) {
throw new Error('nocase option contradicts provided scurry option');
}
}
else {
const Scurry = opts.platform === 'win32' ? PathScurryWin32
: opts.platform === 'darwin' ? PathScurryDarwin
: opts.platform ? PathScurryPosix
: PathScurry;
this.scurry = new Scurry(this.cwd, {
nocase: opts.nocase,
fs: opts.fs,
});
}
this.nocase = this.scurry.nocase;
// If you do nocase:true on a case-sensitive file system, then
// we need to use regexps instead of strings for non-magic
// path portions, because statting `aBc` won't return results
// for the file `AbC` for example.
const nocaseMagicOnly = this.platform === 'darwin' || this.platform === 'win32';
const mmo = {
// default nocase based on platform
...opts,
dot: this.dot,
matchBase: this.matchBase,
nobrace: this.nobrace,
nocase: this.nocase,
nocaseMagicOnly,
nocomment: true,
noext: this.noext,
nonegate: true,
optimizationLevel: 2,
platform: this.platform,
windowsPathsNoEscape: this.windowsPathsNoEscape,
debug: !!this.opts.debug,
};
const mms = this.pattern.map(p => new Minimatch(p, mmo));
const [matchSet, globParts] = mms.reduce((set, m) => {
set[0].push(...m.set);
set[1].push(...m.globParts);
return set;
}, [[], []]);
this.patterns = matchSet.map((set, i) => {
const g = globParts[i];
/* c8 ignore start */
if (!g)
throw new Error('invalid pattern object');
/* c8 ignore stop */
return new Pattern(set, g, 0, this.platform);
});
}
async walk() {
// Walkers always return an array of Path objects, so we just have to
// coerce them into the right shape. It will have already called
// realpath() if the option was set to do so, so we know that's cached.
// start out knowing the cwd, at least
return [
...(await new GlobWalker(this.patterns, this.scurry.cwd, {
...this.opts,
maxDepth: this.maxDepth !== Infinity ?
this.maxDepth + this.scurry.cwd.depth()
: Infinity,
platform: this.platform,
nocase: this.nocase,
includeChildMatches: this.includeChildMatches,
}).walk()),
];
}
walkSync() {
return [
...new GlobWalker(this.patterns, this.scurry.cwd, {
...this.opts,
maxDepth: this.maxDepth !== Infinity ?
this.maxDepth + this.scurry.cwd.depth()
: Infinity,
platform: this.platform,
nocase: this.nocase,
includeChildMatches: this.includeChildMatches,
}).walkSync(),
];
}
stream() {
return new GlobStream(this.patterns, this.scurry.cwd, {
...this.opts,
maxDepth: this.maxDepth !== Infinity ?
this.maxDepth + this.scurry.cwd.depth()
: Infinity,
platform: this.platform,
nocase: this.nocase,
includeChildMatches: this.includeChildMatches,
}).stream();
}
streamSync() {
return new GlobStream(this.patterns, this.scurry.cwd, {
...this.opts,
maxDepth: this.maxDepth !== Infinity ?
this.maxDepth + this.scurry.cwd.depth()
: Infinity,
platform: this.platform,
nocase: this.nocase,
includeChildMatches: this.includeChildMatches,
}).streamSync();
}
/**
* Default sync iteration function. Returns a Generator that
* iterates over the results.
*/
iterateSync() {
return this.streamSync()[Symbol.iterator]();
}
[Symbol.iterator]() {
return this.iterateSync();
}
/**
* Default async iteration function. Returns an AsyncGenerator that
* iterates over the results.
*/
iterate() {
return this.stream()[Symbol.asyncIterator]();
}
[Symbol.asyncIterator]() {
return this.iterate();
}
}
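// Hedged usage sketch for the Glob class above: per its JSDoc, a previous Glob
// can be passed as the options for a new one to re-use settings and caches.
// The patterns and cwd below are hypothetical.
//
//   const g1 = new Glob('**/*.js', { cwd: '/srv/app', nodir: true });
//   const jsFiles = await g1.walk();       // string[] of matches
//   const g2 = new Glob('**/*.css', g1);   // shares g1's scurry caches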
/**
* Return true if the patterns provided contain any magic glob characters,
* given the options provided.
*
* Brace expansion is not considered "magic" unless the `magicalBraces` option
* is set, as brace expansion just turns one string into an array of strings.
* So a pattern like `'x{a,b}y'` would return `false`, because `'xay'` and
* `'xby'` both do not contain any magic glob characters, and it's treated the
* same as if you had called it on `['xay', 'xby']`. When `magicalBraces:true`
* is in the options, brace expansion _is_ treated as a pattern having magic.
*/
const hasMagic = (pattern, options = {}) => {
if (!Array.isArray(pattern)) {
pattern = [pattern];
}
for (const p of pattern) {
if (new Minimatch(p, options).hasMagic())
return true;
}
return false;
};
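// Sketch of hasMagic() per the JSDoc above (brace expansion is only "magic"
// when magicalBraces is set):
//
//   hasMagic('x{a,b}y');                           // false
//   hasMagic('x{a,b}y', { magicalBraces: true });  // true
//   hasMagic('src/*.js');                          // true ('*' is magic)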
function globStreamSync(pattern, options = {}) {
return new Glob(pattern, options).streamSync();
}
function globStream(pattern, options = {}) {
return new Glob(pattern, options).stream();
}
function globSync(pattern, options = {}) {
return new Glob(pattern, options).walkSync();
}
async function glob_(pattern, options = {}) {
return new Glob(pattern, options).walk();
}
function globIterateSync(pattern, options = {}) {
return new Glob(pattern, options).iterateSync();
}
function globIterate(pattern, options = {}) {
return new Glob(pattern, options).iterate();
}
// aliases: glob.sync.stream() glob.stream.sync() glob.sync() etc
const streamSync = globStreamSync;
const stream$5 = Object.assign(globStream, { sync: globStreamSync });
const iterateSync = globIterateSync;
const iterate = Object.assign(globIterate, {
sync: globIterateSync,
});
const sync$9 = Object.assign(globSync, {
stream: globStreamSync,
iterate: globIterateSync,
});
const glob$1 = Object.assign(glob_, {
glob: glob_,
globSync,
sync: sync$9,
globStream,
stream: stream$5,
globStreamSync,
streamSync,
globIterate,
iterate,
globIterateSync,
iterateSync,
Glob,
hasMagic,
escape: escape$2,
unescape: unescape$1,
});
glob$1.glob = glob$1;
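// Hedged sketch of the combined export assembled above: the same walk can be
// run as a promise, synchronously, as a stream, or as an iterator. The pattern
// below is hypothetical.
//
//   await glob$1('**/*.md', {});         // Promise<string[]>
//   glob$1.sync('**/*.md', {});          // string[]
//   glob$1.stream('**/*.md', {});        // Minipass object stream of matches
//   for (const m of glob$1.iterateSync('**/*.md', {})) { /* ... */ }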
const comma = ','.charCodeAt(0);
const semicolon = ';'.charCodeAt(0);
const chars$1 = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/';
const intToChar = new Uint8Array(64); // 64 possible chars.
const charToInt = new Uint8Array(128); // z is 122 in ASCII
for (let i = 0; i < chars$1.length; i++) {
const c = chars$1.charCodeAt(i);
intToChar[i] = c;
charToInt[c] = i;
}
function decodeInteger(reader, relative) {
let value = 0;
let shift = 0;
let integer = 0;
do {
const c = reader.next();
integer = charToInt[c];
value |= (integer & 31) << shift;
shift += 5;
} while (integer & 32);
const shouldNegate = value & 1;
value >>>= 1;
if (shouldNegate) {
value = -0x80000000 | -value;
}
return relative + value;
}
function encodeInteger(builder, num, relative) {
let delta = num - relative;
delta = delta < 0 ? (-delta << 1) | 1 : delta << 1;
do {
let clamped = delta & 0b011111;
delta >>>= 5;
if (delta > 0)
clamped |= 0b100000;
builder.write(intToChar[clamped]);
} while (delta > 0);
return num;
}
function hasMoreVlq(reader, max) {
if (reader.pos >= max)
return false;
return reader.peek() !== comma;
}
const bufLength = 1024 * 16;
// Provide a fallback for older environments.
const td = typeof TextDecoder !== 'undefined'
? /* #__PURE__ */ new TextDecoder()
: typeof Buffer !== 'undefined'
? {
decode(buf) {
const out = Buffer.from(buf.buffer, buf.byteOffset, buf.byteLength);
return out.toString();
},
}
: {
decode(buf) {
let out = '';
for (let i = 0; i < buf.length; i++) {
out += String.fromCharCode(buf[i]);
}
return out;
},
};
class StringWriter {
constructor() {
this.pos = 0;
this.out = '';
this.buffer = new Uint8Array(bufLength);
}
write(v) {
const { buffer } = this;
buffer[this.pos++] = v;
if (this.pos === bufLength) {
this.out += td.decode(buffer);
this.pos = 0;
}
}
flush() {
const { buffer, out, pos } = this;
return pos > 0 ? out + td.decode(buffer.subarray(0, pos)) : out;
}
}
class StringReader {
constructor(buffer) {
this.pos = 0;
this.buffer = buffer;
}
next() {
return this.buffer.charCodeAt(this.pos++);
}
peek() {
return this.buffer.charCodeAt(this.pos);
}
indexOf(char) {
const { buffer, pos } = this;
const idx = buffer.indexOf(char, pos);
return idx === -1 ? buffer.length : idx;
}
}
function decode(mappings) {
const { length } = mappings;
const reader = new StringReader(mappings);
const decoded = [];
let genColumn = 0;
let sourcesIndex = 0;
let sourceLine = 0;
let sourceColumn = 0;
let namesIndex = 0;
do {
const semi = reader.indexOf(';');
const line = [];
let sorted = true;
let lastCol = 0;
genColumn = 0;
while (reader.pos < semi) {
let seg;
genColumn = decodeInteger(reader, genColumn);
if (genColumn < lastCol)
sorted = false;
lastCol = genColumn;
if (hasMoreVlq(reader, semi)) {
sourcesIndex = decodeInteger(reader, sourcesIndex);
sourceLine = decodeInteger(reader, sourceLine);
sourceColumn = decodeInteger(reader, sourceColumn);
if (hasMoreVlq(reader, semi)) {
namesIndex = decodeInteger(reader, namesIndex);
seg = [genColumn, sourcesIndex, sourceLine, sourceColumn, namesIndex];
}
else {
seg = [genColumn, sourcesIndex, sourceLine, sourceColumn];
}
}
else {
seg = [genColumn];
}
line.push(seg);
reader.pos++;
}
if (!sorted)
sort(line);
decoded.push(line);
reader.pos = semi + 1;
} while (reader.pos <= length);
return decoded;
}
function sort(line) {
line.sort(sortComparator$1);
}
function sortComparator$1(a, b) {
return a[0] - b[0];
}
function encode$1(decoded) {
const writer = new StringWriter();
let sourcesIndex = 0;
let sourceLine = 0;
let sourceColumn = 0;
let namesIndex = 0;
for (let i = 0; i < decoded.length; i++) {
const line = decoded[i];
if (i > 0)
writer.write(semicolon);
if (line.length === 0)
continue;
let genColumn = 0;
for (let j = 0; j < line.length; j++) {
const segment = line[j];
if (j > 0)
writer.write(comma);
genColumn = encodeInteger(writer, segment[0], genColumn);
if (segment.length === 1)
continue;
sourcesIndex = encodeInteger(writer, segment[1], sourcesIndex);
sourceLine = encodeInteger(writer, segment[2], sourceLine);
sourceColumn = encodeInteger(writer, segment[3], sourceColumn);
if (segment.length === 4)
continue;
namesIndex = encodeInteger(writer, segment[4], namesIndex);
}
}
return writer.flush();
}
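// Illustrative round-trip for the VLQ helpers above (hand-checked input):
// decode() expands a sourcemap "mappings" string into per-line segment
// arrays, and encode$1() packs them back.
//
//   decode('AAAA;;CACA');
//   // -> [ [[0, 0, 0, 0]], [], [[1, 0, 1, 0]] ]
//   encode$1([[[0, 0, 0, 0]], [], [[1, 0, 1, 0]]]);
//   // -> 'AAAA;;CACA'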
class BitSet {
constructor(arg) {
this.bits = arg instanceof BitSet ? arg.bits.slice() : [];
}
add(n) {
this.bits[n >> 5] |= 1 << (n & 31);
}
has(n) {
return !!(this.bits[n >> 5] & (1 << (n & 31)));
}
}
class Chunk {
constructor(start, end, content) {
this.start = start;
this.end = end;
this.original = content;
this.intro = '';
this.outro = '';
this.content = content;
this.storeName = false;
this.edited = false;
{
this.previous = null;
this.next = null;
}
}
appendLeft(content) {
this.outro += content;
}
appendRight(content) {
this.intro = this.intro + content;
}
clone() {
const chunk = new Chunk(this.start, this.end, this.original);
chunk.intro = this.intro;
chunk.outro = this.outro;
chunk.content = this.content;
chunk.storeName = this.storeName;
chunk.edited = this.edited;
return chunk;
}
contains(index) {
return this.start < index && index < this.end;
}
eachNext(fn) {
let chunk = this;
while (chunk) {
fn(chunk);
chunk = chunk.next;
}
}
eachPrevious(fn) {
let chunk = this;
while (chunk) {
fn(chunk);
chunk = chunk.previous;
}
}
edit(content, storeName, contentOnly) {
this.content = content;
if (!contentOnly) {
this.intro = '';
this.outro = '';
}
this.storeName = storeName;
this.edited = true;
return this;
}
prependLeft(content) {
this.outro = content + this.outro;
}
prependRight(content) {
this.intro = content + this.intro;
}
reset() {
this.intro = '';
this.outro = '';
if (this.edited) {
this.content = this.original;
this.storeName = false;
this.edited = false;
}
}
split(index) {
const sliceIndex = index - this.start;
const originalBefore = this.original.slice(0, sliceIndex);
const originalAfter = this.original.slice(sliceIndex);
this.original = originalBefore;
const newChunk = new Chunk(index, this.end, originalAfter);
newChunk.outro = this.outro;
this.outro = '';
this.end = index;
if (this.edited) {
// after a split, we should save the edit content record into the correct chunk
// to make sure the sourcemap stays correct
// For example:
// ' test'.trim()
// split -> ' ' + 'test'
// ✔️ edit -> '' + 'test'
// ✖️ edit -> 'test' + ''
// TODO is this block necessary?...
newChunk.edit('', false);
this.content = '';
} else {
this.content = originalBefore;
}
newChunk.next = this.next;
if (newChunk.next) newChunk.next.previous = newChunk;
newChunk.previous = this;
this.next = newChunk;
return newChunk;
}
toString() {
return this.intro + this.content + this.outro;
}
trimEnd(rx) {
this.outro = this.outro.replace(rx, '');
if (this.outro.length) return true;
const trimmed = this.content.replace(rx, '');
if (trimmed.length) {
if (trimmed !== this.content) {
this.split(this.start + trimmed.length).edit('', undefined, true);
if (this.edited) {
// save the change, if it has been edited
this.edit(trimmed, this.storeName, true);
}
}
return true;
} else {
this.edit('', undefined, true);
this.intro = this.intro.replace(rx, '');
if (this.intro.length) return true;
}
}
trimStart(rx) {
this.intro = this.intro.replace(rx, '');
if (this.intro.length) return true;
const trimmed = this.content.replace(rx, '');
if (trimmed.length) {
if (trimmed !== this.content) {
const newChunk = this.split(this.end - trimmed.length);
if (this.edited) {
// save the change, if it has been edited
newChunk.edit(trimmed, this.storeName, true);
}
this.edit('', undefined, true);
}
return true;
} else {
this.edit('', undefined, true);
this.outro = this.outro.replace(rx, '');
if (this.outro.length) return true;
}
}
}
function getBtoa() {
if (typeof globalThis !== 'undefined' && typeof globalThis.btoa === 'function') {
return (str) => globalThis.btoa(unescape(encodeURIComponent(str)));
} else if (typeof Buffer === 'function') {
return (str) => Buffer.from(str, 'utf-8').toString('base64');
} else {
return () => {
throw new Error('Unsupported environment: `window.btoa` or `Buffer` should be supported.');
};
}
}
const btoa$1 = /*#__PURE__*/ getBtoa();
let SourceMap$1 = class SourceMap {
constructor(properties) {
this.version = 3;
this.file = properties.file;
this.sources = properties.sources;
this.sourcesContent = properties.sourcesContent;
this.names = properties.names;
this.mappings = encode$1(properties.mappings);
if (typeof properties.x_google_ignoreList !== 'undefined') {
this.x_google_ignoreList = properties.x_google_ignoreList;
}
}
toString() {
return JSON.stringify(this);
}
toUrl() {
return 'data:application/json;charset=utf-8;base64,' + btoa$1(this.toString());
}
};
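// Hedged sketch: toUrl() above inlines the serialized map as a base64 data
// URI, suitable for a `//# sourceMappingURL=` comment. Field values are
// hypothetical.
//
//   const map = new SourceMap$1({ file: 'out.js', sources: ['in.js'],
//     sourcesContent: [null], names: [], mappings: [[[0, 0, 0, 0]]] });
//   map.toUrl(); // 'data:application/json;charset=utf-8;base64,...'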
function guessIndent(code) {
const lines = code.split('\n');
const tabbed = lines.filter((line) => /^\t+/.test(line));
const spaced = lines.filter((line) => /^ {2,}/.test(line));
if (tabbed.length === 0 && spaced.length === 0) {
return null;
}
// More lines tabbed than spaced? Assume tabs, and
// default to tabs in the case of a tie (or nothing
// to go on)
if (tabbed.length >= spaced.length) {
return '\t';
}
// Otherwise, we need to guess the multiple
const min = spaced.reduce((previous, current) => {
const numSpaces = /^ +/.exec(current)[0].length;
return Math.min(numSpaces, previous);
}, Infinity);
return new Array(min + 1).join(' ');
}
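// Illustrative sketch of guessIndent(): tabs win ties, otherwise the smallest
// run of leading spaces is used as the unit.
//
//   guessIndent('a\n\tb\n\tc');    // '\t'
//   guessIndent('a\n    b\n  c');  // '  ' (two spaces, the minimum seen)
//   guessIndent('a\nb');           // null (nothing to go on)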
function getRelativePath(from, to) {
const fromParts = from.split(/[/\\]/);
const toParts = to.split(/[/\\]/);
fromParts.pop(); // get dirname
while (fromParts[0] === toParts[0]) {
fromParts.shift();
toParts.shift();
}
if (fromParts.length) {
let i = fromParts.length;
while (i--) fromParts[i] = '..';
}
return fromParts.concat(toParts).join('/');
}
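// Sketch of getRelativePath(), which compares path segments after dropping
// the source file name:
//
//   getRelativePath('src/a/b.js', 'src/c/d.js'); // '../c/d.js'
//   getRelativePath('a.js', 'lib/b.js');         // 'lib/b.js'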
const toString$1 = Object.prototype.toString;
function isObject$2(thing) {
return toString$1.call(thing) === '[object Object]';
}
function getLocator(source) {
const originalLines = source.split('\n');
const lineOffsets = [];
for (let i = 0, pos = 0; i < originalLines.length; i++) {
lineOffsets.push(pos);
pos += originalLines[i].length + 1;
}
return function locate(index) {
let i = 0;
let j = lineOffsets.length;
while (i < j) {
const m = (i + j) >> 1;
if (index < lineOffsets[m]) {
j = m;
} else {
i = m + 1;
}
}
const line = i - 1;
const column = index - lineOffsets[line];
return { line, column };
};
}
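// Sketch of getLocator(): the returned locate() maps a character index to a
// zero-based { line, column } via binary search over line offsets.
//
//   const locate = getLocator('ab\ncd');
//   locate(4); // { line: 1, column: 1 } -- the 'd'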
const wordRegex = /\w/;
class Mappings {
constructor(hires) {
this.hires = hires;
this.generatedCodeLine = 0;
this.generatedCodeColumn = 0;
this.raw = [];
this.rawSegments = this.raw[this.generatedCodeLine] = [];
this.pending = null;
}
addEdit(sourceIndex, content, loc, nameIndex) {
if (content.length) {
const contentLengthMinusOne = content.length - 1;
let contentLineEnd = content.indexOf('\n', 0);
let previousContentLineEnd = -1;
// Loop through each line in the content and add a segment, but stop if the last line is empty,
// else code afterwards would fill one line too many
while (contentLineEnd >= 0 && contentLengthMinusOne > contentLineEnd) {
const segment = [this.generatedCodeColumn, sourceIndex, loc.line, loc.column];
if (nameIndex >= 0) {
segment.push(nameIndex);
}
this.rawSegments.push(segment);
this.generatedCodeLine += 1;
this.raw[this.generatedCodeLine] = this.rawSegments = [];
this.generatedCodeColumn = 0;
previousContentLineEnd = contentLineEnd;
contentLineEnd = content.indexOf('\n', contentLineEnd + 1);
}
const segment = [this.generatedCodeColumn, sourceIndex, loc.line, loc.column];
if (nameIndex >= 0) {
segment.push(nameIndex);
}
this.rawSegments.push(segment);
this.advance(content.slice(previousContentLineEnd + 1));
} else if (this.pending) {
this.rawSegments.push(this.pending);
this.advance(content);
}
this.pending = null;
}
addUneditedChunk(sourceIndex, chunk, original, loc, sourcemapLocations) {
let originalCharIndex = chunk.start;
let first = true;
// when iterating over each char, check whether it's inside a word boundary
let charInHiresBoundary = false;
while (originalCharIndex < chunk.end) {
if (this.hires || first || sourcemapLocations.has(originalCharIndex)) {
const segment = [this.generatedCodeColumn, sourceIndex, loc.line, loc.column];
if (this.hires === 'boundary') {
// in hires "boundary" mode, group segments per word boundary rather than per char
if (wordRegex.test(original[originalCharIndex])) {
// for the first char found in a boundary, start the boundary by pushing a segment
if (!charInHiresBoundary) {
this.rawSegments.push(segment);
charInHiresBoundary = true;
}
} else {
// for non-word char, end the boundary by pushing a segment
this.rawSegments.push(segment);
charInHiresBoundary = false;
}
} else {
this.rawSegments.push(segment);
}
}
if (original[originalCharIndex] === '\n') {
loc.line += 1;
loc.column = 0;
this.generatedCodeLine += 1;
this.raw[this.generatedCodeLine] = this.rawSegments = [];
this.generatedCodeColumn = 0;
first = true;
} else {
loc.column += 1;
this.generatedCodeColumn += 1;
first = false;
}
originalCharIndex += 1;
}
this.pending = null;
}
advance(str) {
if (!str) return;
const lines = str.split('\n');
if (lines.length > 1) {
for (let i = 0; i < lines.length - 1; i++) {
this.generatedCodeLine++;
this.raw[this.generatedCodeLine] = this.rawSegments = [];
}
this.generatedCodeColumn = 0;
}
this.generatedCodeColumn += lines[lines.length - 1].length;
}
}
const n$1 = '\n';
const warned = {
insertLeft: false,
insertRight: false,
storeName: false,
};
class MagicString {
constructor(string, options = {}) {
const chunk = new Chunk(0, string.length, string);
Object.defineProperties(this, {
original: { writable: true, value: string },
outro: { writable: true, value: '' },
intro: { writable: true, value: '' },
firstChunk: { writable: true, value: chunk },
lastChunk: { writable: true, value: chunk },
lastSearchedChunk: { writable: true, value: chunk },
byStart: { writable: true, value: {} },
byEnd: { writable: true, value: {} },
filename: { writable: true, value: options.filename },
indentExclusionRanges: { writable: true, value: options.indentExclusionRanges },
sourcemapLocations: { writable: true, value: new BitSet() },
storedNames: { writable: true, value: {} },
indentStr: { writable: true, value: undefined },
ignoreList: { writable: true, value: options.ignoreList },
});
this.byStart[0] = chunk;
this.byEnd[string.length] = chunk;
}
addSourcemapLocation(char) {
this.sourcemapLocations.add(char);
}
append(content) {
if (typeof content !== 'string') throw new TypeError('outro content must be a string');
this.outro += content;
return this;
}
appendLeft(index, content) {
if (typeof content !== 'string') throw new TypeError('inserted content must be a string');
this._split(index);
const chunk = this.byEnd[index];
if (chunk) {
chunk.appendLeft(content);
} else {
this.intro += content;
}
return this;
}
appendRight(index, content) {
if (typeof content !== 'string') throw new TypeError('inserted content must be a string');
this._split(index);
const chunk = this.byStart[index];
if (chunk) {
chunk.appendRight(content);
} else {
this.outro += content;
}
return this;
}
clone() {
const cloned = new MagicString(this.original, { filename: this.filename });
let originalChunk = this.firstChunk;
let clonedChunk = (cloned.firstChunk = cloned.lastSearchedChunk = originalChunk.clone());
while (originalChunk) {
cloned.byStart[clonedChunk.start] = clonedChunk;
cloned.byEnd[clonedChunk.end] = clonedChunk;
const nextOriginalChunk = originalChunk.next;
const nextClonedChunk = nextOriginalChunk && nextOriginalChunk.clone();
if (nextClonedChunk) {
clonedChunk.next = nextClonedChunk;
nextClonedChunk.previous = clonedChunk;
clonedChunk = nextClonedChunk;
}
originalChunk = nextOriginalChunk;
}
cloned.lastChunk = clonedChunk;
if (this.indentExclusionRanges) {
cloned.indentExclusionRanges = this.indentExclusionRanges.slice();
}
cloned.sourcemapLocations = new BitSet(this.sourcemapLocations);
cloned.intro = this.intro;
cloned.outro = this.outro;
return cloned;
}
generateDecodedMap(options) {
options = options || {};
const sourceIndex = 0;
const names = Object.keys(this.storedNames);
const mappings = new Mappings(options.hires);
const locate = getLocator(this.original);
if (this.intro) {
mappings.advance(this.intro);
}
this.firstChunk.eachNext((chunk) => {
const loc = locate(chunk.start);
if (chunk.intro.length) mappings.advance(chunk.intro);
if (chunk.edited) {
mappings.addEdit(
sourceIndex,
chunk.content,
loc,
chunk.storeName ? names.indexOf(chunk.original) : -1,
);
} else {
mappings.addUneditedChunk(sourceIndex, chunk, this.original, loc, this.sourcemapLocations);
}
if (chunk.outro.length) mappings.advance(chunk.outro);
});
return {
file: options.file ? options.file.split(/[/\\]/).pop() : undefined,
sources: [
options.source ? getRelativePath(options.file || '', options.source) : options.file || '',
],
sourcesContent: options.includeContent ? [this.original] : undefined,
names,
mappings: mappings.raw,
x_google_ignoreList: this.ignoreList ? [sourceIndex] : undefined,
};
}
generateMap(options) {
return new SourceMap$1(this.generateDecodedMap(options));
}
_ensureindentStr() {
if (this.indentStr === undefined) {
this.indentStr = guessIndent(this.original);
}
}
_getRawIndentString() {
this._ensureindentStr();
return this.indentStr;
}
getIndentString() {
this._ensureindentStr();
return this.indentStr === null ? '\t' : this.indentStr;
}
indent(indentStr, options) {
const pattern = /^[^\r\n]/gm;
if (isObject$2(indentStr)) {
options = indentStr;
indentStr = undefined;
}
if (indentStr === undefined) {
this._ensureindentStr();
indentStr = this.indentStr || '\t';
}
if (indentStr === '') return this; // noop
options = options || {};
// Process exclusion ranges
const isExcluded = {};
if (options.exclude) {
const exclusions =
typeof options.exclude[0] === 'number' ? [options.exclude] : options.exclude;
exclusions.forEach((exclusion) => {
for (let i = exclusion[0]; i < exclusion[1]; i += 1) {
isExcluded[i] = true;
}
});
}
let shouldIndentNextCharacter = options.indentStart !== false;
const replacer = (match) => {
if (shouldIndentNextCharacter) return `${indentStr}${match}`;
shouldIndentNextCharacter = true;
return match;
};
this.intro = this.intro.replace(pattern, replacer);
let charIndex = 0;
let chunk = this.firstChunk;
while (chunk) {
const end = chunk.end;
if (chunk.edited) {
if (!isExcluded[charIndex]) {
chunk.content = chunk.content.replace(pattern, replacer);
if (chunk.content.length) {
shouldIndentNextCharacter = chunk.content[chunk.content.length - 1] === '\n';
}
}
} else {
charIndex = chunk.start;
while (charIndex < end) {
if (!isExcluded[charIndex]) {
const char = this.original[charIndex];
if (char === '\n') {
shouldIndentNextCharacter = true;
} else if (char !== '\r' && shouldIndentNextCharacter) {
shouldIndentNextCharacter = false;
if (charIndex === chunk.start) {
chunk.prependRight(indentStr);
} else {
this._splitChunk(chunk, charIndex);
chunk = chunk.next;
chunk.prependRight(indentStr);
}
}
}
charIndex += 1;
}
}
charIndex = chunk.end;
chunk = chunk.next;
}
this.outro = this.outro.replace(pattern, replacer);
return this;
}
insert() {
throw new Error(
'magicString.insert(...) is deprecated. Use prependRight(...) or appendLeft(...)',
);
}
insertLeft(index, content) {
if (!warned.insertLeft) {
console.warn(
'magicString.insertLeft(...) is deprecated. Use magicString.appendLeft(...) instead',
); // eslint-disable-line no-console
warned.insertLeft = true;
}
return this.appendLeft(index, content);
}
insertRight(index, content) {
if (!warned.insertRight) {
console.warn(
'magicString.insertRight(...) is deprecated. Use magicString.prependRight(...) instead',
); // eslint-disable-line no-console
warned.insertRight = true;
}
return this.prependRight(index, content);
}
move(start, end, index) {
if (index >= start && index <= end) throw new Error('Cannot move a selection inside itself');
this._split(start);
this._split(end);
this._split(index);
const first = this.byStart[start];
const last = this.byEnd[end];
const oldLeft = first.previous;
const oldRight = last.next;
const newRight = this.byStart[index];
if (!newRight && last === this.lastChunk) return this;
const newLeft = newRight ? newRight.previous : this.lastChunk;
if (oldLeft) oldLeft.next = oldRight;
if (oldRight) oldRight.previous = oldLeft;
if (newLeft) newLeft.next = first;
if (newRight) newRight.previous = last;
if (!first.previous) this.firstChunk = last.next;
if (!last.next) {
this.lastChunk = first.previous;
this.lastChunk.next = null;
}
first.previous = newLeft;
last.next = newRight || null;
if (!newLeft) this.firstChunk = first;
if (!newRight) this.lastChunk = last;
return this;
}
overwrite(start, end, content, options) {
options = options || {};
return this.update(start, end, content, { ...options, overwrite: !options.contentOnly });
}
update(start, end, content, options) {
if (typeof content !== 'string') throw new TypeError('replacement content must be a string');
if (this.original.length !== 0) {
while (start < 0) start += this.original.length;
while (end < 0) end += this.original.length;
}
if (end > this.original.length) throw new Error('end is out of bounds');
if (start === end)
throw new Error(
'Cannot overwrite a zero-length range – use appendLeft or prependRight instead',
);
this._split(start);
this._split(end);
if (options === true) {
if (!warned.storeName) {
console.warn(
'The final argument to magicString.overwrite(...) should be an options object. See https://github.com/rich-harris/magic-string',
); // eslint-disable-line no-console
warned.storeName = true;
}
options = { storeName: true };
}
const storeName = options !== undefined ? options.storeName : false;
const overwrite = options !== undefined ? options.overwrite : false;
if (storeName) {
const original = this.original.slice(start, end);
Object.defineProperty(this.storedNames, original, {
writable: true,
value: true,
enumerable: true,
});
}
const first = this.byStart[start];
const last = this.byEnd[end];
if (first) {
let chunk = first;
while (chunk !== last) {
if (chunk.next !== this.byStart[chunk.end]) {
throw new Error('Cannot overwrite across a split point');
}
chunk = chunk.next;
chunk.edit('', false);
}
first.edit(content, storeName, !overwrite);
} else {
// must be inserting at the end
const newChunk = new Chunk(start, end, '').edit(content, storeName);
// TODO last chunk in the array may not be the last chunk, if it's moved...
last.next = newChunk;
newChunk.previous = last;
}
return this;
}
prepend(content) {
if (typeof content !== 'string') throw new TypeError('outro content must be a string');
this.intro = content + this.intro;
return this;
}
prependLeft(index, content) {
if (typeof content !== 'string') throw new TypeError('inserted content must be a string');
this._split(index);
const chunk = this.byEnd[index];
if (chunk) {
chunk.prependLeft(content);
} else {
this.intro = content + this.intro;
}
return this;
}
prependRight(index, content) {
if (typeof content !== 'string') throw new TypeError('inserted content must be a string');
this._split(index);
const chunk = this.byStart[index];
if (chunk) {
chunk.prependRight(content);
} else {
this.outro = content + this.outro;
}
return this;
}
remove(start, end) {
if (this.original.length !== 0) {
while (start < 0) start += this.original.length;
while (end < 0) end += this.original.length;
}
if (start === end) return this;
if (start < 0 || end > this.original.length) throw new Error('Character is out of bounds');
if (start > end) throw new Error('end must be greater than start');
this._split(start);
this._split(end);
let chunk = this.byStart[start];
while (chunk) {
chunk.intro = '';
chunk.outro = '';
chunk.edit('');
chunk = end > chunk.end ? this.byStart[chunk.end] : null;
}
return this;
}
reset(start, end) {
if (this.original.length !== 0) {
while (start < 0) start += this.original.length;
while (end < 0) end += this.original.length;
}
if (start === end) return this;
if (start < 0 || end > this.original.length) throw new Error('Character is out of bounds');
if (start > end) throw new Error('end must be greater than start');
this._split(start);
this._split(end);
let chunk = this.byStart[start];
while (chunk) {
chunk.reset();
chunk = end > chunk.end ? this.byStart[chunk.end] : null;
}
return this;
}
lastChar() {
if (this.outro.length) return this.outro[this.outro.length - 1];
let chunk = this.lastChunk;
do {
if (chunk.outro.length) return chunk.outro[chunk.outro.length - 1];
if (chunk.content.length) return chunk.content[chunk.content.length - 1];
if (chunk.intro.length) return chunk.intro[chunk.intro.length - 1];
} while ((chunk = chunk.previous));
if (this.intro.length) return this.intro[this.intro.length - 1];
return '';
}
lastLine() {
let lineIndex = this.outro.lastIndexOf(n$1);
if (lineIndex !== -1) return this.outro.substr(lineIndex + 1);
let lineStr = this.outro;
let chunk = this.lastChunk;
do {
if (chunk.outro.length > 0) {
lineIndex = chunk.outro.lastIndexOf(n$1);
if (lineIndex !== -1) return chunk.outro.substr(lineIndex + 1) + lineStr;
lineStr = chunk.outro + lineStr;
}
if (chunk.content.length > 0) {
lineIndex = chunk.content.lastIndexOf(n$1);
if (lineIndex !== -1) return chunk.content.substr(lineIndex + 1) + lineStr;
lineStr = chunk.content + lineStr;
}
if (chunk.intro.length > 0) {
lineIndex = chunk.intro.lastIndexOf(n$1);
if (lineIndex !== -1) return chunk.intro.substr(lineIndex + 1) + lineStr;
lineStr = chunk.intro + lineStr;
}
} while ((chunk = chunk.previous));
lineIndex = this.intro.lastIndexOf(n$1);
if (lineIndex !== -1) return this.intro.substr(lineIndex + 1) + lineStr;
return this.intro + lineStr;
}
slice(start = 0, end = this.original.length) {
if (this.original.length !== 0) {
while (start < 0) start += this.original.length;
while (end < 0) end += this.original.length;
}
let result = '';
// find start chunk
let chunk = this.firstChunk;
while (chunk && (chunk.start > start || chunk.end <= start)) {
// found end chunk before start
if (chunk.start < end && chunk.end >= end) {
return result;
}
chunk = chunk.next;
}
if (chunk && chunk.edited && chunk.start !== start)
throw new Error(`Cannot use replaced character ${start} as slice start anchor.`);
const startChunk = chunk;
while (chunk) {
if (chunk.intro && (startChunk !== chunk || chunk.start === start)) {
result += chunk.intro;
}
const containsEnd = chunk.start < end && chunk.end >= end;
if (containsEnd && chunk.edited && chunk.end !== end)
throw new Error(`Cannot use replaced character ${end} as slice end anchor.`);
const sliceStart = startChunk === chunk ? start - chunk.start : 0;
const sliceEnd = containsEnd ? chunk.content.length + end - chunk.end : chunk.content.length;
result += chunk.content.slice(sliceStart, sliceEnd);
if (chunk.outro && (!containsEnd || chunk.end === end)) {
result += chunk.outro;
}
if (containsEnd) {
break;
}
chunk = chunk.next;
}
return result;
}
// TODO deprecate this? not really very useful
snip(start, end) {
const clone = this.clone();
clone.remove(0, start);
clone.remove(end, clone.original.length);
return clone;
}
_split(index) {
if (this.byStart[index] || this.byEnd[index]) return;
let chunk = this.lastSearchedChunk;
const searchForward = index > chunk.end;
while (chunk) {
if (chunk.contains(index)) return this._splitChunk(chunk, index);
chunk = searchForward ? this.byStart[chunk.end] : this.byEnd[chunk.start];
}
}
_splitChunk(chunk, index) {
if (chunk.edited && chunk.content.length) {
// zero-length edited chunks are a special case (overlapping replacements)
const loc = getLocator(this.original)(index);
throw new Error(
`Cannot split a chunk that has already been edited (${loc.line}:${loc.column} – "${chunk.original}")`,
);
}
const newChunk = chunk.split(index);
this.byEnd[index] = chunk;
this.byStart[index] = newChunk;
this.byEnd[newChunk.end] = newChunk;
if (chunk === this.lastChunk) this.lastChunk = newChunk;
this.lastSearchedChunk = chunk;
return true;
}
toString() {
let str = this.intro;
let chunk = this.firstChunk;
while (chunk) {
str += chunk.toString();
chunk = chunk.next;
}
return str + this.outro;
}
isEmpty() {
let chunk = this.firstChunk;
do {
if (
(chunk.intro.length && chunk.intro.trim()) ||
(chunk.content.length && chunk.content.trim()) ||
(chunk.outro.length && chunk.outro.trim())
)
return false;
} while ((chunk = chunk.next));
return true;
}
length() {
let chunk = this.firstChunk;
let length = 0;
do {
length += chunk.intro.length + chunk.content.length + chunk.outro.length;
} while ((chunk = chunk.next));
return length;
}
trimLines() {
return this.trim('[\\r\\n]');
}
trim(charType) {
return this.trimStart(charType).trimEnd(charType);
}
trimEndAborted(charType) {
const rx = new RegExp((charType || '\\s') + '+$');
this.outro = this.outro.replace(rx, '');
if (this.outro.length) return true;
let chunk = this.lastChunk;
do {
const end = chunk.end;
const aborted = chunk.trimEnd(rx);
// if chunk was trimmed, we have a new lastChunk
if (chunk.end !== end) {
if (this.lastChunk === chunk) {
this.lastChunk = chunk.next;
}
this.byEnd[chunk.end] = chunk;
this.byStart[chunk.next.start] = chunk.next;
this.byEnd[chunk.next.end] = chunk.next;
}
if (aborted) return true;
chunk = chunk.previous;
} while (chunk);
return false;
}
trimEnd(charType) {
this.trimEndAborted(charType);
return this;
}
trimStartAborted(charType) {
const rx = new RegExp('^' + (charType || '\\s') + '+');
this.intro = this.intro.replace(rx, '');
if (this.intro.length) return true;
let chunk = this.firstChunk;
do {
const end = chunk.end;
const aborted = chunk.trimStart(rx);
if (chunk.end !== end) {
// special case...
if (chunk === this.lastChunk) this.lastChunk = chunk.next;
this.byEnd[chunk.end] = chunk;
this.byStart[chunk.next.start] = chunk.next;
this.byEnd[chunk.next.end] = chunk.next;
}
if (aborted) return true;
chunk = chunk.next;
} while (chunk);
return false;
}
trimStart(charType) {
this.trimStartAborted(charType);
return this;
}
hasChanged() {
return this.original !== this.toString();
}
_replaceRegexp(searchValue, replacement) {
function getReplacement(match, str) {
if (typeof replacement === 'string') {
return replacement.replace(/\$(\$|&|\d+)/g, (_, i) => {
// https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/replace#specifying_a_string_as_a_parameter
if (i === '$') return '$';
if (i === '&') return match[0];
const num = +i;
if (num < match.length) return match[+i];
return `$${i}`;
});
} else {
return replacement(...match, match.index, str, match.groups);
}
}
function matchAll(re, str) {
let match;
const matches = [];
while ((match = re.exec(str))) {
matches.push(match);
}
return matches;
}
if (searchValue.global) {
const matches = matchAll(searchValue, this.original);
matches.forEach((match) => {
if (match.index != null) {
const replacement = getReplacement(match, this.original);
if (replacement !== match[0]) {
this.overwrite(
match.index,
match.index + match[0].length,
replacement
);
}
}
});
} else {
const match = this.original.match(searchValue);
if (match && match.index != null) {
const replacement = getReplacement(match, this.original);
if (replacement !== match[0]) {
this.overwrite(
match.index,
match.index + match[0].length,
replacement
);
}
}
}
return this;
}
_replaceString(string, replacement) {
const { original } = this;
const index = original.indexOf(string);
if (index !== -1) {
this.overwrite(index, index + string.length, replacement);
}
return this;
}
replace(searchValue, replacement) {
if (typeof searchValue === 'string') {
return this._replaceString(searchValue, replacement);
}
return this._replaceRegexp(searchValue, replacement);
}
_replaceAllString(string, replacement) {
const { original } = this;
const stringLength = string.length;
for (
let index = original.indexOf(string);
index !== -1;
index = original.indexOf(string, index + stringLength)
) {
const previous = original.slice(index, index + stringLength);
if (previous !== replacement)
this.overwrite(index, index + stringLength, replacement);
}
return this;
}
replaceAll(searchValue, replacement) {
if (typeof searchValue === 'string') {
return this._replaceAllString(searchValue, replacement);
}
if (!searchValue.global) {
throw new TypeError(
'MagicString.prototype.replaceAll called with a non-global RegExp argument',
);
}
return this._replaceRegexp(searchValue, replacement);
}
}
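// Typical MagicString usage (illustrative only; the offsets refer to this example input):
//   const s = new MagicString('const answer = 42;');
//   s.overwrite(15, 17, '43');      // replace the characters spanning "42"
//   s.prepend('/* generated */\n');
//   s.toString();                   // '/* generated */\nconst answer = 43;'
//   s.generateMap({ hires: true }); // source map describing the edits above

// isReference determines whether an Identifier/MemberExpression node is a genuine reference
// to a binding, as opposed to e.g. a property name, method name, label or import/export alias.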
function isReference(node, parent) {
if (node.type === 'MemberExpression') {
return !node.computed && isReference(node.object, node);
}
if (node.type === 'Identifier') {
if (!parent)
return true;
switch (parent.type) {
// disregard `bar` in `foo.bar`
case 'MemberExpression': return parent.computed || node === parent.object;
// disregard the `foo` in `class {foo(){}}` but keep it in `class {[foo](){}}`
case 'MethodDefinition': return parent.computed;
// disregard the `foo` in `class {foo=bar}` but keep it in `class {[foo]=bar}` and `class {bar=foo}`
case 'FieldDefinition': return parent.computed || node === parent.value;
// disregard the `bar` in `{ bar: foo }`, but keep it in `{ [bar]: foo }`
case 'Property': return parent.computed || node === parent.value;
// disregard the `bar` in `export { foo as bar }` or
// the foo in `import { foo as bar }`
case 'ExportSpecifier':
case 'ImportSpecifier': return node === parent.local;
// disregard the `foo` in `foo: while (...) { ... break foo; ... continue foo;}`
case 'LabeledStatement':
case 'BreakStatement':
case 'ContinueStatement': return false;
default: return true;
}
}
return false;
}
var version$2 = "26.0.1";
var peerDependencies = {
rollup: "^2.68.0||^3.0.0||^4.0.0"
};
function tryParse(parse, code, id) {
try {
return parse(code, { allowReturnOutsideFunction: true });
} catch (err) {
err.message += ` in ${id}`;
throw err;
}
}
const firstpassGlobal = /\b(?:require|module|exports|global)\b/;
const firstpassNoGlobal = /\b(?:require|module|exports)\b/;
function hasCjsKeywords(code, ignoreGlobal) {
const firstpass = ignoreGlobal ? firstpassNoGlobal : firstpassGlobal;
return firstpass.test(code);
}
/* eslint-disable no-underscore-dangle */
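// Parses `code` and walks its top-level statements to determine whether the module uses ES
// module syntax and, if so, whether it has a default export and/or named exports; the parsed
// AST is returned alongside the flags.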
function analyzeTopLevelStatements(parse, code, id) {
const ast = tryParse(parse, code, id);
let isEsModule = false;
let hasDefaultExport = false;
let hasNamedExports = false;
for (const node of ast.body) {
switch (node.type) {
case 'ExportDefaultDeclaration':
isEsModule = true;
hasDefaultExport = true;
break;
case 'ExportNamedDeclaration':
isEsModule = true;
if (node.declaration) {
hasNamedExports = true;
} else {
for (const specifier of node.specifiers) {
if (specifier.exported.name === 'default') {
hasDefaultExport = true;
} else {
hasNamedExports = true;
}
}
}
break;
case 'ExportAllDeclaration':
isEsModule = true;
if (node.exported && node.exported.name === 'default') {
hasDefaultExport = true;
} else {
hasNamedExports = true;
}
break;
case 'ImportDeclaration':
isEsModule = true;
break;
}
}
return { isEsModule, hasDefaultExport, hasNamedExports, ast };
}
/* eslint-disable import/prefer-default-export */
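// Returns a legal identifier based on `identifier` that collides with neither the given
// scopes nor the known globals, appending `_1`, `_2`, ... until a free name is found, and
// records the chosen name in each scope's declarations so later calls keep avoiding it.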
function deconflict(scopes, globals, identifier) {
let i = 1;
let deconflicted = makeLegalIdentifier(identifier);
const hasConflicts = () =>
scopes.some((scope) => scope.contains(deconflicted)) || globals.has(deconflicted);
while (hasConflicts()) {
deconflicted = makeLegalIdentifier(`${identifier}_${i}`);
i += 1;
}
for (const scope of scopes) {
scope.declarations[deconflicted] = true;
}
return deconflicted;
}
function getName(id) {
const name = makeLegalIdentifier(basename$1(id, extname(id)));
if (name !== 'index') {
return name;
}
return makeLegalIdentifier(basename$1(dirname$1(id)));
}
function normalizePathSlashes(path) {
return path.replace(/\\/g, '/');
}
const getVirtualPathForDynamicRequirePath = (path, commonDir) =>
`/${normalizePathSlashes(relative$1(commonDir, path))}`;
function capitalize(name) {
return name[0].toUpperCase() + name.slice(1);
}
function getStrictRequiresFilter({ strictRequires }) {
switch (strictRequires) {
case true:
return { strictRequiresFilter: () => true, detectCyclesAndConditional: false };
// eslint-disable-next-line no-undefined
case undefined:
case 'auto':
case 'debug':
case null:
return { strictRequiresFilter: () => false, detectCyclesAndConditional: true };
case false:
return { strictRequiresFilter: () => false, detectCyclesAndConditional: false };
default:
if (typeof strictRequires === 'string' || Array.isArray(strictRequires)) {
return {
strictRequiresFilter: createFilter$1(strictRequires),
detectCyclesAndConditional: false
};
}
throw new Error('Unexpected value for "strictRequires" option.');
}
}
function getPackageEntryPoint(dirPath) {
let entryPoint = 'index.js';
try {
if (existsSync(join$1(dirPath, 'package.json'))) {
entryPoint =
JSON.parse(readFileSync(join$1(dirPath, 'package.json'), { encoding: 'utf8' })).main ||
entryPoint;
}
} catch (ignored) {
// ignored
}
return entryPoint;
}
function isDirectory$1(path) {
try {
if (statSync$1(path).isDirectory()) return true;
} catch (ignored) {
// Nothing to do here
}
return false;
}
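// Expands the dynamic require glob patterns into a Map from normalized paths to resolved
// module paths. Patterns prefixed with '!' remove earlier matches; directories are registered
// both by their own path and by their package entry point. The common parent directory of all
// matched directories and the dynamicRequireRoot is returned as commonDir (or null when
// nothing matched).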
function getDynamicRequireModules(patterns, dynamicRequireRoot) {
const dynamicRequireModules = new Map();
const dirNames = new Set();
for (const pattern of !patterns || Array.isArray(patterns) ? patterns || [] : [patterns]) {
const isNegated = pattern.startsWith('!');
const modifyMap = (targetPath, resolvedPath) =>
isNegated
? dynamicRequireModules.delete(targetPath)
: dynamicRequireModules.set(targetPath, resolvedPath);
for (const path of glob$1
.sync(isNegated ? pattern.substr(1) : pattern)
.sort((a, b) => a.localeCompare(b, 'en'))) {
const resolvedPath = resolve$3(path);
const requirePath = normalizePathSlashes(resolvedPath);
if (isDirectory$1(resolvedPath)) {
dirNames.add(resolvedPath);
const modulePath = resolve$3(join$1(resolvedPath, getPackageEntryPoint(path)));
modifyMap(requirePath, modulePath);
modifyMap(normalizePathSlashes(modulePath), modulePath);
} else {
dirNames.add(dirname$1(resolvedPath));
modifyMap(requirePath, resolvedPath);
}
}
}
return {
commonDir: dirNames.size ? getCommonDir([...dirNames, dynamicRequireRoot]) : null,
dynamicRequireModules
};
}
const FAILED_REQUIRE_ERROR = `throw new Error('Could not dynamically require "' + path + '". Please configure the dynamicRequireTargets and/or ignoreDynamicRequires option of @rollup/plugin-commonjs appropriately for this require call to work.');`;
const COMMONJS_REQUIRE_EXPORT = 'commonjsRequire';
const CREATE_COMMONJS_REQUIRE_EXPORT = 'createCommonjsRequire';
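// Generates the source of the virtual dynamic-modules registry. Without dynamic require
// support it only exports a commonjsRequire that throws; otherwise it imports every dynamic
// require target and exports createCommonjsRequire, whose commonjsResolve mimics Node's
// lookup (relative paths, node_modules walking, and the '', '.js', '.json' extensions).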
function getDynamicModuleRegistry(
isDynamicRequireModulesEnabled,
dynamicRequireModules,
commonDir,
ignoreDynamicRequires
) {
if (!isDynamicRequireModulesEnabled) {
return `export function ${COMMONJS_REQUIRE_EXPORT}(path) {
${FAILED_REQUIRE_ERROR}
}`;
}
const dynamicModuleImports = [...dynamicRequireModules.values()]
.map(
(id, index) =>
`import ${
id.endsWith('.json') ? `json${index}` : `{ __require as require${index} }`
} from ${JSON.stringify(id)};`
)
.join('\n');
const dynamicModuleProps = [...dynamicRequireModules.keys()]
.map(
(id, index) =>
`\t\t${JSON.stringify(getVirtualPathForDynamicRequirePath(id, commonDir))}: ${
id.endsWith('.json') ? `function () { return json${index}; }` : `require${index}`
}`
)
.join(',\n');
return `${dynamicModuleImports}
var dynamicModules;
function getDynamicModules() {
return dynamicModules || (dynamicModules = {
${dynamicModuleProps}
});
}
export function ${CREATE_COMMONJS_REQUIRE_EXPORT}(originalModuleDir) {
function handleRequire(path) {
var resolvedPath = commonjsResolve(path, originalModuleDir);
if (resolvedPath !== null) {
return getDynamicModules()[resolvedPath]();
}
${ignoreDynamicRequires ? 'return require(path);' : FAILED_REQUIRE_ERROR}
}
handleRequire.resolve = function (path) {
var resolvedPath = commonjsResolve(path, originalModuleDir);
if (resolvedPath !== null) {
return resolvedPath;
}
return require.resolve(path);
}
return handleRequire;
}
function commonjsResolve (path, originalModuleDir) {
var shouldTryNodeModules = isPossibleNodeModulesPath(path);
path = normalize(path);
var relPath;
if (path[0] === '/') {
originalModuleDir = '';
}
var modules = getDynamicModules();
var checkedExtensions = ['', '.js', '.json'];
while (true) {
if (!shouldTryNodeModules) {
relPath = normalize(originalModuleDir + '/' + path);
} else {
relPath = normalize(originalModuleDir + '/node_modules/' + path);
}
if (relPath.endsWith('/..')) {
break; // Travelled too far up, avoid infinite loop
}
for (var extensionIndex = 0; extensionIndex < checkedExtensions.length; extensionIndex++) {
var resolvedPath = relPath + checkedExtensions[extensionIndex];
if (modules[resolvedPath]) {
return resolvedPath;
}
}
if (!shouldTryNodeModules) break;
var nextDir = normalize(originalModuleDir + '/..');
if (nextDir === originalModuleDir) break;
originalModuleDir = nextDir;
}
return null;
}
function isPossibleNodeModulesPath (modulePath) {
var c0 = modulePath[0];
if (c0 === '/' || c0 === '\\\\') return false;
var c1 = modulePath[1], c2 = modulePath[2];
if ((c0 === '.' && (!c1 || c1 === '/' || c1 === '\\\\')) ||
(c0 === '.' && c1 === '.' && (!c2 || c2 === '/' || c2 === '\\\\'))) return false;
if (c1 === ':' && (c2 === '/' || c2 === '\\\\')) return false;
return true;
}
function normalize (path) {
path = path.replace(/\\\\/g, '/');
var parts = path.split('/');
var slashed = parts[0] === '';
for (var i = 1; i < parts.length; i++) {
if (parts[i] === '.' || parts[i] === '') {
parts.splice(i--, 1);
}
}
for (var i = 1; i < parts.length; i++) {
if (parts[i] !== '..') continue;
if (i > 0 && parts[i - 1] !== '..' && parts[i - 1] !== '.') {
parts.splice(--i, 2);
i--;
}
}
path = parts.join('/');
if (slashed && path[0] !== '/') path = '/' + path;
else if (path.length === 0) path = '.';
return path;
}`;
}
const isWrappedId = (id, suffix) => id.endsWith(suffix);
const wrapId = (id, suffix) => `\0${id}${suffix}`;
const unwrapId = (wrappedId, suffix) => wrappedId.slice(1, -suffix.length);
const PROXY_SUFFIX = '?commonjs-proxy';
const WRAPPED_SUFFIX = '?commonjs-wrapped';
const EXTERNAL_SUFFIX = '?commonjs-external';
const EXPORTS_SUFFIX = '?commonjs-exports';
const MODULE_SUFFIX = '?commonjs-module';
const ENTRY_SUFFIX = '?commonjs-entry';
const ES_IMPORT_SUFFIX = '?commonjs-es-import';
const DYNAMIC_MODULES_ID = '\0commonjs-dynamic-modules';
const HELPERS_ID = '\0commonjsHelpers.js';
const IS_WRAPPED_COMMONJS = 'withRequireFunction';
// `x['default']` is used instead of `x.default` for backward compatibility with ES3 browsers.
// Minifiers like uglify will usually transpile it back if compatibility with ES3 is not enabled.
// This could be improved by inspecting Rollup's "generatedCode" option
const HELPERS = `
export var commonjsGlobal = typeof globalThis !== 'undefined' ? globalThis : typeof window !== 'undefined' ? window : typeof global !== 'undefined' ? global : typeof self !== 'undefined' ? self : {};
export function getDefaultExportFromCjs (x) {
return x && x.__esModule && Object.prototype.hasOwnProperty.call(x, 'default') ? x['default'] : x;
}
export function getDefaultExportFromNamespaceIfPresent (n) {
return n && Object.prototype.hasOwnProperty.call(n, 'default') ? n['default'] : n;
}
export function getDefaultExportFromNamespaceIfNotNamed (n) {
return n && Object.prototype.hasOwnProperty.call(n, 'default') && Object.keys(n).length === 1 ? n['default'] : n;
}
export function getAugmentedNamespace(n) {
if (n.__esModule) return n;
var f = n.default;
if (typeof f == "function") {
var a = function a () {
if (this instanceof a) {
return Reflect.construct(f, arguments, this.constructor);
}
return f.apply(this, arguments);
};
a.prototype = f.prototype;
} else a = {};
Object.defineProperty(a, '__esModule', {value: true});
Object.keys(n).forEach(function (k) {
var d = Object.getOwnPropertyDescriptor(n, k);
Object.defineProperty(a, k, d.get ? d : {
enumerable: true,
get: function () {
return n[k];
}
});
});
return a;
}
`;
function getHelpersModule() {
return HELPERS;
}
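// Builds the proxy module used when a required id has no CommonJS metadata. Depending on
// `requireReturnsDefault`, it re-exports the default export directly, the whole namespace,
// or a namespace wrapped by one of the helpers above ('auto' uses
// getDefaultExportFromNamespaceIfNotNamed, a falsy value uses getAugmentedNamespace).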
function getUnknownRequireProxy(id, requireReturnsDefault) {
if (requireReturnsDefault === true || id.endsWith('.json')) {
return `export { default } from ${JSON.stringify(id)};`;
}
const name = getName(id);
const exported =
requireReturnsDefault === 'auto'
? `import { getDefaultExportFromNamespaceIfNotNamed } from "${HELPERS_ID}"; export default /*@__PURE__*/getDefaultExportFromNamespaceIfNotNamed(${name});`
: requireReturnsDefault === 'preferred'
? `import { getDefaultExportFromNamespaceIfPresent } from "${HELPERS_ID}"; export default /*@__PURE__*/getDefaultExportFromNamespaceIfPresent(${name});`
: !requireReturnsDefault
? `import { getAugmentedNamespace } from "${HELPERS_ID}"; export default /*@__PURE__*/getAugmentedNamespace(${name});`
: `export default ${name};`;
return `import * as ${name} from ${JSON.stringify(id)}; ${exported}`;
}
async function getStaticRequireProxy(id, requireReturnsDefault, loadModule) {
const name = getName(id);
const {
meta: { commonjs: commonjsMeta }
} = await loadModule({ id });
if (!commonjsMeta) {
return getUnknownRequireProxy(id, requireReturnsDefault);
}
if (commonjsMeta.isCommonJS) {
return `export { __moduleExports as default } from ${JSON.stringify(id)};`;
}
if (!requireReturnsDefault) {
return `import { getAugmentedNamespace } from "${HELPERS_ID}"; import * as ${name} from ${JSON.stringify(
id
)}; export default /*@__PURE__*/getAugmentedNamespace(${name});`;
}
if (
requireReturnsDefault !== true &&
(requireReturnsDefault === 'namespace' ||
!commonjsMeta.hasDefaultExport ||
(requireReturnsDefault === 'auto' && commonjsMeta.hasNamedExports))
) {
return `import * as ${name} from ${JSON.stringify(id)}; export default ${name};`;
}
return `export { default } from ${JSON.stringify(id)};`;
}
function getEntryProxy(id, defaultIsModuleExports, getModuleInfo, shebang) {
const {
meta: { commonjs: commonjsMeta },
hasDefaultExport
} = getModuleInfo(id);
if (!commonjsMeta || commonjsMeta.isCommonJS !== IS_WRAPPED_COMMONJS) {
const stringifiedId = JSON.stringify(id);
let code = `export * from ${stringifiedId};`;
if (hasDefaultExport) {
code += `export { default } from ${stringifiedId};`;
}
return shebang + code;
}
const result = getEsImportProxy(id, defaultIsModuleExports);
return {
...result,
code: shebang + result.code
};
}
function getEsImportProxy(id, defaultIsModuleExports) {
const name = getName(id);
const exportsName = `${name}Exports`;
const requireModule = `require${capitalize(name)}`;
let code =
`import { getDefaultExportFromCjs } from "${HELPERS_ID}";\n` +
`import { __require as ${requireModule} } from ${JSON.stringify(id)};\n` +
`var ${exportsName} = ${requireModule}();\n` +
`export { ${exportsName} as __moduleExports };`;
if (defaultIsModuleExports === true) {
code += `\nexport { ${exportsName} as default };`;
} else {
code += `export default /*@__PURE__*/getDefaultExportFromCjs(${exportsName});`;
}
return {
code,
syntheticNamedExports: '__moduleExports'
};
}
/* eslint-disable no-param-reassign, no-undefined */
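// Fallback extension resolution for require() sources that other resolvers missed: for a
// relative importee, try the resolved path itself, then each configured extension appended
// directly and as '<path>/index<ext>', returning the first candidate that is an existing file.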
function getCandidatesForExtension(resolved, extension) {
return [resolved + extension, `${resolved}${sep$1}index${extension}`];
}
function getCandidates(resolved, extensions) {
return extensions.reduce(
(paths, extension) => paths.concat(getCandidatesForExtension(resolved, extension)),
[resolved]
);
}
function resolveExtensions(importee, importer, extensions) {
// not our problem
if (importee[0] !== '.' || !importer) return undefined;
const resolved = resolve$3(dirname$1(importer), importee);
const candidates = getCandidates(resolved, extensions);
for (let i = 0; i < candidates.length; i += 1) {
try {
const stats = statSync$1(candidates[i]);
if (stats.isFile()) return { id: candidates[i] };
} catch (err) {
/* noop */
}
}
return undefined;
}
function getResolveId(extensions, isPossibleCjsId) {
const currentlyResolving = new Map();
return {
/**
* This is a Map of importers to Sets of require sources being resolved at
* the moment by resolveRequireSourcesAndUpdateMeta
*/
currentlyResolving,
async resolveId(importee, importer, resolveOptions) {
const customOptions = resolveOptions.custom;
// All logic below is specific to ES imports.
// Also, if we do not skip this logic for requires that are resolved while
// transforming a commonjs file, it can easily lead to deadlocks.
if (
customOptions &&
customOptions['node-resolve'] &&
customOptions['node-resolve'].isRequire
) {
return null;
}
const currentlyResolvingForParent = currentlyResolving.get(importer);
if (currentlyResolvingForParent && currentlyResolvingForParent.has(importee)) {
this.warn({
code: 'THIS_RESOLVE_WITHOUT_OPTIONS',
message:
'It appears a plugin has implemented a "resolveId" hook that uses "this.resolve" without forwarding the third "options" parameter of "resolveId". This is problematic as it can lead to wrong module resolutions especially for the node-resolve plugin and in certain cases cause early exit errors for the commonjs plugin.\nIn rare cases, this warning can appear if the same file is both imported and required from the same mixed ES/CommonJS module, in which case it can be ignored.',
url: 'https://rollupjs.org/guide/en/#resolveid'
});
return null;
}
if (isWrappedId(importee, WRAPPED_SUFFIX)) {
return unwrapId(importee, WRAPPED_SUFFIX);
}
if (
importee.endsWith(ENTRY_SUFFIX) ||
isWrappedId(importee, MODULE_SUFFIX) ||
isWrappedId(importee, EXPORTS_SUFFIX) ||
isWrappedId(importee, PROXY_SUFFIX) ||
isWrappedId(importee, ES_IMPORT_SUFFIX) ||
isWrappedId(importee, EXTERNAL_SUFFIX) ||
importee.startsWith(HELPERS_ID) ||
importee === DYNAMIC_MODULES_ID
) {
return importee;
}
if (importer) {
if (
importer === DYNAMIC_MODULES_ID ||
// Proxies are only importing resolved ids, no need to resolve again
isWrappedId(importer, PROXY_SUFFIX) ||
isWrappedId(importer, ES_IMPORT_SUFFIX) ||
importer.endsWith(ENTRY_SUFFIX)
) {
return importee;
}
if (isWrappedId(importer, EXTERNAL_SUFFIX)) {
// We need to return null for unresolved imports so that the proper warning is shown
if (
!(await this.resolve(
importee,
importer,
Object.assign({ skipSelf: true }, resolveOptions)
))
) {
return null;
}
// For other external imports, we need to make sure they are handled as external
return { id: importee, external: true };
}
}
if (importee.startsWith('\0')) {
return null;
}
// If this is an entry point or ESM import, we need to figure out if the importee is wrapped and
// if that is the case, we need to add a proxy.
const resolved =
(await this.resolve(
importee,
importer,
Object.assign({ skipSelf: true }, resolveOptions)
)) || resolveExtensions(importee, importer, extensions);
// Make sure that even if other plugins resolve again, we ignore our own proxies
if (
!resolved ||
resolved.external ||
resolved.id.endsWith(ENTRY_SUFFIX) ||
isWrappedId(resolved.id, ES_IMPORT_SUFFIX) ||
!isPossibleCjsId(resolved.id)
) {
return resolved;
}
const moduleInfo = await this.load(resolved);
const {
meta: { commonjs: commonjsMeta }
} = moduleInfo;
if (commonjsMeta) {
const { isCommonJS } = commonjsMeta;
if (isCommonJS) {
if (resolveOptions.isEntry) {
moduleInfo.moduleSideEffects = true;
// We must not precede entry proxies with a `\0` as that will mess up relative external resolution
return resolved.id + ENTRY_SUFFIX;
}
if (isCommonJS === IS_WRAPPED_COMMONJS) {
return { id: wrapId(resolved.id, ES_IMPORT_SUFFIX), meta: { commonjs: { resolved } } };
}
}
}
return resolved;
}
};
}
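// Tracks, per module id, whether it is CommonJS (true), not CommonJS (false) or wrapped
// CommonJS (IS_WRAPPED_COMMONJS), together with the require graph between modules. Cyclic or
// conditionally required modules are promoted to the wrapped form, and once a module has been
// fully analyzed its type is frozen so cached transforms stay consistent.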
function getRequireResolver(extensions, detectCyclesAndConditional, currentlyResolving) {
const knownCjsModuleTypes = Object.create(null);
const requiredIds = Object.create(null);
const unconditionallyRequiredIds = Object.create(null);
const dependencies = Object.create(null);
const getDependencies = (id) => dependencies[id] || (dependencies[id] = new Set());
const isCyclic = (id) => {
const dependenciesToCheck = new Set(getDependencies(id));
for (const dependency of dependenciesToCheck) {
if (dependency === id) {
return true;
}
for (const childDependency of getDependencies(dependency)) {
dependenciesToCheck.add(childDependency);
}
}
return false;
};
// Once a module is listed here, its type (wrapped or not) is fixed and may
// not change for the rest of the current build, so that already transformed
// modules are not broken.
const fullyAnalyzedModules = Object.create(null);
const getTypeForFullyAnalyzedModule = (id) => {
const knownType = knownCjsModuleTypes[id];
if (knownType !== true || !detectCyclesAndConditional || fullyAnalyzedModules[id]) {
return knownType;
}
if (isCyclic(id)) {
return (knownCjsModuleTypes[id] = IS_WRAPPED_COMMONJS);
}
return knownType;
};
const setInitialParentType = (id, initialCommonJSType) => {
// Fully analyzed modules may never change type
if (fullyAnalyzedModules[id]) {
return;
}
knownCjsModuleTypes[id] = initialCommonJSType;
if (
detectCyclesAndConditional &&
knownCjsModuleTypes[id] === true &&
requiredIds[id] &&
!unconditionallyRequiredIds[id]
) {
knownCjsModuleTypes[id] = IS_WRAPPED_COMMONJS;
}
};
const analyzeRequiredModule = async (parentId, resolved, isConditional, loadModule) => {
const childId = resolved.id;
requiredIds[childId] = true;
if (!(isConditional || knownCjsModuleTypes[parentId] === IS_WRAPPED_COMMONJS)) {
unconditionallyRequiredIds[childId] = true;
}
getDependencies(parentId).add(childId);
if (!isCyclic(childId)) {
// This makes sure the current transform handler waits for all direct
// dependencies to be loaded and transformed and therefore for all
// transitive CommonJS dependencies to be loaded as well so that all
// cycles have been found and knownCjsModuleTypes is reliable.
await loadModule(resolved);
}
};
const getTypeForImportedModule = async (resolved, loadModule) => {
if (resolved.id in knownCjsModuleTypes) {
// This handles cyclic ES dependencies
return knownCjsModuleTypes[resolved.id];
}
const {
meta: { commonjs }
} = await loadModule(resolved);
return (commonjs && commonjs.isCommonJS) || false;
};
return {
getWrappedIds: () =>
Object.keys(knownCjsModuleTypes).filter(
(id) => knownCjsModuleTypes[id] === IS_WRAPPED_COMMONJS
),
isRequiredId: (id) => requiredIds[id],
async shouldTransformCachedModule({
id: parentId,
resolvedSources,
meta: { commonjs: parentMeta }
}) {
// We explicitly track ES modules to handle circular imports
if (!(parentMeta && parentMeta.isCommonJS)) knownCjsModuleTypes[parentId] = false;
if (isWrappedId(parentId, ES_IMPORT_SUFFIX)) return false;
const parentRequires = parentMeta && parentMeta.requires;
if (parentRequires) {
setInitialParentType(parentId, parentMeta.initialCommonJSType);
await Promise.all(
parentRequires.map(({ resolved, isConditional }) =>
analyzeRequiredModule(parentId, resolved, isConditional, this.load)
)
);
if (getTypeForFullyAnalyzedModule(parentId) !== parentMeta.isCommonJS) {
return true;
}
for (const {
resolved: { id }
} of parentRequires) {
if (getTypeForFullyAnalyzedModule(id) !== parentMeta.isRequiredCommonJS[id]) {
return true;
}
}
// Now that we decided to go with the cached copy, neither the parent
// module nor any of its children may change types anymore
fullyAnalyzedModules[parentId] = true;
for (const {
resolved: { id }
} of parentRequires) {
fullyAnalyzedModules[id] = true;
}
}
const parentRequireSet = new Set((parentRequires || []).map(({ resolved: { id } }) => id));
return (
await Promise.all(
Object.keys(resolvedSources)
.map((source) => resolvedSources[source])
.filter(({ id, external }) => !(external || parentRequireSet.has(id)))
.map(async (resolved) => {
if (isWrappedId(resolved.id, ES_IMPORT_SUFFIX)) {
return (
(await getTypeForImportedModule(
(
await this.load({ id: resolved.id })
).meta.commonjs.resolved,
this.load
)) !== IS_WRAPPED_COMMONJS
);
}
return (await getTypeForImportedModule(resolved, this.load)) === IS_WRAPPED_COMMONJS;
})
)
).some((shouldTransform) => shouldTransform);
},
/* eslint-disable no-param-reassign */
resolveRequireSourcesAndUpdateMeta:
(rollupContext) => async (parentId, isParentCommonJS, parentMeta, sources) => {
parentMeta.initialCommonJSType = isParentCommonJS;
parentMeta.requires = [];
parentMeta.isRequiredCommonJS = Object.create(null);
setInitialParentType(parentId, isParentCommonJS);
const currentlyResolvingForParent = currentlyResolving.get(parentId) || new Set();
currentlyResolving.set(parentId, currentlyResolvingForParent);
const requireTargets = await Promise.all(
sources.map(async ({ source, isConditional }) => {
// Never analyze or proxy internal modules
if (source.startsWith('\0')) {
return { id: source, allowProxy: false };
}
currentlyResolvingForParent.add(source);
const resolved =
(await rollupContext.resolve(source, parentId, {
skipSelf: false,
custom: { 'node-resolve': { isRequire: true } }
})) || resolveExtensions(source, parentId, extensions);
currentlyResolvingForParent.delete(source);
if (!resolved) {
return { id: wrapId(source, EXTERNAL_SUFFIX), allowProxy: false };
}
const childId = resolved.id;
if (resolved.external) {
return { id: wrapId(childId, EXTERNAL_SUFFIX), allowProxy: false };
}
parentMeta.requires.push({ resolved, isConditional });
await analyzeRequiredModule(parentId, resolved, isConditional, rollupContext.load);
return { id: childId, allowProxy: true };
})
);
parentMeta.isCommonJS = getTypeForFullyAnalyzedModule(parentId);
fullyAnalyzedModules[parentId] = true;
return requireTargets.map(({ id: dependencyId, allowProxy }, index) => {
// eslint-disable-next-line no-multi-assign
const isCommonJS = (parentMeta.isRequiredCommonJS[dependencyId] =
getTypeForFullyAnalyzedModule(dependencyId));
fullyAnalyzedModules[dependencyId] = true;
return {
source: sources[index].source,
id: allowProxy
? isCommonJS === IS_WRAPPED_COMMONJS
? wrapId(dependencyId, WRAPPED_SUFFIX)
: wrapId(dependencyId, PROXY_SUFFIX)
: dependencyId,
isCommonJS
};
});
},
isCurrentlyResolving(source, parentId) {
const currentlyResolvingForParent = currentlyResolving.get(parentId);
return currentlyResolvingForParent && currentlyResolvingForParent.has(source);
}
};
}
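// Checks the running peer (e.g. rollup) version against a peer dependency range such as
// "^2.68.0||^3.0.0||^4.0.0": the lowest "^major.minor.patch" entry in the range (2.68.0 here)
// is treated as the minimum, and an older or missing version throws.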
function validateVersion(actualVersion, peerDependencyVersion, name) {
const versionRegexp = /\^(\d+\.\d+\.\d+)/g;
let minMajor = Infinity;
let minMinor = Infinity;
let minPatch = Infinity;
let foundVersion;
// eslint-disable-next-line no-cond-assign
while ((foundVersion = versionRegexp.exec(peerDependencyVersion))) {
const [foundMajor, foundMinor, foundPatch] = foundVersion[1].split('.').map(Number);
if (foundMajor < minMajor) {
minMajor = foundMajor;
minMinor = foundMinor;
minPatch = foundPatch;
}
}
if (!actualVersion) {
throw new Error(
`Insufficient ${name} version: "@rollup/plugin-commonjs" requires at least ${name}@${minMajor}.${minMinor}.${minPatch}.`
);
}
const [major, minor, patch] = actualVersion.split('.').map(Number);
if (
major < minMajor ||
(major === minMajor && (minor < minMinor || (minor === minMinor && patch < minPatch)))
) {
throw new Error(
`Insufficient ${name} version: "@rollup/plugin-commonjs" requires at least ${name}@${minMajor}.${minMinor}.${minPatch} but found ${name}@${actualVersion}.`
);
}
}
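// Minimal static evaluator for the AST checks below: isTruthy/isFalsy fold literals and the
// listed operators, returning true/false when the value is statically known and null when it
// is not (e.g. anything involving identifiers), so callers can treat null as "unknown".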
const operators = {
'==': (x) => equals(x.left, x.right, false),
'!=': (x) => not(operators['=='](x)),
'===': (x) => equals(x.left, x.right, true),
'!==': (x) => not(operators['==='](x)),
'!': (x) => isFalsy(x.argument),
'&&': (x) => isTruthy(x.left) && isTruthy(x.right),
'||': (x) => isTruthy(x.left) || isTruthy(x.right)
};
function not(value) {
return value === null ? value : !value;
}
function equals(a, b, strict) {
if (a.type !== b.type) return null;
// eslint-disable-next-line eqeqeq
if (a.type === 'Literal') return strict ? a.value === b.value : a.value == b.value;
return null;
}
function isTruthy(node) {
if (!node) return false;
if (node.type === 'Literal') return !!node.value;
if (node.type === 'ParenthesizedExpression') return isTruthy(node.expression);
if (node.operator in operators) return operators[node.operator](node);
return null;
}
function isFalsy(node) {
return not(isTruthy(node));
}
function getKeypath(node) {
const parts = [];
while (node.type === 'MemberExpression') {
if (node.computed) return null;
parts.unshift(node.property.name);
// eslint-disable-next-line no-param-reassign
node = node.object;
}
if (node.type !== 'Identifier') return null;
const { name } = node;
parts.unshift(name);
return { name, keypath: parts.join('.') };
}
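// Detects the compiled-ESM marker emitted by transpilers, i.e. calls of the form
// Object.defineProperty(exports, '__esModule', { value: true }) (or the module.exports
// variant), returning 'exports' or 'module' for the flagged target, and false otherwise.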
const KEY_COMPILED_ESM = '__esModule';
function getDefineCompiledEsmType(node) {
const definedPropertyWithExports = getDefinePropertyCallName(node, 'exports');
const definedProperty =
definedPropertyWithExports || getDefinePropertyCallName(node, 'module.exports');
if (definedProperty && definedProperty.key === KEY_COMPILED_ESM) {
return isTruthy(definedProperty.value)
? definedPropertyWithExports
? 'exports'
: 'module'
: false;
}
return false;
}
function getDefinePropertyCallName(node, targetName) {
const {
callee: { object, property }
} = node;
if (!object || object.type !== 'Identifier' || object.name !== 'Object') return;
if (!property || property.type !== 'Identifier' || property.name !== 'defineProperty') return;
if (node.arguments.length !== 3) return;
const targetNames = targetName.split('.');
const [target, key, value] = node.arguments;
if (targetNames.length === 1) {
if (target.type !== 'Identifier' || target.name !== targetNames[0]) {
return;
}
}
if (targetNames.length === 2) {
if (
target.type !== 'MemberExpression' ||
target.object.name !== targetNames[0] ||
target.property.name !== targetNames[1]
) {
return;
}
}
if (value.type !== 'ObjectExpression' || !value.properties) return;
const valueProperty = value.properties.find((p) => p.key && p.key.name === 'value');
if (!valueProperty || !valueProperty.value) return;
// eslint-disable-next-line consistent-return
return { key: key.value, value: valueProperty.value };
}
function isShorthandProperty(parent) {
return parent && parent.type === 'Property' && parent.shorthand;
}
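// Wraps the transformed module body in an IIFE so that `module` and `exports` become
// parameters, e.g. `(function (module, exports) { ... } (moduleName, moduleName.exports));`
// when both are used, indenting the original code and excluding the given ranges from
// re-indentation.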
function wrapCode(magicString, uses, moduleName, exportsName, indentExclusionRanges) {
const args = [];
const passedArgs = [];
if (uses.module) {
args.push('module');
passedArgs.push(moduleName);
}
if (uses.exports) {
args.push('exports');
passedArgs.push(uses.module ? `${moduleName}.exports` : exportsName);
}
magicString
.trim()
.indent('\t', { exclude: indentExclusionRanges })
.prepend(`(function (${args.join(', ')}) {\n`)
// For some reason, this line is only indented correctly when using a
// require-wrapper if we have this leading space
.append(` \n} (${passedArgs.join(', ')}));`);
}
function rewriteExportsAndGetExportsBlock(
magicString,
moduleName,
exportsName,
exportedExportsName,
wrapped,
moduleExportsAssignments,
firstTopLevelModuleExportsAssignment,
exportsAssignmentsByName,
topLevelAssignments,
defineCompiledEsmExpressions,
deconflictedExportNames,
code,
HELPERS_NAME,
exportMode,
defaultIsModuleExports,
usesRequireWrapper,
requireName
) {
const exports = [];
const exportDeclarations = [];
if (usesRequireWrapper) {
getExportsWhenUsingRequireWrapper(
magicString,
wrapped,
exportMode,
exports,
moduleExportsAssignments,
exportsAssignmentsByName,
moduleName,
exportsName,
requireName,
defineCompiledEsmExpressions
);
} else if (exportMode === 'replace') {
getExportsForReplacedModuleExports(
magicString,
exports,
exportDeclarations,
moduleExportsAssignments,
firstTopLevelModuleExportsAssignment,
exportsName,
defaultIsModuleExports,
HELPERS_NAME
);
} else {
if (exportMode === 'module') {
exportDeclarations.push(`var ${exportedExportsName} = ${moduleName}.exports`);
exports.push(`${exportedExportsName} as __moduleExports`);
} else {
exports.push(`${exportsName} as __moduleExports`);
}
if (wrapped) {
exportDeclarations.push(
getDefaultExportDeclaration(exportedExportsName, defaultIsModuleExports, HELPERS_NAME)
);
} else {
getExports(
magicString,
exports,
exportDeclarations,
moduleExportsAssignments,
exportsAssignmentsByName,
deconflictedExportNames,
topLevelAssignments,
moduleName,
exportsName,
exportedExportsName,
defineCompiledEsmExpressions,
HELPERS_NAME,
defaultIsModuleExports,
exportMode
);
}
}
if (exports.length) {
exportDeclarations.push(`export { ${exports.join(', ')} }`);
}
return `\n\n${exportDeclarations.join(';\n')};`;
}
function getExportsWhenUsingRequireWrapper(
magicString,
wrapped,
exportMode,
exports,
moduleExportsAssignments,
exportsAssignmentsByName,
moduleName,
exportsName,
requireName,
defineCompiledEsmExpressions
) {
exports.push(`${requireName} as __require`);
if (wrapped) return;
if (exportMode === 'replace') {
rewriteModuleExportsAssignments(magicString, moduleExportsAssignments, exportsName);
} else {
rewriteModuleExportsAssignments(magicString, moduleExportsAssignments, `${moduleName}.exports`);
// Collect and rewrite named exports
for (const [exportName, { nodes }] of exportsAssignmentsByName) {
for (const { node, type } of nodes) {
magicString.overwrite(
node.start,
node.left.end,
`${
exportMode === 'module' && type === 'module' ? `${moduleName}.exports` : exportsName
}.${exportName}`
);
}
}
replaceDefineCompiledEsmExpressionsAndGetIfRestorable(
defineCompiledEsmExpressions,
magicString,
exportMode,
moduleName,
exportsName
);
}
}
function getExportsForReplacedModuleExports(
magicString,
exports,
exportDeclarations,
moduleExportsAssignments,
firstTopLevelModuleExportsAssignment,
exportsName,
defaultIsModuleExports,
HELPERS_NAME
) {
for (const { left } of moduleExportsAssignments) {
magicString.overwrite(left.start, left.end, exportsName);
}
magicString.prependRight(firstTopLevelModuleExportsAssignment.left.start, 'var ');
exports.push(`${exportsName} as __moduleExports`);
exportDeclarations.push(
getDefaultExportDeclaration(exportsName, defaultIsModuleExports, HELPERS_NAME)
);
}
function getDefaultExportDeclaration(exportedExportsName, defaultIsModuleExports, HELPERS_NAME) {
return `export default ${
defaultIsModuleExports === true
? exportedExportsName
: defaultIsModuleExports === false
? `${exportedExportsName}.default`
: `/*@__PURE__*/${HELPERS_NAME}.getDefaultExportFromCjs(${exportedExportsName})`
}`;
}
function getExports(
magicString,
exports,
exportDeclarations,
moduleExportsAssignments,
exportsAssignmentsByName,
deconflictedExportNames,
topLevelAssignments,
moduleName,
exportsName,
exportedExportsName,
defineCompiledEsmExpressions,
HELPERS_NAME,
defaultIsModuleExports,
exportMode
) {
let deconflictedDefaultExportName;
// Collect and rewrite module.exports assignments
for (const { left } of moduleExportsAssignments) {
magicString.overwrite(left.start, left.end, `${moduleName}.exports`);
}
// Collect and rewrite named exports
for (const [exportName, { nodes }] of exportsAssignmentsByName) {
const deconflicted = deconflictedExportNames[exportName];
let needsDeclaration = true;
for (const { node, type } of nodes) {
let replacement = `${deconflicted} = ${
exportMode === 'module' && type === 'module' ? `${moduleName}.exports` : exportsName
}.${exportName}`;
if (needsDeclaration && topLevelAssignments.has(node)) {
replacement = `var ${replacement}`;
needsDeclaration = false;
}
magicString.overwrite(node.start, node.left.end, replacement);
}
if (needsDeclaration) {
magicString.prepend(`var ${deconflicted};\n`);
}
if (exportName === 'default') {
deconflictedDefaultExportName = deconflicted;
} else {
exports.push(exportName === deconflicted ? exportName : `${deconflicted} as ${exportName}`);
}
}
const isRestorableCompiledEsm = replaceDefineCompiledEsmExpressionsAndGetIfRestorable(
defineCompiledEsmExpressions,
magicString,
exportMode,
moduleName,
exportsName
);
if (
defaultIsModuleExports === false ||
(defaultIsModuleExports === 'auto' &&
isRestorableCompiledEsm &&
moduleExportsAssignments.length === 0)
) {
// If there is no deconflictedDefaultExportName, then we use the namespace as
// fallback because there can be no "default" property on the namespace
exports.push(`${deconflictedDefaultExportName || exportedExportsName} as default`);
} else if (
defaultIsModuleExports === true ||
(!isRestorableCompiledEsm && moduleExportsAssignments.length === 0)
) {
exports.push(`${exportedExportsName} as default`);
} else {
exportDeclarations.push(
getDefaultExportDeclaration(exportedExportsName, defaultIsModuleExports, HELPERS_NAME)
);
}
}
function rewriteModuleExportsAssignments(magicString, moduleExportsAssignments, exportsName) {
for (const { left } of moduleExportsAssignments) {
magicString.overwrite(left.start, left.end, exportsName);
}
}
function replaceDefineCompiledEsmExpressionsAndGetIfRestorable(
defineCompiledEsmExpressions,
magicString,
exportMode,
moduleName,
exportsName
) {
let isRestorableCompiledEsm = false;
for (const { node, type } of defineCompiledEsmExpressions) {
isRestorableCompiledEsm = true;
const moduleExportsExpression =
node.type === 'CallExpression' ? node.arguments[0] : node.left.object;
magicString.overwrite(
moduleExportsExpression.start,
moduleExportsExpression.end,
exportMode === 'module' && type === 'module' ? `${moduleName}.exports` : exportsName
);
}
return isRestorableCompiledEsm;
}
function isRequireExpression(node, scope) {
if (!node) return false;
if (node.type !== 'CallExpression') return false;
// Weird case of `require()` or `module.require()` without arguments
if (node.arguments.length === 0) return false;
return isRequire(node.callee, scope);
}
function isRequire(node, scope) {
return (
(node.type === 'Identifier' && node.name === 'require' && !scope.contains('require')) ||
(node.type === 'MemberExpression' && isModuleRequire(node, scope))
);
}
function isModuleRequire({ object, property }, scope) {
return (
object.type === 'Identifier' &&
object.name === 'module' &&
property.type === 'Identifier' &&
property.name === 'require' &&
!scope.contains('module')
);
}
function hasDynamicArguments(node) {
return (
node.arguments.length > 1 ||
(node.arguments[0].type !== 'Literal' &&
(node.arguments[0].type !== 'TemplateLiteral' || node.arguments[0].expressions.length > 0))
);
}
const reservedMethod = { resolve: true, cache: true, main: true };
function isNodeRequirePropertyAccess(parent) {
return parent && parent.property && reservedMethod[parent.property.name];
}
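// Extracts the requested module id from a static require call: either the string literal value
// or the cooked text of a substitution-free template literal.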
function getRequireStringArg(node) {
return node.arguments[0].type === 'Literal'
? node.arguments[0].value
: node.arguments[0].quasis[0].value.cooked;
}
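// Collects every require() expression found while walking the AST and later rewrites them into an
// import block: helper and dynamic-require imports first, then one import per required source,
// with requires inside try-catch optionally left alone or removed entirely.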
function getRequireHandlers() {
const requireExpressions = [];
function addRequireExpression(
sourceId,
node,
scope,
usesReturnValue,
isInsideTryBlock,
isInsideConditional,
toBeRemoved
) {
requireExpressions.push({
sourceId,
node,
scope,
usesReturnValue,
isInsideTryBlock,
isInsideConditional,
toBeRemoved
});
}
async function rewriteRequireExpressionsAndGetImportBlock(
magicString,
topLevelDeclarations,
reassignedNames,
helpersName,
dynamicRequireName,
moduleName,
exportsName,
id,
exportMode,
resolveRequireSourcesAndUpdateMeta,
needsRequireWrapper,
isEsModule,
isDynamicRequireModulesEnabled,
getIgnoreTryCatchRequireStatementMode,
commonjsMeta
) {
const imports = [];
imports.push(`import * as ${helpersName} from "${HELPERS_ID}"`);
if (dynamicRequireName) {
imports.push(
`import { ${
isDynamicRequireModulesEnabled ? CREATE_COMMONJS_REQUIRE_EXPORT : COMMONJS_REQUIRE_EXPORT
} as ${dynamicRequireName} } from "${DYNAMIC_MODULES_ID}"`
);
}
if (exportMode === 'module') {
imports.push(
`import { __module as ${moduleName} } from ${JSON.stringify(wrapId(id, MODULE_SUFFIX))}`,
`var ${exportsName} = ${moduleName}.exports`
);
} else if (exportMode === 'exports') {
imports.push(
`import { __exports as ${exportsName} } from ${JSON.stringify(wrapId(id, EXPORTS_SUFFIX))}`
);
}
const requiresBySource = collectSources(requireExpressions);
const requireTargets = await resolveRequireSourcesAndUpdateMeta(
id,
needsRequireWrapper ? IS_WRAPPED_COMMONJS : !isEsModule,
commonjsMeta,
Object.keys(requiresBySource).map((source) => {
return {
source,
isConditional: requiresBySource[source].every((require) => require.isInsideConditional)
};
})
);
processRequireExpressions(
imports,
requireTargets,
requiresBySource,
getIgnoreTryCatchRequireStatementMode,
magicString
);
return imports.length ? `${imports.join(';\n')};\n\n` : '';
}
return {
addRequireExpression,
rewriteRequireExpressionsAndGetImportBlock
};
}
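// Groups the collected require expressions by the source id they request.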
function collectSources(requireExpressions) {
const requiresBySource = Object.create(null);
for (const requireExpression of requireExpressions) {
const { sourceId } = requireExpression;
if (!requiresBySource[sourceId]) {
requiresBySource[sourceId] = [];
}
const requires = requiresBySource[sourceId];
requires.push(requireExpression);
}
return requiresBySource;
}
function processRequireExpressions(
imports,
requireTargets,
requiresBySource,
getIgnoreTryCatchRequireStatementMode,
magicString
) {
const generateRequireName = getGenerateRequireName();
for (const { source, id: resolvedId, isCommonJS } of requireTargets) {
const requires = requiresBySource[source];
const name = generateRequireName(requires);
let usesRequired = false;
let needsImport = false;
for (const { node, usesReturnValue, toBeRemoved, isInsideTryBlock } of requires) {
const { canConvertRequire, shouldRemoveRequire } =
isInsideTryBlock && isWrappedId(resolvedId, EXTERNAL_SUFFIX)
? getIgnoreTryCatchRequireStatementMode(source)
: { canConvertRequire: true, shouldRemoveRequire: false };
if (shouldRemoveRequire) {
if (usesReturnValue) {
magicString.overwrite(node.start, node.end, 'undefined');
} else {
magicString.remove(toBeRemoved.start, toBeRemoved.end);
}
} else if (canConvertRequire) {
needsImport = true;
if (isCommonJS === IS_WRAPPED_COMMONJS) {
magicString.overwrite(node.start, node.end, `${name}()`);
} else if (usesReturnValue) {
usesRequired = true;
magicString.overwrite(node.start, node.end, name);
} else {
magicString.remove(toBeRemoved.start, toBeRemoved.end);
}
}
}
if (needsImport) {
if (isCommonJS === IS_WRAPPED_COMMONJS) {
imports.push(`import { __require as ${name} } from ${JSON.stringify(resolvedId)}`);
} else {
imports.push(`import ${usesRequired ? `${name} from ` : ''}${JSON.stringify(resolvedId)}`);
}
}
}
}
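// Returns a generator for deconflicted import names of the form `require$$<n>`, skipping any name
// that is already bound in one of the requiring scopes.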
function getGenerateRequireName() {
let uid = 0;
return (requires) => {
let name;
const hasNameConflict = ({ scope }) => scope.contains(name);
do {
name = `require$$${uid}`;
uid += 1;
} while (requires.some(hasNameConflict));
return name;
};
}
/* eslint-disable no-param-reassign, no-shadow, no-underscore-dangle, no-continue */
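// `exportsPattern` matches the keypaths `exports`, `exports.<name>`, `module.exports` and
// `module.exports.<name>`; the optional capture group holds the named export. `functionType`
// matches the node types that open a new lexical (function) depth.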
const exportsPattern = /^(?:module\.)?exports(?:\.([a-zA-Z_$][a-zA-Z_$0-9]*))?$/;
const functionType = /^(?:FunctionDeclaration|FunctionExpression|ArrowFunctionExpression)$/;
// There are three different types of CommonJS modules, described by their
// "exportMode":
// - exports: Only assignments to (module.)exports properties
// - replace: A single assignment to module.exports itself
// - module: Anything else
// Special cases:
// - usesRequireWrapper
// - isWrapped
async function transformCommonjs(
parse,
code,
id,
isEsModule,
ignoreGlobal,
ignoreRequire,
ignoreDynamicRequires,
getIgnoreTryCatchRequireStatementMode,
sourceMap,
isDynamicRequireModulesEnabled,
dynamicRequireModules,
commonDir,
astCache,
defaultIsModuleExports,
needsRequireWrapper,
resolveRequireSourcesAndUpdateMeta,
isRequired,
checkDynamicRequire,
commonjsMeta
) {
const ast = astCache || tryParse(parse, code, id);
const magicString = new MagicString(code);
const uses = {
module: false,
exports: false,
global: false,
require: false
};
const virtualDynamicRequirePath =
isDynamicRequireModulesEnabled && getVirtualPathForDynamicRequirePath(dirname$1(id), commonDir);
let scope = attachScopes(ast, 'scope');
let lexicalDepth = 0;
let programDepth = 0;
let classBodyDepth = 0;
let currentTryBlockEnd = null;
let shouldWrap = false;
const globals = new Set();
// A conditionalNode is a node for which execution is not guaranteed. If such a node is a require
// or contains nested requires, those should be handled as function calls unless there is an
// unconditional require elsewhere.
let currentConditionalNodeEnd = null;
const conditionalNodes = new Set();
const { addRequireExpression, rewriteRequireExpressionsAndGetImportBlock } = getRequireHandlers();
// See which names are assigned to. This is necessary to prevent
// illegally replacing `var foo = require('foo')` with `import foo from 'foo'`,
// where `foo` is later reassigned. (This happens in the wild. CommonJS, sigh)
const reassignedNames = new Set();
const topLevelDeclarations = [];
const skippedNodes = new Set();
const moduleAccessScopes = new Set([scope]);
const exportsAccessScopes = new Set([scope]);
const moduleExportsAssignments = [];
let firstTopLevelModuleExportsAssignment = null;
const exportsAssignmentsByName = new Map();
const topLevelAssignments = new Set();
const topLevelDefineCompiledEsmExpressions = [];
const replacedGlobal = [];
const replacedDynamicRequires = [];
const importedVariables = new Set();
const indentExclusionRanges = [];
walk$3(ast, {
enter(node, parent) {
if (skippedNodes.has(node)) {
this.skip();
return;
}
if (currentTryBlockEnd !== null && node.start > currentTryBlockEnd) {
currentTryBlockEnd = null;
}
if (currentConditionalNodeEnd !== null && node.start > currentConditionalNodeEnd) {
currentConditionalNodeEnd = null;
}
if (currentConditionalNodeEnd === null && conditionalNodes.has(node)) {
currentConditionalNodeEnd = node.end;
}
programDepth += 1;
if (node.scope) ({ scope } = node);
if (functionType.test(node.type)) lexicalDepth += 1;
if (sourceMap) {
magicString.addSourcemapLocation(node.start);
magicString.addSourcemapLocation(node.end);
}
// eslint-disable-next-line default-case
switch (node.type) {
case 'AssignmentExpression':
if (node.left.type === 'MemberExpression') {
const flattened = getKeypath(node.left);
if (!flattened || scope.contains(flattened.name)) return;
const exportsPatternMatch = exportsPattern.exec(flattened.keypath);
if (!exportsPatternMatch || flattened.keypath === 'exports') return;
const [, exportName] = exportsPatternMatch;
uses[flattened.name] = true;
// we're dealing with `module.exports = ...` or `[module.]exports.foo = ...` –
if (flattened.keypath === 'module.exports') {
moduleExportsAssignments.push(node);
if (programDepth > 3) {
moduleAccessScopes.add(scope);
} else if (!firstTopLevelModuleExportsAssignment) {
firstTopLevelModuleExportsAssignment = node;
}
} else if (exportName === KEY_COMPILED_ESM) {
if (programDepth > 3) {
shouldWrap = true;
} else {
// The "type" is either "module" or "exports" to discern
// assignments to module.exports vs exports if needed
topLevelDefineCompiledEsmExpressions.push({ node, type: flattened.name });
}
} else {
const exportsAssignments = exportsAssignmentsByName.get(exportName) || {
nodes: [],
scopes: new Set()
};
exportsAssignments.nodes.push({ node, type: flattened.name });
exportsAssignments.scopes.add(scope);
exportsAccessScopes.add(scope);
exportsAssignmentsByName.set(exportName, exportsAssignments);
if (programDepth <= 3) {
topLevelAssignments.add(node);
}
}
skippedNodes.add(node.left);
} else {
for (const name of extractAssignedNames(node.left)) {
reassignedNames.add(name);
}
}
return;
case 'CallExpression': {
const defineCompiledEsmType = getDefineCompiledEsmType(node);
if (defineCompiledEsmType) {
if (programDepth === 3 && parent.type === 'ExpressionStatement') {
// skip special handling for [module.]exports until we know we render this
skippedNodes.add(node.arguments[0]);
topLevelDefineCompiledEsmExpressions.push({ node, type: defineCompiledEsmType });
} else {
shouldWrap = true;
}
return;
}
// Transform require.resolve
if (
isDynamicRequireModulesEnabled &&
node.callee.object &&
isRequire(node.callee.object, scope) &&
node.callee.property.name === 'resolve'
) {
checkDynamicRequire(node.start);
uses.require = true;
const requireNode = node.callee.object;
replacedDynamicRequires.push(requireNode);
skippedNodes.add(node.callee);
return;
}
if (!isRequireExpression(node, scope)) {
const keypath = getKeypath(node.callee);
if (keypath && importedVariables.has(keypath.name)) {
// Heuristic to deoptimize requires after a required function has been called
currentConditionalNodeEnd = Infinity;
}
return;
}
skippedNodes.add(node.callee);
uses.require = true;
if (hasDynamicArguments(node)) {
if (isDynamicRequireModulesEnabled) {
checkDynamicRequire(node.start);
}
if (!ignoreDynamicRequires) {
replacedDynamicRequires.push(node.callee);
}
return;
}
const requireStringArg = getRequireStringArg(node);
if (!ignoreRequire(requireStringArg)) {
const usesReturnValue = parent.type !== 'ExpressionStatement';
const toBeRemoved =
parent.type === 'ExpressionStatement' &&
(!currentConditionalNodeEnd ||
// We should completely remove requires directly in a try-catch
// so that Rollup can then remove the try-catch
(currentTryBlockEnd !== null && currentTryBlockEnd < currentConditionalNodeEnd))
? parent
: node;
addRequireExpression(
requireStringArg,
node,
scope,
usesReturnValue,
currentTryBlockEnd !== null,
currentConditionalNodeEnd !== null,
toBeRemoved
);
if (parent.type === 'VariableDeclarator' && parent.id.type === 'Identifier') {
for (const name of extractAssignedNames(parent.id)) {
importedVariables.add(name);
}
}
}
return;
}
case 'ClassBody':
classBodyDepth += 1;
return;
case 'ConditionalExpression':
case 'IfStatement':
// skip dead branches
if (isFalsy(node.test)) {
skippedNodes.add(node.consequent);
} else if (isTruthy(node.test)) {
if (node.alternate) {
skippedNodes.add(node.alternate);
}
} else {
conditionalNodes.add(node.consequent);
if (node.alternate) {
conditionalNodes.add(node.alternate);
}
}
return;
case 'ArrowFunctionExpression':
case 'FunctionDeclaration':
case 'FunctionExpression':
// requires in functions should be conditional unless it is an IIFE
if (
currentConditionalNodeEnd === null &&
!(parent.type === 'CallExpression' && parent.callee === node)
) {
currentConditionalNodeEnd = node.end;
}
return;
case 'Identifier': {
const { name } = node;
if (
!isReference(node, parent) ||
scope.contains(name) ||
(parent.type === 'PropertyDefinition' && parent.key === node)
)
return;
switch (name) {
case 'require':
uses.require = true;
if (isNodeRequirePropertyAccess(parent)) {
return;
}
if (!ignoreDynamicRequires) {
if (isShorthandProperty(parent)) {
// as key and value are the same object, isReference regards
// both as references, so we need to skip now
skippedNodes.add(parent.value);
magicString.prependRight(node.start, 'require: ');
}
replacedDynamicRequires.push(node);
}
return;
case 'module':
case 'exports':
shouldWrap = true;
uses[name] = true;
return;
case 'global':
uses.global = true;
if (!ignoreGlobal) {
replacedGlobal.push(node);
}
return;
case 'define':
magicString.overwrite(node.start, node.end, 'undefined', {
storeName: true
});
return;
default:
globals.add(name);
return;
}
}
case 'LogicalExpression':
// skip dead branches
if (node.operator === '&&') {
if (isFalsy(node.left)) {
skippedNodes.add(node.right);
} else if (!isTruthy(node.left)) {
conditionalNodes.add(node.right);
}
} else if (node.operator === '||') {
if (isTruthy(node.left)) {
skippedNodes.add(node.right);
} else if (!isFalsy(node.left)) {
conditionalNodes.add(node.right);
}
}
return;
case 'MemberExpression':
if (!isDynamicRequireModulesEnabled && isModuleRequire(node, scope)) {
uses.require = true;
replacedDynamicRequires.push(node);
skippedNodes.add(node.object);
skippedNodes.add(node.property);
}
return;
case 'ReturnStatement':
// if top-level return, we need to wrap it
if (lexicalDepth === 0) {
shouldWrap = true;
}
return;
case 'ThisExpression':
// rewrite top-level `this` as `commonjsHelpers.commonjsGlobal`
if (lexicalDepth === 0 && !classBodyDepth) {
uses.global = true;
if (!ignoreGlobal) {
replacedGlobal.push(node);
}
}
return;
case 'TryStatement':
if (currentTryBlockEnd === null) {
currentTryBlockEnd = node.block.end;
}
if (currentConditionalNodeEnd === null) {
currentConditionalNodeEnd = node.end;
}
return;
case 'UnaryExpression':
// rewrite `typeof module`, `typeof module.exports` and `typeof exports` (https://github.com/rollup/rollup-plugin-commonjs/issues/151)
if (node.operator === 'typeof') {
const flattened = getKeypath(node.argument);
if (!flattened) return;
if (scope.contains(flattened.name)) return;
if (
!isEsModule &&
(flattened.keypath === 'module.exports' ||
flattened.keypath === 'module' ||
flattened.keypath === 'exports')
) {
magicString.overwrite(node.start, node.end, `'object'`, {
storeName: false
});
}
}
return;
case 'VariableDeclaration':
if (!scope.parent) {
topLevelDeclarations.push(node);
}
return;
case 'TemplateElement':
if (node.value.raw.includes('\n')) {
indentExclusionRanges.push([node.start, node.end]);
}
}
},
leave(node) {
programDepth -= 1;
if (node.scope) scope = scope.parent;
if (functionType.test(node.type)) lexicalDepth -= 1;
if (node.type === 'ClassBody') classBodyDepth -= 1;
}
});
const nameBase = getName(id);
const exportsName = deconflict([...exportsAccessScopes], globals, nameBase);
const moduleName = deconflict([...moduleAccessScopes], globals, `${nameBase}Module`);
const requireName = deconflict([scope], globals, `require${capitalize(nameBase)}`);
const isRequiredName = deconflict([scope], globals, `hasRequired${capitalize(nameBase)}`);
const helpersName = deconflict([scope], globals, 'commonjsHelpers');
const dynamicRequireName =
replacedDynamicRequires.length > 0 &&
deconflict(
[scope],
globals,
isDynamicRequireModulesEnabled ? CREATE_COMMONJS_REQUIRE_EXPORT : COMMONJS_REQUIRE_EXPORT
);
const deconflictedExportNames = Object.create(null);
for (const [exportName, { scopes }] of exportsAssignmentsByName) {
deconflictedExportNames[exportName] = deconflict([...scopes], globals, exportName);
}
for (const node of replacedGlobal) {
magicString.overwrite(node.start, node.end, `${helpersName}.commonjsGlobal`, {
storeName: true
});
}
for (const node of replacedDynamicRequires) {
magicString.overwrite(
node.start,
node.end,
isDynamicRequireModulesEnabled
? `${dynamicRequireName}(${JSON.stringify(virtualDynamicRequirePath)})`
: dynamicRequireName,
{
contentOnly: true,
storeName: true
}
);
}
// We cannot wrap ES/mixed modules
shouldWrap = !isEsModule && (shouldWrap || (uses.exports && moduleExportsAssignments.length > 0));
if (
!(
shouldWrap ||
isRequired ||
needsRequireWrapper ||
uses.module ||
uses.exports ||
uses.require ||
topLevelDefineCompiledEsmExpressions.length > 0
) &&
(ignoreGlobal || !uses.global)
) {
return { meta: { commonjs: { isCommonJS: false } } };
}
let leadingComment = '';
if (code.startsWith('/*')) {
const commentEnd = code.indexOf('*/', 2) + 2;
leadingComment = `${code.slice(0, commentEnd)}\n`;
magicString.remove(0, commentEnd).trim();
}
let shebang = '';
if (code.startsWith('#!')) {
const shebangEndPosition = code.indexOf('\n') + 1;
shebang = code.slice(0, shebangEndPosition);
magicString.remove(0, shebangEndPosition).trim();
}
const exportMode = isEsModule
? 'none'
: shouldWrap
? uses.module
? 'module'
: 'exports'
: firstTopLevelModuleExportsAssignment
? exportsAssignmentsByName.size === 0 && topLevelDefineCompiledEsmExpressions.length === 0
? 'replace'
: 'module'
: moduleExportsAssignments.length === 0
? 'exports'
: 'module';
const exportedExportsName =
exportMode === 'module' ? deconflict([], globals, `${nameBase}Exports`) : exportsName;
const importBlock = await rewriteRequireExpressionsAndGetImportBlock(
magicString,
topLevelDeclarations,
reassignedNames,
helpersName,
dynamicRequireName,
moduleName,
exportsName,
id,
exportMode,
resolveRequireSourcesAndUpdateMeta,
needsRequireWrapper,
isEsModule,
isDynamicRequireModulesEnabled,
getIgnoreTryCatchRequireStatementMode,
commonjsMeta
);
const usesRequireWrapper = commonjsMeta.isCommonJS === IS_WRAPPED_COMMONJS;
const exportBlock = isEsModule
? ''
: rewriteExportsAndGetExportsBlock(
magicString,
moduleName,
exportsName,
exportedExportsName,
shouldWrap,
moduleExportsAssignments,
firstTopLevelModuleExportsAssignment,
exportsAssignmentsByName,
topLevelAssignments,
topLevelDefineCompiledEsmExpressions,
deconflictedExportNames,
code,
helpersName,
exportMode,
defaultIsModuleExports,
usesRequireWrapper,
requireName
);
if (shouldWrap) {
wrapCode(magicString, uses, moduleName, exportsName, indentExclusionRanges);
}
if (usesRequireWrapper) {
magicString.trim().indent('\t', {
exclude: indentExclusionRanges
});
const exported = exportMode === 'module' ? `${moduleName}.exports` : exportsName;
magicString.prepend(
`var ${isRequiredName};
function ${requireName} () {
\tif (${isRequiredName}) return ${exported};
\t${isRequiredName} = 1;
`
).append(`
\treturn ${exported};
}`);
if (exportMode === 'replace') {
magicString.prepend(`var ${exportsName};\n`);
}
}
magicString
.trim()
.prepend(shebang + leadingComment + importBlock)
.append(exportBlock);
return {
code: magicString.toString(),
map: sourceMap ? magicString.generateMap() : null,
syntheticNamedExports: isEsModule || usesRequireWrapper ? false : '__moduleExports',
meta: { commonjs: { ...commonjsMeta, shebang } }
};
}
const PLUGIN_NAME = 'commonjs';
function commonjs(options = {}) {
const {
ignoreGlobal,
ignoreDynamicRequires,
requireReturnsDefault: requireReturnsDefaultOption,
defaultIsModuleExports: defaultIsModuleExportsOption,
esmExternals
} = options;
const extensions = options.extensions || ['.js'];
const filter = createFilter$1(options.include, options.exclude);
const isPossibleCjsId = (id) => {
const extName = extname(id);
return extName === '.cjs' || (extensions.includes(extName) && filter(id));
};
const { strictRequiresFilter, detectCyclesAndConditional } = getStrictRequiresFilter(options);
const getRequireReturnsDefault =
typeof requireReturnsDefaultOption === 'function'
? requireReturnsDefaultOption
: () => requireReturnsDefaultOption;
let esmExternalIds;
const isEsmExternal =
typeof esmExternals === 'function'
? esmExternals
: Array.isArray(esmExternals)
? ((esmExternalIds = new Set(esmExternals)), (id) => esmExternalIds.has(id))
: () => esmExternals;
const getDefaultIsModuleExports =
typeof defaultIsModuleExportsOption === 'function'
? defaultIsModuleExportsOption
: () =>
typeof defaultIsModuleExportsOption === 'boolean' ? defaultIsModuleExportsOption : 'auto';
const dynamicRequireRoot =
typeof options.dynamicRequireRoot === 'string'
? resolve$3(options.dynamicRequireRoot)
: process.cwd();
const { commonDir, dynamicRequireModules } = getDynamicRequireModules(
options.dynamicRequireTargets,
dynamicRequireRoot
);
const isDynamicRequireModulesEnabled = dynamicRequireModules.size > 0;
const ignoreRequire =
typeof options.ignore === 'function'
? options.ignore
: Array.isArray(options.ignore)
? (id) => options.ignore.includes(id)
: () => false;
const getIgnoreTryCatchRequireStatementMode = (id) => {
const mode =
typeof options.ignoreTryCatch === 'function'
? options.ignoreTryCatch(id)
: Array.isArray(options.ignoreTryCatch)
? options.ignoreTryCatch.includes(id)
: typeof options.ignoreTryCatch !== 'undefined'
? options.ignoreTryCatch
: true;
return {
canConvertRequire: mode !== 'remove' && mode !== true,
shouldRemoveRequire: mode === 'remove'
};
};
const { currentlyResolving, resolveId } = getResolveId(extensions, isPossibleCjsId);
const sourceMap = options.sourceMap !== false;
// Initialized in buildStart
let requireResolver;
function transformAndCheckExports(code, id) {
const normalizedId = normalizePathSlashes(id);
const { isEsModule, hasDefaultExport, hasNamedExports, ast } = analyzeTopLevelStatements(
this.parse,
code,
id
);
const commonjsMeta = this.getModuleInfo(id).meta.commonjs || {};
if (hasDefaultExport) {
commonjsMeta.hasDefaultExport = true;
}
if (hasNamedExports) {
commonjsMeta.hasNamedExports = true;
}
if (
!dynamicRequireModules.has(normalizedId) &&
(!(hasCjsKeywords(code, ignoreGlobal) || requireResolver.isRequiredId(id)) ||
(isEsModule && !options.transformMixedEsModules))
) {
commonjsMeta.isCommonJS = false;
return { meta: { commonjs: commonjsMeta } };
}
const needsRequireWrapper =
!isEsModule && (dynamicRequireModules.has(normalizedId) || strictRequiresFilter(id));
const checkDynamicRequire = (position) => {
const normalizedDynamicRequireRoot = normalizePathSlashes(dynamicRequireRoot);
if (normalizedId.indexOf(normalizedDynamicRequireRoot) !== 0) {
this.error(
{
code: 'DYNAMIC_REQUIRE_OUTSIDE_ROOT',
normalizedId,
normalizedDynamicRequireRoot,
message: `"${normalizedId}" contains dynamic require statements but it is not within the current dynamicRequireRoot "${normalizedDynamicRequireRoot}". You should set dynamicRequireRoot to "${dirname$1(
normalizedId
)}" or one of its parent directories.`
},
position
);
}
};
return transformCommonjs(
this.parse,
code,
id,
isEsModule,
ignoreGlobal || isEsModule,
ignoreRequire,
ignoreDynamicRequires && !isDynamicRequireModulesEnabled,
getIgnoreTryCatchRequireStatementMode,
sourceMap,
isDynamicRequireModulesEnabled,
dynamicRequireModules,
commonDir,
ast,
getDefaultIsModuleExports(id),
needsRequireWrapper,
requireResolver.resolveRequireSourcesAndUpdateMeta(this),
requireResolver.isRequiredId(id),
checkDynamicRequire,
commonjsMeta
);
}
return {
name: PLUGIN_NAME,
version: version$2,
options(rawOptions) {
// We inject the resolver at the beginning so that "catch-all" resolvers like node-resolve
// do not prevent our plugin from resolving entry points to proxies.
const plugins = Array.isArray(rawOptions.plugins)
? [...rawOptions.plugins]
: rawOptions.plugins
? [rawOptions.plugins]
: [];
plugins.unshift({
name: 'commonjs--resolver',
resolveId
});
return { ...rawOptions, plugins };
},
buildStart({ plugins }) {
validateVersion(this.meta.rollupVersion, peerDependencies.rollup, 'rollup');
const nodeResolve = plugins.find(({ name }) => name === 'node-resolve');
if (nodeResolve) {
validateVersion(nodeResolve.version, '^13.0.6', '@rollup/plugin-node-resolve');
}
if (options.namedExports != null) {
this.warn(
'The namedExports option from "@rollup/plugin-commonjs" is deprecated. Named exports are now handled automatically.'
);
}
requireResolver = getRequireResolver(
extensions,
detectCyclesAndConditional,
currentlyResolving
);
},
buildEnd() {
if (options.strictRequires === 'debug') {
const wrappedIds = requireResolver.getWrappedIds();
if (wrappedIds.length) {
this.warn({
code: 'WRAPPED_IDS',
ids: wrappedIds,
message: `The commonjs plugin automatically wrapped the following files:\n[\n${wrappedIds
.map((id) => `\t${JSON.stringify(relative$1(process.cwd(), id))}`)
.join(',\n')}\n]`
});
} else {
this.warn({
code: 'WRAPPED_IDS',
ids: wrappedIds,
message: 'The commonjs plugin did not wrap any files.'
});
}
}
},
load(id) {
if (id === HELPERS_ID) {
return getHelpersModule();
}
if (isWrappedId(id, MODULE_SUFFIX)) {
const name = getName(unwrapId(id, MODULE_SUFFIX));
return {
code: `var ${name} = {exports: {}}; export {${name} as __module}`,
meta: { commonjs: { isCommonJS: false } }
};
}
if (isWrappedId(id, EXPORTS_SUFFIX)) {
const name = getName(unwrapId(id, EXPORTS_SUFFIX));
return {
code: `var ${name} = {}; export {${name} as __exports}`,
meta: { commonjs: { isCommonJS: false } }
};
}
if (isWrappedId(id, EXTERNAL_SUFFIX)) {
const actualId = unwrapId(id, EXTERNAL_SUFFIX);
return getUnknownRequireProxy(
actualId,
isEsmExternal(actualId) ? getRequireReturnsDefault(actualId) : true
);
}
// entry suffix is just appended to not mess up relative external resolution
if (id.endsWith(ENTRY_SUFFIX)) {
const acutalId = id.slice(0, -ENTRY_SUFFIX.length);
const {
meta: { commonjs: commonjsMeta }
} = this.getModuleInfo(acutalId);
const shebang = commonjsMeta?.shebang ?? '';
return getEntryProxy(
acutalId,
getDefaultIsModuleExports(acutalId),
this.getModuleInfo,
shebang
);
}
if (isWrappedId(id, ES_IMPORT_SUFFIX)) {
const actualId = unwrapId(id, ES_IMPORT_SUFFIX);
return getEsImportProxy(actualId, getDefaultIsModuleExports(actualId));
}
if (id === DYNAMIC_MODULES_ID) {
return getDynamicModuleRegistry(
isDynamicRequireModulesEnabled,
dynamicRequireModules,
commonDir,
ignoreDynamicRequires
);
}
if (isWrappedId(id, PROXY_SUFFIX)) {
const actualId = unwrapId(id, PROXY_SUFFIX);
return getStaticRequireProxy(actualId, getRequireReturnsDefault(actualId), this.load);
}
return null;
},
shouldTransformCachedModule(...args) {
return requireResolver.shouldTransformCachedModule.call(this, ...args);
},
transform(code, id) {
if (!isPossibleCjsId(id)) return null;
try {
return transformAndCheckExports.call(this, code, id);
} catch (err) {
return this.error(err, err.pos);
}
}
};
}
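// Hedged usage sketch (illustrative only, not executed by this bundle): the factory above mirrors
// @rollup/plugin-commonjs, so a typical Rollup configuration would look like
//   plugins: [commonjs({ include: /node_modules/, transformMixedEsModules: true })]
// where the option names are documented plugin options, not values defined in this file.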
// Matches the scheme of a URL, e.g. "http://"
const schemeRegex = /^[\w+.-]+:\/\//;
/**
* Matches the parts of a URL:
* 1. Scheme, including ":", guaranteed.
* 2. User/password, including "@", optional.
* 3. Host, guaranteed.
* 4. Port, including ":", optional.
* 5. Path, including "/", optional.
* 6. Query, including "?", optional.
* 7. Hash, including "#", optional.
*/
const urlRegex = /^([\w+.-]+:)\/\/([^@/#?]*@)?([^:/#?]*)(:\d+)?(\/[^#?]*)?(\?[^#]*)?(#.*)?/;
/**
* File URLs are weird. They don't need the regular `//` in the scheme, they may or may not start
* with a leading `/`, they can have a domain (but only if they don't start with a Windows drive).
*
* 1. Host, optional.
* 2. Path, which may include "/", guaranteed.
* 3. Query, including "?", optional.
* 4. Hash, including "#", optional.
*/
const fileRegex = /^file:(?:\/\/((?![a-z]:)[^/#?]*)?)?(\/?[^#?]*)(\?[^#]*)?(#.*)?/i;
function isAbsoluteUrl(input) {
return schemeRegex.test(input);
}
function isSchemeRelativeUrl(input) {
return input.startsWith('//');
}
function isAbsolutePath(input) {
return input.startsWith('/');
}
function isFileUrl(input) {
return input.startsWith('file:');
}
function isRelative(input) {
return /^[.?#]/.test(input);
}
function parseAbsoluteUrl(input) {
const match = urlRegex.exec(input);
return makeUrl(match[1], match[2] || '', match[3], match[4] || '', match[5] || '/', match[6] || '', match[7] || '');
}
function parseFileUrl(input) {
const match = fileRegex.exec(input);
const path = match[2];
return makeUrl('file:', '', match[1] || '', '', isAbsolutePath(path) ? path : '/' + path, match[3] || '', match[4] || '');
}
function makeUrl(scheme, user, host, port, path, query, hash) {
return {
scheme,
user,
host,
port,
path,
query,
hash,
type: 7 /* Absolute */,
};
}
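// Numeric url "type" codes used below: 1 Empty, 2 Hash, 3 Query, 4 RelativePath, 5 AbsolutePath,
// 6 SchemeRelative, 7 Absolute. Lower types carry less information and inherit the missing parts
// from the base URL during resolution.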
function parseUrl$3(input) {
if (isSchemeRelativeUrl(input)) {
const url = parseAbsoluteUrl('http:' + input);
url.scheme = '';
url.type = 6 /* SchemeRelative */;
return url;
}
if (isAbsolutePath(input)) {
const url = parseAbsoluteUrl('http://foo.com' + input);
url.scheme = '';
url.host = '';
url.type = 5 /* AbsolutePath */;
return url;
}
if (isFileUrl(input))
return parseFileUrl(input);
if (isAbsoluteUrl(input))
return parseAbsoluteUrl(input);
const url = parseAbsoluteUrl('http://foo.com/' + input);
url.scheme = '';
url.host = '';
url.type = input
? input.startsWith('?')
? 3 /* Query */
: input.startsWith('#')
? 2 /* Hash */
: 4 /* RelativePath */
: 1 /* Empty */;
return url;
}
function stripPathFilename(path) {
// If a path ends with a parent directory "..", then it's a relative path with excess parent
// paths. It's not a file, so we can't strip it.
if (path.endsWith('/..'))
return path;
const index = path.lastIndexOf('/');
return path.slice(0, index + 1);
}
function mergePaths(url, base) {
normalizePath$4(base, base.type);
// If the path is just a "/", then it was an empty path to begin with (remember, we're a relative
// path).
if (url.path === '/') {
url.path = base.path;
}
else {
// Resolution happens relative to the base path's directory, not the file.
url.path = stripPathFilename(base.path) + url.path;
}
}
/**
* The path can have empty directories "//", unneeded parents "foo/..", or current directory
* "foo/.". We need to normalize to a standard representation.
*/
function normalizePath$4(url, type) {
const rel = type <= 4 /* RelativePath */;
const pieces = url.path.split('/');
// We need to preserve the first piece always, so that we output a leading slash. The item at
// pieces[0] is an empty string.
let pointer = 1;
// Positive is the number of real directories we've output, used for popping a parent directory.
// Eg, "foo/bar/.." will have a positive 2, and we can decrement to be left with just "foo".
let positive = 0;
// We need to keep a trailing slash if we encounter an empty directory (eg, splitting "foo/" will
// generate `["foo", ""]` pieces). And, if we pop a parent directory. But once we encounter a
// real directory, we won't need to append, unless the other conditions happen again.
let addTrailingSlash = false;
for (let i = 1; i < pieces.length; i++) {
const piece = pieces[i];
// An empty directory, could be a trailing slash, or just a double "//" in the path.
if (!piece) {
addTrailingSlash = true;
continue;
}
// If we encounter a real directory, then we don't need to append anymore.
addTrailingSlash = false;
// A current directory, which we can always drop.
if (piece === '.')
continue;
// A parent directory, we need to see if there are any real directories we can pop. Else, we
// have an excess of parents, and we'll need to keep the "..".
if (piece === '..') {
if (positive) {
addTrailingSlash = true;
positive--;
pointer--;
}
else if (rel) {
// If we're in a relativePath, then we need to keep the excess parents. Else, in an absolute
// URL, protocol relative URL, or an absolute path, we don't need to keep excess.
pieces[pointer++] = piece;
}
continue;
}
// We've encountered a real directory. Move it to the next insertion pointer, which accounts for
// any popped or dropped directories.
pieces[pointer++] = piece;
positive++;
}
let path = '';
for (let i = 1; i < pointer; i++) {
path += '/' + pieces[i];
}
if (!path || (addTrailingSlash && !path.endsWith('/..'))) {
path += '/';
}
url.path = path;
}
/**
* Attempts to resolve `input` URL/path relative to `base`.
*/
function resolve$2(input, base) {
if (!input && !base)
return '';
const url = parseUrl$3(input);
let inputType = url.type;
if (base && inputType !== 7 /* Absolute */) {
const baseUrl = parseUrl$3(base);
const baseType = baseUrl.type;
switch (inputType) {
case 1 /* Empty */:
url.hash = baseUrl.hash;
// fall through
case 2 /* Hash */:
url.query = baseUrl.query;
// fall through
case 3 /* Query */:
case 4 /* RelativePath */:
mergePaths(url, baseUrl);
// fall through
case 5 /* AbsolutePath */:
// The host, user, and port are joined, you can't copy one without the others.
url.user = baseUrl.user;
url.host = baseUrl.host;
url.port = baseUrl.port;
// fall through
case 6 /* SchemeRelative */:
// The input doesn't even have a scheme, so we need to copy at least that over.
url.scheme = baseUrl.scheme;
}
if (baseType > inputType)
inputType = baseType;
}
normalizePath$4(url, inputType);
const queryHash = url.query + url.hash;
switch (inputType) {
// This is impossible, because of the empty checks at the start of the function.
// case UrlType.Empty:
case 2 /* Hash */:
case 3 /* Query */:
return queryHash;
case 4 /* RelativePath */: {
// The first char is always a "/", and we need it to be relative.
const path = url.path.slice(1);
if (!path)
return queryHash || '.';
if (isRelative(base || input) && !isRelative(path)) {
// If base started with a leading ".", or there is no base and input started with a ".",
// then we need to ensure that the relative path starts with a ".". We don't know if
// relative starts with a "..", though, so check before prepending.
return './' + path + queryHash;
}
return path + queryHash;
}
case 5 /* AbsolutePath */:
return url.path + queryHash;
default:
return url.scheme + '//' + url.user + url.host + url.port + url.path + queryHash;
}
}
function resolve$1(input, base) {
// The base is always treated as a directory, if it's not empty.
// https://github.com/mozilla/source-map/blob/8cb3ee57/lib/util.js#L327
// https://github.com/chromium/chromium/blob/da4adbb3/third_party/blink/renderer/devtools/front_end/sdk/SourceMap.js#L400-L401
if (base && !base.endsWith('/'))
base += '/';
return resolve$2(input, base);
}
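// Illustrative example (values assumed, not taken from this bundle): resolving a relative path
// against an absolute base merges and normalizes the paths, e.g.
//   resolve$1('foo/bar.js', 'https://example.com/src/') === 'https://example.com/src/foo/bar.js'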
/**
* Removes everything after the last "/", but leaves the slash.
*/
function stripFilename(path) {
if (!path)
return '';
const index = path.lastIndexOf('/');
return path.slice(0, index + 1);
}
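// Decoded mapping segments are arrays of the form
// [generatedColumn, sourcesIndex, sourceLine, sourceColumn, namesIndex], where everything after
// the generated column is optional; the constants below index into such segments.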
const COLUMN$1 = 0;
const SOURCES_INDEX$1 = 1;
const SOURCE_LINE$1 = 2;
const SOURCE_COLUMN$1 = 3;
const NAMES_INDEX$1 = 4;
function maybeSort(mappings, owned) {
const unsortedIndex = nextUnsortedSegmentLine(mappings, 0);
if (unsortedIndex === mappings.length)
return mappings;
// If we own the array (meaning we parsed it from JSON), then we're free to directly mutate it. If
// not, we do not want to modify the consumer's input array.
if (!owned)
mappings = mappings.slice();
for (let i = unsortedIndex; i < mappings.length; i = nextUnsortedSegmentLine(mappings, i + 1)) {
mappings[i] = sortSegments(mappings[i], owned);
}
return mappings;
}
function nextUnsortedSegmentLine(mappings, start) {
for (let i = start; i < mappings.length; i++) {
if (!isSorted(mappings[i]))
return i;
}
return mappings.length;
}
function isSorted(line) {
for (let j = 1; j < line.length; j++) {
if (line[j][COLUMN$1] < line[j - 1][COLUMN$1]) {
return false;
}
}
return true;
}
function sortSegments(line, owned) {
if (!owned)
line = line.slice();
return line.sort(sortComparator);
}
function sortComparator(a, b) {
return a[COLUMN$1] - b[COLUMN$1];
}
let found = false;
/**
* A binary search implementation that returns the index if a match is found.
* If no match is found, then the left-index (the index associated with the item that comes just
* before the desired index) is returned. To maintain proper sort order, a splice would happen at
* the next index:
*
* ```js
* const array = [1, 3];
* const needle = 2;
* const index = binarySearch(array, needle, (item, needle) => item - needle);
*
* assert.equal(index, 0);
* array.splice(index + 1, 0, needle);
* assert.deepEqual(array, [1, 2, 3]);
* ```
*/
function binarySearch(haystack, needle, low, high) {
while (low <= high) {
const mid = low + ((high - low) >> 1);
const cmp = haystack[mid][COLUMN$1] - needle;
if (cmp === 0) {
found = true;
return mid;
}
if (cmp < 0) {
low = mid + 1;
}
else {
high = mid - 1;
}
}
found = false;
return low - 1;
}
function upperBound(haystack, needle, index) {
for (let i = index + 1; i < haystack.length; index = i++) {
if (haystack[i][COLUMN$1] !== needle)
break;
}
return index;
}
function lowerBound(haystack, needle, index) {
for (let i = index - 1; i >= 0; index = i--) {
if (haystack[i][COLUMN$1] !== needle)
break;
}
return index;
}
function memoizedState() {
return {
lastKey: -1,
lastNeedle: -1,
lastIndex: -1,
};
}
/**
* This overly complicated beast is just to record the last tested line/column and the resulting
* index, allowing us to skip a few tests if mappings are monotonically increasing.
*/
function memoizedBinarySearch(haystack, needle, state, key) {
const { lastKey, lastNeedle, lastIndex } = state;
let low = 0;
let high = haystack.length - 1;
if (key === lastKey) {
if (needle === lastNeedle) {
found = lastIndex !== -1 && haystack[lastIndex][COLUMN$1] === needle;
return lastIndex;
}
if (needle >= lastNeedle) {
// lastIndex may be -1 if the previous needle was not found.
low = lastIndex === -1 ? 0 : lastIndex;
}
else {
high = lastIndex;
}
}
state.lastKey = key;
state.lastNeedle = needle;
return (state.lastIndex = binarySearch(haystack, needle, low, high));
}
const LINE_GTR_ZERO = '`line` must be greater than 0 (lines start at line 1)';
const COL_GTR_EQ_ZERO = '`column` must be greater than or equal to 0 (columns start at column 0)';
const LEAST_UPPER_BOUND = -1;
const GREATEST_LOWER_BOUND = 1;
class TraceMap {
constructor(map, mapUrl) {
const isString = typeof map === 'string';
if (!isString && map._decodedMemo)
return map;
const parsed = (isString ? JSON.parse(map) : map);
const { version, file, names, sourceRoot, sources, sourcesContent } = parsed;
this.version = version;
this.file = file;
this.names = names || [];
this.sourceRoot = sourceRoot;
this.sources = sources;
this.sourcesContent = sourcesContent;
this.ignoreList = parsed.ignoreList || parsed.x_google_ignoreList || undefined;
const from = resolve$1(sourceRoot || '', stripFilename(mapUrl));
this.resolvedSources = sources.map((s) => resolve$1(s || '', from));
const { mappings } = parsed;
if (typeof mappings === 'string') {
this._encoded = mappings;
this._decoded = undefined;
}
else {
this._encoded = undefined;
this._decoded = maybeSort(mappings, isString);
}
this._decodedMemo = memoizedState();
this._bySources = undefined;
this._bySourceMemos = undefined;
}
}
/**
* TypeScript doesn't allow friend access to private fields, so this just casts the map into a type
* with public access modifiers.
*/
function cast$2(map) {
return map;
}
/**
* Returns the encoded (VLQ string) form of the SourceMap's mappings field.
*/
function encodedMappings(map) {
var _a;
var _b;
return ((_a = (_b = cast$2(map))._encoded) !== null && _a !== void 0 ? _a : (_b._encoded = encode$1(cast$2(map)._decoded)));
}
/**
* Returns the decoded (array of lines of segments) form of the SourceMap's mappings field.
*/
function decodedMappings(map) {
var _a;
return ((_a = cast$2(map))._decoded || (_a._decoded = decode(cast$2(map)._encoded)));
}
/**
* A low-level API to find the segment associated with a generated line/column (think, from a
* stack trace). Line and column here are 0-based, unlike `originalPositionFor`.
*/
function traceSegment(map, line, column) {
const decoded = decodedMappings(map);
// It's common for parent source maps to have pointers to lines that have no
// mapping (like a "//# sourceMappingURL=") at the end of the child file.
if (line >= decoded.length)
return null;
const segments = decoded[line];
const index = traceSegmentInternal(segments, cast$2(map)._decodedMemo, line, column, GREATEST_LOWER_BOUND);
return index === -1 ? null : segments[index];
}
/**
* A higher-level API to find the source/line/column associated with a generated line/column
* (think, from a stack trace). Line is 1-based, but column is 0-based, due to legacy behavior in
* `source-map` library.
*/
function originalPositionFor$1(map, needle) {
let { line, column, bias } = needle;
line--;
if (line < 0)
throw new Error(LINE_GTR_ZERO);
if (column < 0)
throw new Error(COL_GTR_EQ_ZERO);
const decoded = decodedMappings(map);
// It's common for parent source maps to have pointers to lines that have no
// mapping (like a "//# sourceMappingURL=") at the end of the child file.
if (line >= decoded.length)
return OMapping(null, null, null, null);
const segments = decoded[line];
const index = traceSegmentInternal(segments, cast$2(map)._decodedMemo, line, column, bias || GREATEST_LOWER_BOUND);
if (index === -1)
return OMapping(null, null, null, null);
const segment = segments[index];
if (segment.length === 1)
return OMapping(null, null, null, null);
const { names, resolvedSources } = map;
return OMapping(resolvedSources[segment[SOURCES_INDEX$1]], segment[SOURCE_LINE$1] + 1, segment[SOURCE_COLUMN$1], segment.length === 5 ? names[segment[NAMES_INDEX$1]] : null);
}
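// Hedged usage sketch (identifiers as named in this bundle, inputs hypothetical):
//   const tracer = new TraceMap(someSourceMapObject, undefined);
//   originalPositionFor$1(tracer, { line: 10, column: 4 });
// returns { source, line, column, name } for the original location, or an all-null mapping when
// no segment covers the generated position.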
/**
* Returns a sourcemap object (with decoded mappings) suitable for passing to a library that expects
* a sourcemap, or to JSON.stringify.
*/
function decodedMap(map) {
return clone(map, decodedMappings(map));
}
/**
* Returns a sourcemap object (with encoded mappings) suitable for passing to a library that expects
* a sourcemap, or to JSON.stringify.
*/
function encodedMap(map) {
return clone(map, encodedMappings(map));
}
function clone(map, mappings) {
return {
version: map.version,
file: map.file,
names: map.names,
sourceRoot: map.sourceRoot,
sources: map.sources,
sourcesContent: map.sourcesContent,
mappings,
ignoreList: map.ignoreList || map.x_google_ignoreList,
};
}
function OMapping(source, line, column, name) {
return { source, line, column, name };
}
function traceSegmentInternal(segments, memo, line, column, bias) {
let index = memoizedBinarySearch(segments, column, memo, line);
if (found) {
index = (bias === LEAST_UPPER_BOUND ? upperBound : lowerBound)(segments, column, index);
}
else if (bias === LEAST_UPPER_BOUND)
index++;
if (index === -1 || index === segments.length)
return -1;
return index;
}
/**
* SetArray acts like a `Set` (allowing only one occurrence of a string `key`), but provides the
* index of the `key` in the backing array.
*
* This is designed to allow synchronizing a second array with the contents of the backing array,
* like how in a sourcemap `sourcesContent[i]` is the source content associated with `source[i]`,
* and there are never duplicates.
*/
class SetArray {
constructor() {
this._indexes = { __proto__: null };
this.array = [];
}
}
/**
* TypeScript doesn't allow friend access to private fields, so this just casts the set into a type
* with public access modifiers.
*/
function cast$1(set) {
return set;
}
/**
* Gets the index associated with `key` in the backing array, if it is already present.
*/
function get(setarr, key) {
return cast$1(setarr)._indexes[key];
}
/**
* Puts `key` into the backing array, if it is not already present. Returns
* the index of the `key` in the backing array.
*/
function put(setarr, key) {
// The key may or may not be present. If it is present, it's a number.
const index = get(setarr, key);
if (index !== undefined)
return index;
const { array, _indexes: indexes } = cast$1(setarr);
const length = array.push(key);
return (indexes[key] = length - 1);
}
/**
* Removes the key, if it exists in the set.
*/
function remove(setarr, key) {
const index = get(setarr, key);
if (index === undefined)
return;
const { array, _indexes: indexes } = cast$1(setarr);
for (let i = index + 1; i < array.length; i++) {
const k = array[i];
array[i - 1] = k;
indexes[k]--;
}
indexes[key] = undefined;
array.pop();
}
const COLUMN = 0;
const SOURCES_INDEX = 1;
const SOURCE_LINE = 2;
const SOURCE_COLUMN = 3;
const NAMES_INDEX = 4;
const NO_NAME = -1;
/**
* Provides the state to generate a sourcemap.
*/
class GenMapping {
constructor({ file, sourceRoot } = {}) {
this._names = new SetArray();
this._sources = new SetArray();
this._sourcesContent = [];
this._mappings = [];
this.file = file;
this.sourceRoot = sourceRoot;
this._ignoreList = new SetArray();
}
}
/**
* TypeScript doesn't allow friend access to private fields, so this just casts the map into a type
* with public access modifiers.
*/
function cast(map) {
return map;
}
/**
* Same as `addSegment`, but will only add the segment if it generates useful information in the
* resulting map. This only works correctly if segments are added **in order**, meaning you should
* not add a segment with a lower generated line/column than one that came before.
*/
const maybeAddSegment = (map, genLine, genColumn, source, sourceLine, sourceColumn, name, content) => {
return addSegmentInternal(true, map, genLine, genColumn, source, sourceLine, sourceColumn, name);
};
/**
* Adds/removes the content of the source file to the source map.
*/
function setSourceContent(map, source, content) {
const { _sources: sources, _sourcesContent: sourcesContent } = cast(map);
const index = put(sources, source);
sourcesContent[index] = content;
}
function setIgnore(map, source, ignore = true) {
const { _sources: sources, _sourcesContent: sourcesContent, _ignoreList: ignoreList } = cast(map);
const index = put(sources, source);
if (index === sourcesContent.length)
sourcesContent[index] = null;
if (ignore)
put(ignoreList, index);
else
remove(ignoreList, index);
}
/**
* Returns a sourcemap object (with decoded mappings) suitable for passing to a library that expects
* a sourcemap, or to JSON.stringify.
*/
function toDecodedMap(map) {
const { _mappings: mappings, _sources: sources, _sourcesContent: sourcesContent, _names: names, _ignoreList: ignoreList, } = cast(map);
removeEmptyFinalLines(mappings);
return {
version: 3,
file: map.file || undefined,
names: names.array,
sourceRoot: map.sourceRoot || undefined,
sources: sources.array,
sourcesContent,
mappings,
ignoreList: ignoreList.array,
};
}
/**
* Returns a sourcemap object (with encoded mappings) suitable for passing to a library that expects
* a sourcemap, or to JSON.stringify.
*/
function toEncodedMap(map) {
const decoded = toDecodedMap(map);
return Object.assign(Object.assign({}, decoded), { mappings: encode$1(decoded.mappings) });
}
// This split declaration is only so that terser can eliminate the static initialization block.
function addSegmentInternal(skipable, map, genLine, genColumn, source, sourceLine, sourceColumn, name, content) {
const { _mappings: mappings, _sources: sources, _sourcesContent: sourcesContent, _names: names, } = cast(map);
const line = getLine(mappings, genLine);
const index = getColumnIndex(line, genColumn);
if (!source) {
if (skipSourceless(line, index))
return;
return insert(line, index, [genColumn]);
}
const sourcesIndex = put(sources, source);
const namesIndex = name ? put(names, name) : NO_NAME;
if (sourcesIndex === sourcesContent.length)
sourcesContent[sourcesIndex] = null;
if (skipSource(line, index, sourcesIndex, sourceLine, sourceColumn, namesIndex)) {
return;
}
return insert(line, index, name
? [genColumn, sourcesIndex, sourceLine, sourceColumn, namesIndex]
: [genColumn, sourcesIndex, sourceLine, sourceColumn]);
}
function getLine(mappings, index) {
for (let i = mappings.length; i <= index; i++) {
mappings[i] = [];
}
return mappings[index];
}
function getColumnIndex(line, genColumn) {
let index = line.length;
for (let i = index - 1; i >= 0; index = i--) {
const current = line[i];
if (genColumn >= current[COLUMN])
break;
}
return index;
}
function insert(array, index, value) {
for (let i = array.length; i > index; i--) {
array[i] = array[i - 1];
}
array[index] = value;
}
function removeEmptyFinalLines(mappings) {
const { length } = mappings;
let len = length;
for (let i = len - 1; i >= 0; len = i, i--) {
if (mappings[i].length > 0)
break;
}
if (len < length)
mappings.length = len;
}
function skipSourceless(line, index) {
// The start of a line is already sourceless, so adding a sourceless segment to the beginning
// doesn't generate any useful information.
if (index === 0)
return true;
const prev = line[index - 1];
// If the previous segment is also sourceless, then adding another sourceless segment doesn't
// generate any new information. Else, this segment will end the source/named segment and point to
// a sourceless position, which is useful.
return prev.length === 1;
}
function skipSource(line, index, sourcesIndex, sourceLine, sourceColumn, namesIndex) {
// A source/named segment at the start of a line gives position at that genColumn
if (index === 0)
return false;
const prev = line[index - 1];
// If the previous segment is sourceless, then we're transitioning to a source.
if (prev.length === 1)
return false;
// If the previous segment maps to the exact same source position, then this segment doesn't
// provide any new position information.
return (sourcesIndex === prev[SOURCES_INDEX] &&
sourceLine === prev[SOURCE_LINE] &&
sourceColumn === prev[SOURCE_COLUMN] &&
namesIndex === (prev.length === 5 ? prev[NAMES_INDEX] : NO_NAME));
}
const SOURCELESS_MAPPING = /* #__PURE__ */ SegmentObject('', -1, -1, '', null, false);
const EMPTY_SOURCES = [];
function SegmentObject(source, line, column, name, content, ignore) {
return { source, line, column, name, content, ignore };
}
function Source(map, sources, source, content, ignore) {
return {
map,
sources,
source,
content,
ignore,
};
}
/**
* MapSource represents a single sourcemap, with the ability to trace mappings into its child nodes
* (which may themselves be SourceMapTrees).
*/
function MapSource(map, sources) {
return Source(map, sources, '', null, false);
}
/**
* A "leaf" node in the sourcemap tree, representing an original, unmodified source file. Recursive
* segment tracing ends at the `OriginalSource`.
*/
function OriginalSource(source, content, ignore) {
return Source(null, EMPTY_SOURCES, source, content, ignore);
}
/**
* traceMappings is only called on the root level SourceMapTree, and begins the process of
* resolving each mapping in terms of the original source files.
*/
function traceMappings(tree) {
// TODO: Eventually support sourceRoot, which has to be removed because the sources are already
// fully resolved. We'll need to make sources relative to the sourceRoot before adding them.
const gen = new GenMapping({ file: tree.map.file });
const { sources: rootSources, map } = tree;
const rootNames = map.names;
const rootMappings = decodedMappings(map);
for (let i = 0; i < rootMappings.length; i++) {
const segments = rootMappings[i];
for (let j = 0; j < segments.length; j++) {
const segment = segments[j];
const genCol = segment[0];
let traced = SOURCELESS_MAPPING;
// 1-length segments only move the current generated column, there's no source information
// to gather from it.
if (segment.length !== 1) {
const source = rootSources[segment[1]];
traced = originalPositionFor(source, segment[2], segment[3], segment.length === 5 ? rootNames[segment[4]] : '');
// If the trace is invalid, then the trace ran into a sourcemap that doesn't contain a
// respective segment into an original source.
if (traced == null)
continue;
}
const { column, line, name, content, source, ignore } = traced;
maybeAddSegment(gen, i, genCol, source, line, column, name);
if (source && content != null)
setSourceContent(gen, source, content);
if (ignore)
setIgnore(gen, source, true);
}
}
return gen;
}
/**
* originalPositionFor is only called on children SourceMapTrees. It recurses down into its own
* child SourceMapTrees, until we find the original source map.
*/
function originalPositionFor(source, line, column, name) {
if (!source.map) {
return SegmentObject(source.source, line, column, name, source.content, source.ignore);
}
const segment = traceSegment(source.map, line, column);
// If we couldn't find a segment, then this doesn't exist in the sourcemap.
if (segment == null)
return null;
// 1-length segments only move the current generated column, there's no source information
// to gather from it.
if (segment.length === 1)
return SOURCELESS_MAPPING;
return originalPositionFor(source.sources[segment[1]], segment[2], segment[3], segment.length === 5 ? source.map.names[segment[4]] : name);
}
function asArray(value) {
if (Array.isArray(value))
return value;
return [value];
}
/**
* Recursively builds a tree structure out of sourcemap files, with each node
* being either an `OriginalSource` "leaf" or a `SourceMapTree` composed of
* `OriginalSource`s and `SourceMapTree`s.
*
* Every sourcemap is composed of a collection of source files and mappings
* into locations of those source files. When we generate a `SourceMapTree` for
* the sourcemap, we attempt to load each source file's own sourcemap. If it
* does not have an associated sourcemap, it is considered an original,
* unmodified source file.
*/
function buildSourceMapTree(input, loader) {
const maps = asArray(input).map((m) => new TraceMap(m, ''));
const map = maps.pop();
for (let i = 0; i < maps.length; i++) {
if (maps[i].sources.length > 1) {
throw new Error(`Transformation map ${i} must have exactly one source file.\n` +
'Did you specify these with the most recent transformation maps first?');
}
}
let tree = build$2(map, loader, '', 0);
for (let i = maps.length - 1; i >= 0; i--) {
tree = MapSource(maps[i], [tree]);
}
return tree;
}
function build$2(map, loader, importer, importerDepth) {
const { resolvedSources, sourcesContent, ignoreList } = map;
const depth = importerDepth + 1;
const children = resolvedSources.map((sourceFile, i) => {
// The loading context gives the loader more information about why this file is being loaded
// (eg, from which importer). It also allows the loader to override the location of the loaded
// sourcemap/original source, or to override the content in the sourcesContent field if it's
// an unmodified source file.
const ctx = {
importer,
depth,
source: sourceFile || '',
content: undefined,
ignore: undefined,
};
// Use the provided loader callback to retrieve the file's sourcemap.
// TODO: We should eventually support async loading of sourcemap files.
const sourceMap = loader(ctx.source, ctx);
const { source, content, ignore } = ctx;
// If there is a sourcemap, then we need to recurse into it to load its source files.
if (sourceMap)
return build$2(new TraceMap(sourceMap, source), loader, source, depth);
// Else, it's an unmodified source file.
// The contents of this unmodified source file can be overridden via the loader context,
// allowing it to be explicitly null or a string. If it remains undefined, we fall back to
// the importing sourcemap's `sourcesContent` field.
const sourceContent = content !== undefined ? content : sourcesContent ? sourcesContent[i] : null;
const ignored = ignore !== undefined ? ignore : ignoreList ? ignoreList.includes(i) : false;
return OriginalSource(source, sourceContent, ignored);
});
return MapSource(map, children);
}
/**
* A SourceMap v3 compatible sourcemap, which only includes fields that were
* provided to it.
*/
class SourceMap {
constructor(map, options) {
const out = options.decodedMappings ? toDecodedMap(map) : toEncodedMap(map);
this.version = out.version; // SourceMap spec says this should be first.
this.file = out.file;
this.mappings = out.mappings;
this.names = out.names;
this.ignoreList = out.ignoreList;
this.sourceRoot = out.sourceRoot;
this.sources = out.sources;
if (!options.excludeContent) {
this.sourcesContent = out.sourcesContent;
}
}
toString() {
return JSON.stringify(this);
}
}
/**
* Traces through all the mappings in the root sourcemap, through the sources
* (and their sourcemaps), all the way back to the original source location.
*
* `loader` will be called every time we encounter a source file. If it returns
* a sourcemap, we will recurse into that sourcemap to continue the trace. If
* it returns a falsey value, that source file is treated as an original,
* unmodified source file.
*
* Pass `excludeContent` to exclude any self-containing source file content
* from the output sourcemap.
*
* Pass `decodedMappings` to receive a SourceMap with decoded (instead of
* VLQ encoded) mappings.
*/
function remapping(input, loader, options) {
const opts = { excludeContent: !!options, decodedMappings: false };
const tree = buildSourceMapTree(input, loader);
return new SourceMap(traceMappings(tree), opts);
}
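// Hedged usage sketch (the loader below is hypothetical): remapping is fed the final transformed
// map plus a loader that returns each source file's own map, or a falsey value to stop tracing:
//   const merged = remapping(transformedMap, (file) => sourceMapsByFile.get(file) || null);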
var src$3 = {exports: {}};
var browser$3 = {exports: {}};
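// The require*/hasRequired* pattern below is how the commonjs plugin inlines CommonJS packages
// (ms, debug, ...): each factory initializes its module exactly once, on first call, and caches
// the exports in a module-level variable.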
/**
* Helpers.
*/
var ms$1;
var hasRequiredMs$1;
function requireMs$1 () {
if (hasRequiredMs$1) return ms$1;
hasRequiredMs$1 = 1;
var s = 1000;
var m = s * 60;
var h = m * 60;
var d = h * 24;
var w = d * 7;
var y = d * 365.25;
/**
* Parse or format the given `val`.
*
* Options:
*
* - `long` verbose formatting [false]
*
* @param {String|Number} val
* @param {Object} [options]
* @throws {Error} throw an error if val is not a non-empty string or a number
* @return {String|Number}
* @api public
*/
ms$1 = function(val, options) {
options = options || {};
var type = typeof val;
if (type === 'string' && val.length > 0) {
return parse(val);
} else if (type === 'number' && isFinite(val)) {
return options.long ? fmtLong(val) : fmtShort(val);
}
throw new Error(
'val is not a non-empty string or a valid number. val=' +
JSON.stringify(val)
);
};
/**
* Parse the given `str` and return milliseconds.
*
* @param {String} str
* @return {Number}
* @api private
*/
function parse(str) {
str = String(str);
if (str.length > 100) {
return;
}
var match = /^(-?(?:\d+)?\.?\d+) *(milliseconds?|msecs?|ms|seconds?|secs?|s|minutes?|mins?|m|hours?|hrs?|h|days?|d|weeks?|w|years?|yrs?|y)?$/i.exec(
str
);
if (!match) {
return;
}
var n = parseFloat(match[1]);
var type = (match[2] || 'ms').toLowerCase();
switch (type) {
case 'years':
case 'year':
case 'yrs':
case 'yr':
case 'y':
return n * y;
case 'weeks':
case 'week':
case 'w':
return n * w;
case 'days':
case 'day':
case 'd':
return n * d;
case 'hours':
case 'hour':
case 'hrs':
case 'hr':
case 'h':
return n * h;
case 'minutes':
case 'minute':
case 'mins':
case 'min':
case 'm':
return n * m;
case 'seconds':
case 'second':
case 'secs':
case 'sec':
case 's':
return n * s;
case 'milliseconds':
case 'millisecond':
case 'msecs':
case 'msec':
case 'ms':
return n;
default:
return undefined;
}
}
/**
* Short format for `ms`.
*
* @param {Number} ms
* @return {String}
* @api private
*/
function fmtShort(ms) {
var msAbs = Math.abs(ms);
if (msAbs >= d) {
return Math.round(ms / d) + 'd';
}
if (msAbs >= h) {
return Math.round(ms / h) + 'h';
}
if (msAbs >= m) {
return Math.round(ms / m) + 'm';
}
if (msAbs >= s) {
return Math.round(ms / s) + 's';
}
return ms + 'ms';
}
/**
* Long format for `ms`.
*
* @param {Number} ms
* @return {String}
* @api private
*/
function fmtLong(ms) {
var msAbs = Math.abs(ms);
if (msAbs >= d) {
return plural(ms, msAbs, d, 'day');
}
if (msAbs >= h) {
return plural(ms, msAbs, h, 'hour');
}
if (msAbs >= m) {
return plural(ms, msAbs, m, 'minute');
}
if (msAbs >= s) {
return plural(ms, msAbs, s, 'second');
}
return ms + ' ms';
}
/**
* Pluralization helper.
*/
function plural(ms, msAbs, n, name) {
var isPlural = msAbs >= n * 1.5;
return Math.round(ms / n) + ' ' + name + (isPlural ? 's' : '');
}
return ms$1;
}
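/**
* Usage sketch for the `ms` helper returned by `requireMs$1()` (illustrative,
* mirrors the parse/format code above):
*
* @example
*   const ms = requireMs$1();
*   ms('2 hours');             // 7200000
*   ms('1.5d');                // 129600000
*   ms(60000);                 // "1m"
*   ms(60000, { long: true }); // "1 minute"
*/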
var common$b;
var hasRequiredCommon;
function requireCommon () {
if (hasRequiredCommon) return common$b;
hasRequiredCommon = 1;
/**
* This is the common logic for both the Node.js and web browser
* implementations of `debug()`.
*/
function setup(env) {
createDebug.debug = createDebug;
createDebug.default = createDebug;
createDebug.coerce = coerce;
createDebug.disable = disable;
createDebug.enable = enable;
createDebug.enabled = enabled;
createDebug.humanize = requireMs$1();
createDebug.destroy = destroy;
Object.keys(env).forEach(key => {
createDebug[key] = env[key];
});
/**
* The currently active debug mode names, and names to skip.
*/
createDebug.names = [];
createDebug.skips = [];
/**
* Map of special "%n" handling functions, for the debug "format" argument.
*
* Valid key names are a single, lower- or upper-case letter, e.g. "n" or "N".
*/
createDebug.formatters = {};
/**
* Selects a color for a debug namespace
* @param {String} namespace The namespace string for the debug instance to be colored
* @return {Number|String} An ANSI color code for the given namespace
* @api private
*/
function selectColor(namespace) {
let hash = 0;
for (let i = 0; i < namespace.length; i++) {
hash = ((hash << 5) - hash) + namespace.charCodeAt(i);
hash |= 0; // Convert to 32bit integer
}
return createDebug.colors[Math.abs(hash) % createDebug.colors.length];
}
createDebug.selectColor = selectColor;
/**
* Create a debugger with the given `namespace`.
*
* @param {String} namespace
* @return {Function}
* @api public
*/
function createDebug(namespace) {
let prevTime;
let enableOverride = null;
let namespacesCache;
let enabledCache;
function debug(...args) {
// Disabled?
if (!debug.enabled) {
return;
}
const self = debug;
// Set `diff` timestamp
const curr = Number(new Date());
const ms = curr - (prevTime || curr);
self.diff = ms;
self.prev = prevTime;
self.curr = curr;
prevTime = curr;
args[0] = createDebug.coerce(args[0]);
if (typeof args[0] !== 'string') {
// Anything else let's inspect with %O
args.unshift('%O');
}
// Apply any `formatters` transformations
let index = 0;
args[0] = args[0].replace(/%([a-zA-Z%])/g, (match, format) => {
// If we encounter an escaped % then don't increase the array index
if (match === '%%') {
return '%';
}
index++;
const formatter = createDebug.formatters[format];
if (typeof formatter === 'function') {
const val = args[index];
match = formatter.call(self, val);
// Now we need to remove `args[index]` since it's inlined in the `format`
args.splice(index, 1);
index--;
}
return match;
});
// Apply env-specific formatting (colors, etc.)
createDebug.formatArgs.call(self, args);
const logFn = self.log || createDebug.log;
logFn.apply(self, args);
}
debug.namespace = namespace;
debug.useColors = createDebug.useColors();
debug.color = createDebug.selectColor(namespace);
debug.extend = extend;
debug.destroy = createDebug.destroy; // XXX Temporary. Will be removed in the next major release.
Object.defineProperty(debug, 'enabled', {
enumerable: true,
configurable: false,
get: () => {
if (enableOverride !== null) {
return enableOverride;
}
if (namespacesCache !== createDebug.namespaces) {
namespacesCache = createDebug.namespaces;
enabledCache = createDebug.enabled(namespace);
}
return enabledCache;
},
set: v => {
enableOverride = v;
}
});
// Env-specific initialization logic for debug instances
if (typeof createDebug.init === 'function') {
createDebug.init(debug);
}
return debug;
}
function extend(namespace, delimiter) {
const newDebug = createDebug(this.namespace + (typeof delimiter === 'undefined' ? ':' : delimiter) + namespace);
newDebug.log = this.log;
return newDebug;
}
/**
* Enables a debug mode by namespaces. This can include modes
* separated by a colon and wildcards.
*
* @param {String} namespaces
* @api public
*/
function enable(namespaces) {
createDebug.save(namespaces);
createDebug.namespaces = namespaces;
createDebug.names = [];
createDebug.skips = [];
let i;
const split = (typeof namespaces === 'string' ? namespaces : '').split(/[\s,]+/);
const len = split.length;
for (i = 0; i < len; i++) {
if (!split[i]) {
// ignore empty strings
continue;
}
namespaces = split[i].replace(/\*/g, '.*?');
if (namespaces[0] === '-') {
createDebug.skips.push(new RegExp('^' + namespaces.slice(1) + '$'));
} else {
createDebug.names.push(new RegExp('^' + namespaces + '$'));
}
}
}
/**
* Disable debug output.
*
* @return {String} namespaces
* @api public
*/
function disable() {
const namespaces = [
...createDebug.names.map(toNamespace),
...createDebug.skips.map(toNamespace).map(namespace => '-' + namespace)
].join(',');
createDebug.enable('');
return namespaces;
}
/**
* Returns true if the given mode name is enabled, false otherwise.
*
* @param {String} name
* @return {Boolean}
* @api public
*/
function enabled(name) {
if (name[name.length - 1] === '*') {
return true;
}
let i;
let len;
for (i = 0, len = createDebug.skips.length; i < len; i++) {
if (createDebug.skips[i].test(name)) {
return false;
}
}
for (i = 0, len = createDebug.names.length; i < len; i++) {
if (createDebug.names[i].test(name)) {
return true;
}
}
return false;
}
/**
* Convert regexp to namespace
*
* @param {RegExp} regexp
* @return {String} namespace
* @api private
*/
function toNamespace(regexp) {
return regexp.toString()
.substring(2, regexp.toString().length - 2)
.replace(/\.\*\?$/, '*');
}
/**
* Coerce `val`.
*
* @param {Mixed} val
* @return {Mixed}
* @api private
*/
function coerce(val) {
if (val instanceof Error) {
return val.stack || val.message;
}
return val;
}
/**
* XXX DO NOT USE. This is a temporary stub function.
* XXX It WILL be removed in the next major release.
*/
function destroy() {
console.warn('Instance method `debug.destroy()` is deprecated and no longer does anything. It will be removed in the next major version of `debug`.');
}
createDebug.enable(createDebug.load());
return createDebug;
}
common$b = setup;
return common$b;
}
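/**
* Usage sketch for the debug factory produced by `setup()` above (illustrative
* only). Namespaces are enabled via the DEBUG env var in Node.js or
* `localStorage.debug` in browsers; a leading "-" skips a namespace and "*"
* is a wildcard.
*
* @example
*   // DEBUG="vite:*,-vite:cache" node script.js
*   const log = createDebug('vite:config');
*   log('resolved config in %dms: %O', 12, { root: '/app' });
*   // "%O" is handled by the env-specific formatters registered below.
*/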
/* eslint-env browser */
var hasRequiredBrowser$1;
function requireBrowser$1 () {
if (hasRequiredBrowser$1) return browser$3.exports;
hasRequiredBrowser$1 = 1;
(function (module, exports) {
/**
* This is the web browser implementation of `debug()`.
*/
exports.formatArgs = formatArgs;
exports.save = save;
exports.load = load;
exports.useColors = useColors;
exports.storage = localstorage();
exports.destroy = (() => {
let warned = false;
return () => {
if (!warned) {
warned = true;
console.warn('Instance method `debug.destroy()` is deprecated and no longer does anything. It will be removed in the next major version of `debug`.');
}
};
})();
/**
* Colors.
*/
exports.colors = [
'#0000CC',
'#0000FF',
'#0033CC',
'#0033FF',
'#0066CC',
'#0066FF',
'#0099CC',
'#0099FF',
'#00CC00',
'#00CC33',
'#00CC66',
'#00CC99',
'#00CCCC',
'#00CCFF',
'#3300CC',
'#3300FF',
'#3333CC',
'#3333FF',
'#3366CC',
'#3366FF',
'#3399CC',
'#3399FF',
'#33CC00',
'#33CC33',
'#33CC66',
'#33CC99',
'#33CCCC',
'#33CCFF',
'#6600CC',
'#6600FF',
'#6633CC',
'#6633FF',
'#66CC00',
'#66CC33',
'#9900CC',
'#9900FF',
'#9933CC',
'#9933FF',
'#99CC00',
'#99CC33',
'#CC0000',
'#CC0033',
'#CC0066',
'#CC0099',
'#CC00CC',
'#CC00FF',
'#CC3300',
'#CC3333',
'#CC3366',
'#CC3399',
'#CC33CC',
'#CC33FF',
'#CC6600',
'#CC6633',
'#CC9900',
'#CC9933',
'#CCCC00',
'#CCCC33',
'#FF0000',
'#FF0033',
'#FF0066',
'#FF0099',
'#FF00CC',
'#FF00FF',
'#FF3300',
'#FF3333',
'#FF3366',
'#FF3399',
'#FF33CC',
'#FF33FF',
'#FF6600',
'#FF6633',
'#FF9900',
'#FF9933',
'#FFCC00',
'#FFCC33'
];
/**
* Currently only WebKit-based Web Inspectors, Firefox >= v31,
* and the Firebug extension (any Firefox version) are known
* to support "%c" CSS customizations.
*
* TODO: add a `localStorage` variable to explicitly enable/disable colors
*/
// eslint-disable-next-line complexity
function useColors() {
// NB: In an Electron preload script, document will be defined but not fully
// initialized. Since we know we're in Chrome, we'll just detect this case
// explicitly
if (typeof window !== 'undefined' && window.process && (window.process.type === 'renderer' || window.process.__nwjs)) {
return true;
}
// Internet Explorer and Edge do not support colors.
if (typeof navigator !== 'undefined' && navigator.userAgent && navigator.userAgent.toLowerCase().match(/(edge|trident)\/(\d+)/)) {
return false;
}
let m;
// Is webkit? http://stackoverflow.com/a/16459606/376773
// document is undefined in react-native: https://github.com/facebook/react-native/pull/1632
return (typeof document !== 'undefined' && document.documentElement && document.documentElement.style && document.documentElement.style.WebkitAppearance) ||
// Is firebug? http://stackoverflow.com/a/398120/376773
(typeof window !== 'undefined' && window.console && (window.console.firebug || (window.console.exception && window.console.table))) ||
// Is firefox >= v31?
// https://developer.mozilla.org/en-US/docs/Tools/Web_Console#Styling_messages
(typeof navigator !== 'undefined' && navigator.userAgent && (m = navigator.userAgent.toLowerCase().match(/firefox\/(\d+)/)) && parseInt(m[1], 10) >= 31) ||
// Double check webkit in userAgent just in case we are in a worker
(typeof navigator !== 'undefined' && navigator.userAgent && navigator.userAgent.toLowerCase().match(/applewebkit\/(\d+)/));
}
/**
* Colorize log arguments if enabled.
*
* @api public
*/
function formatArgs(args) {
args[0] = (this.useColors ? '%c' : '') +
this.namespace +
(this.useColors ? ' %c' : ' ') +
args[0] +
(this.useColors ? '%c ' : ' ') +
'+' + module.exports.humanize(this.diff);
if (!this.useColors) {
return;
}
const c = 'color: ' + this.color;
args.splice(1, 0, c, 'color: inherit');
// The final "%c" is somewhat tricky, because there could be other
// arguments passed either before or after the %c, so we need to
// figure out the correct index to insert the CSS into
let index = 0;
let lastC = 0;
args[0].replace(/%[a-zA-Z%]/g, match => {
if (match === '%%') {
return;
}
index++;
if (match === '%c') {
// We are only interested in the *last* %c
// (the user may have provided their own)
lastC = index;
}
});
args.splice(lastC, 0, c);
}
/**
* Invokes `console.debug()` when available.
* No-op when `console.debug` is not a "function".
* If `console.debug` is not available, falls back
* to `console.log`.
*
* @api public
*/
exports.log = console.debug || console.log || (() => {});
/**
* Save `namespaces`.
*
* @param {String} namespaces
* @api private
*/
function save(namespaces) {
try {
if (namespaces) {
exports.storage.setItem('debug', namespaces);
} else {
exports.storage.removeItem('debug');
}
} catch (error) {
// Swallow
// XXX (@Qix-) should we be logging these?
}
}
/**
* Load `namespaces`.
*
* @return {String} returns the previously persisted debug modes
* @api private
*/
function load() {
let r;
try {
r = exports.storage.getItem('debug');
} catch (error) {
// Swallow
// XXX (@Qix-) should we be logging these?
}
// If debug isn't set in LS, and we're in Electron, try to load $DEBUG
if (!r && typeof process !== 'undefined' && 'env' in process) {
r = process.env.DEBUG;
}
return r;
}
/**
* Attempts to return the global `localStorage` object.
*
* This wrapper is necessary because Safari throws
* when a user disables cookies/localStorage
* and you attempt to access it.
*
* @return {LocalStorage}
* @api private
*/
function localstorage() {
try {
// TVMLKit (Apple TV JS Runtime) does not have a window object, just localStorage in the global context
// The Browser also has localStorage in the global context.
return localStorage;
} catch (error) {
// Swallow
// XXX (@Qix-) should we be logging these?
}
}
module.exports = requireCommon()(exports);
const {formatters} = module.exports;
/**
* Map %j to `JSON.stringify()`, since no Web Inspectors do that by default.
*/
formatters.j = function (v) {
try {
return JSON.stringify(v);
} catch (error) {
return '[UnexpectedJSONParseError]: ' + error.message;
}
};
} (browser$3, browser$3.exports));
return browser$3.exports;
}
var node$1 = {exports: {}};
/**
* Module dependencies.
*/
var hasRequiredNode$1;
function requireNode$1 () {
if (hasRequiredNode$1) return node$1.exports;
hasRequiredNode$1 = 1;
(function (module, exports) {
const tty = require$$0$3;
const util = require$$0$5;
/**
* This is the Node.js implementation of `debug()`.
*/
exports.init = init;
exports.log = log;
exports.formatArgs = formatArgs;
exports.save = save;
exports.load = load;
exports.useColors = useColors;
exports.destroy = util.deprecate(
() => {},
'Instance method `debug.destroy()` is deprecated and no longer does anything. It will be removed in the next major version of `debug`.'
);
/**
* Colors.
*/
exports.colors = [6, 2, 3, 4, 5, 1];
try {
// Optional dependency (as in, doesn't need to be installed, NOT like optionalDependencies in package.json)
// eslint-disable-next-line import/no-extraneous-dependencies
const supportsColor = require('supports-color');
if (supportsColor && (supportsColor.stderr || supportsColor).level >= 2) {
exports.colors = [
20,
21,
26,
27,
32,
33,
38,
39,
40,
41,
42,
43,
44,
45,
56,
57,
62,
63,
68,
69,
74,
75,
76,
77,
78,
79,
80,
81,
92,
93,
98,
99,
112,
113,
128,
129,
134,
135,
148,
149,
160,
161,
162,
163,
164,
165,
166,
167,
168,
169,
170,
171,
172,
173,
178,
179,
184,
185,
196,
197,
198,
199,
200,
201,
202,
203,
204,
205,
206,
207,
208,
209,
214,
215,
220,
221
];
}
} catch (error) {
// Swallow - we only care if `supports-color` is available; it doesn't have to be.
}
/**
* Build up the default `inspectOpts` object from the environment variables.
*
* $ DEBUG_COLORS=no DEBUG_DEPTH=10 DEBUG_SHOW_HIDDEN=enabled node script.js
*/
exports.inspectOpts = Object.keys(process.env).filter(key => {
return /^debug_/i.test(key);
}).reduce((obj, key) => {
// Camel-case
const prop = key
.substring(6)
.toLowerCase()
.replace(/_([a-z])/g, (_, k) => {
return k.toUpperCase();
});
// Coerce string value into JS value
let val = process.env[key];
if (/^(yes|on|true|enabled)$/i.test(val)) {
val = true;
} else if (/^(no|off|false|disabled)$/i.test(val)) {
val = false;
} else if (val === 'null') {
val = null;
} else {
val = Number(val);
}
obj[prop] = val;
return obj;
}, {});
/**
* Is stderr a TTY? Colored output is enabled when `true`.
*/
function useColors() {
return 'colors' in exports.inspectOpts ?
Boolean(exports.inspectOpts.colors) :
tty.isatty(process.stderr.fd);
}
/**
* Adds ANSI color escape codes if enabled.
*
* @api public
*/
function formatArgs(args) {
const {namespace: name, useColors} = this;
if (useColors) {
const c = this.color;
const colorCode = '\u001B[3' + (c < 8 ? c : '8;5;' + c);
const prefix = ` ${colorCode};1m${name} \u001B[0m`;
args[0] = prefix + args[0].split('\n').join('\n' + prefix);
args.push(colorCode + 'm+' + module.exports.humanize(this.diff) + '\u001B[0m');
} else {
args[0] = getDate() + name + ' ' + args[0];
}
}
function getDate() {
if (exports.inspectOpts.hideDate) {
return '';
}
return new Date().toISOString() + ' ';
}
/**
* Invokes `util.formatWithOptions()` with the specified arguments and writes to stderr.
*/
function log(...args) {
return process.stderr.write(util.formatWithOptions(exports.inspectOpts, ...args) + '\n');
}
/**
* Save `namespaces`.
*
* @param {String} namespaces
* @api private
*/
function save(namespaces) {
if (namespaces) {
process.env.DEBUG = namespaces;
} else {
// If you set a process.env field to null or undefined, it gets cast to the
// string 'null' or 'undefined'. Just delete instead.
delete process.env.DEBUG;
}
}
/**
* Load `namespaces`.
*
* @return {String} returns the previously persisted debug modes
* @api private
*/
function load() {
return process.env.DEBUG;
}
/**
* Init logic for `debug` instances.
*
* Create a new `inspectOpts` object in case `useColors` is set
* differently for a particular `debug` instance.
*/
function init(debug) {
debug.inspectOpts = {};
const keys = Object.keys(exports.inspectOpts);
for (let i = 0; i < keys.length; i++) {
debug.inspectOpts[keys[i]] = exports.inspectOpts[keys[i]];
}
}
module.exports = requireCommon()(exports);
const {formatters} = module.exports;
/**
* Map %o to `util.inspect()`, all on a single line.
*/
formatters.o = function (v) {
this.inspectOpts.colors = this.useColors;
return util.inspect(v, this.inspectOpts)
.split('\n')
.map(str => str.trim())
.join(' ');
};
/**
* Map %O to `util.inspect()`, allowing multiple lines if needed.
*/
formatters.O = function (v) {
this.inspectOpts.colors = this.useColors;
return util.inspect(v, this.inspectOpts);
};
} (node$1, node$1.exports));
return node$1.exports;
}
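/**
* Usage sketch (illustrative): the Node.js implementation above reads
* `DEBUG_*` environment variables into `inspectOpts`, which are then passed to
* `util.inspect` by the %o / %O formatters.
*
* @example
*   // DEBUG="app:*" DEBUG_DEPTH=10 DEBUG_COLORS=no DEBUG_HIDE_DATE=true node script.js
*   // -> exports.inspectOpts === { depth: 10, colors: false, hideDate: true }
*/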
/**
* Detect an Electron renderer / NW.js process, which is Node.js but should be
* treated as a browser.
*/
if (typeof process === 'undefined' || process.type === 'renderer' || process.browser === true || process.__nwjs) {
src$3.exports = requireBrowser$1();
} else {
src$3.exports = requireNode$1();
}
var srcExports$1 = src$3.exports;
var debug$i = /*@__PURE__*/getDefaultExportFromCjs(srcExports$1);
let pnp;
if (process.versions.pnp) {
try {
pnp = createRequire$1(import.meta.url)("pnpapi");
} catch {
}
}
function invalidatePackageData(packageCache, pkgPath) {
const pkgDir = normalizePath$3(path$n.dirname(pkgPath));
packageCache.forEach((pkg, cacheKey) => {
if (pkg.dir === pkgDir) {
packageCache.delete(cacheKey);
}
});
}
function resolvePackageData(pkgName, basedir, preserveSymlinks = false, packageCache) {
if (pnp) {
const cacheKey = getRpdCacheKey(pkgName, basedir, preserveSymlinks);
if (packageCache?.has(cacheKey)) return packageCache.get(cacheKey);
try {
const pkg = pnp.resolveToUnqualified(pkgName, basedir, {
considerBuiltins: false
});
if (!pkg) return null;
const pkgData = loadPackageData(path$n.join(pkg, "package.json"));
packageCache?.set(cacheKey, pkgData);
return pkgData;
} catch {
return null;
}
}
const originalBasedir = basedir;
while (basedir) {
if (packageCache) {
const cached = getRpdCache(
packageCache,
pkgName,
basedir,
originalBasedir,
preserveSymlinks
);
if (cached) return cached;
}
const pkg = path$n.join(basedir, "node_modules", pkgName, "package.json");
try {
if (fs__default.existsSync(pkg)) {
const pkgPath = preserveSymlinks ? pkg : safeRealpathSync(pkg);
const pkgData = loadPackageData(pkgPath);
if (packageCache) {
setRpdCache(
packageCache,
pkgData,
pkgName,
basedir,
originalBasedir,
preserveSymlinks
);
}
return pkgData;
}
} catch {
}
const nextBasedir = path$n.dirname(basedir);
if (nextBasedir === basedir) break;
basedir = nextBasedir;
}
return null;
}
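/**
* Usage sketch (illustrative; the paths are hypothetical): resolving a
* dependency's package.json by walking up node_modules from a base directory.
*
* @example
*   const pkg = resolvePackageData('rollup', '/app/src');
*   // -> { dir: '/app/node_modules/rollup', data: { name: 'rollup', ... }, ... }
*   // or null when the package cannot be found. Passing a `packageCache` Map
*   // memoizes lookups for every directory between basedir and the match.
*/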
function findNearestPackageData(basedir, packageCache) {
const originalBasedir = basedir;
while (basedir) {
if (packageCache) {
const cached = getFnpdCache(packageCache, basedir, originalBasedir);
if (cached) return cached;
}
const pkgPath = path$n.join(basedir, "package.json");
if (tryStatSync(pkgPath)?.isFile()) {
try {
const pkgData = loadPackageData(pkgPath);
if (packageCache) {
setFnpdCache(packageCache, pkgData, basedir, originalBasedir);
}
return pkgData;
} catch {
}
}
const nextBasedir = path$n.dirname(basedir);
if (nextBasedir === basedir) break;
basedir = nextBasedir;
}
return null;
}
function findNearestMainPackageData(basedir, packageCache) {
const nearestPackage = findNearestPackageData(basedir, packageCache);
return nearestPackage && (nearestPackage.data.name ? nearestPackage : findNearestMainPackageData(
path$n.dirname(nearestPackage.dir),
packageCache
));
}
function loadPackageData(pkgPath) {
const data = JSON.parse(fs__default.readFileSync(pkgPath, "utf-8"));
const pkgDir = normalizePath$3(path$n.dirname(pkgPath));
const { sideEffects } = data;
let hasSideEffects;
if (typeof sideEffects === "boolean") {
hasSideEffects = () => sideEffects;
} else if (Array.isArray(sideEffects)) {
if (sideEffects.length <= 0) {
hasSideEffects = () => false;
} else {
const finalPackageSideEffects = sideEffects.map((sideEffect) => {
if (sideEffect.includes("/")) {
return sideEffect;
}
return `**/${sideEffect}`;
});
hasSideEffects = createFilter(finalPackageSideEffects, null, {
resolve: pkgDir
});
}
} else {
hasSideEffects = () => null;
}
const pkg = {
dir: pkgDir,
data,
hasSideEffects,
webResolvedImports: {},
nodeResolvedImports: {},
setResolvedCache(key, entry, targetWeb) {
if (targetWeb) {
pkg.webResolvedImports[key] = entry;
} else {
pkg.nodeResolvedImports[key] = entry;
}
},
getResolvedCache(key, targetWeb) {
if (targetWeb) {
return pkg.webResolvedImports[key];
} else {
return pkg.nodeResolvedImports[key];
}
}
};
return pkg;
}
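// Note on the `sideEffects` normalization above (illustrative): bare glob
// entries without a "/" are prefixed with "**/" so they match in any
// directory before being handed to `createFilter`. For example, with
// `"sideEffects": ["*.css", "./src/init.js"]` in package.json,
// `pkg.hasSideEffects('/pkg/dist/style.css')` returns true while
// `pkg.hasSideEffects('/pkg/dist/index.js')` returns false.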
function watchPackageDataPlugin(packageCache) {
const watchQueue = /* @__PURE__ */ new Set();
const watchedDirs = /* @__PURE__ */ new Set();
const watchFileStub = (id) => {
watchQueue.add(id);
};
let watchFile = watchFileStub;
const setPackageData = packageCache.set.bind(packageCache);
packageCache.set = (id, pkg) => {
if (!isInNodeModules$1(pkg.dir) && !watchedDirs.has(pkg.dir)) {
watchedDirs.add(pkg.dir);
watchFile(path$n.join(pkg.dir, "package.json"));
}
return setPackageData(id, pkg);
};
return {
name: "vite:watch-package-data",
buildStart() {
watchFile = this.addWatchFile.bind(this);
watchQueue.forEach(watchFile);
watchQueue.clear();
},
buildEnd() {
watchFile = watchFileStub;
},
watchChange(id) {
if (id.endsWith("/package.json")) {
invalidatePackageData(packageCache, path$n.normalize(id));
}
}
};
}
function getRpdCache(packageCache, pkgName, basedir, originalBasedir, preserveSymlinks) {
const cacheKey = getRpdCacheKey(pkgName, basedir, preserveSymlinks);
const pkgData = packageCache.get(cacheKey);
if (pkgData) {
traverseBetweenDirs(originalBasedir, basedir, (dir) => {
packageCache.set(getRpdCacheKey(pkgName, dir, preserveSymlinks), pkgData);
});
return pkgData;
}
}
function setRpdCache(packageCache, pkgData, pkgName, basedir, originalBasedir, preserveSymlinks) {
packageCache.set(getRpdCacheKey(pkgName, basedir, preserveSymlinks), pkgData);
traverseBetweenDirs(originalBasedir, basedir, (dir) => {
packageCache.set(getRpdCacheKey(pkgName, dir, preserveSymlinks), pkgData);
});
}
function getRpdCacheKey(pkgName, basedir, preserveSymlinks) {
return `rpd_${pkgName}_${basedir}_${preserveSymlinks}`;
}
function getFnpdCache(packageCache, basedir, originalBasedir) {
const cacheKey = getFnpdCacheKey(basedir);
const pkgData = packageCache.get(cacheKey);
if (pkgData) {
traverseBetweenDirs(originalBasedir, basedir, (dir) => {
packageCache.set(getFnpdCacheKey(dir), pkgData);
});
return pkgData;
}
}
function setFnpdCache(packageCache, pkgData, basedir, originalBasedir) {
packageCache.set(getFnpdCacheKey(basedir), pkgData);
traverseBetweenDirs(originalBasedir, basedir, (dir) => {
packageCache.set(getFnpdCacheKey(dir), pkgData);
});
}
function getFnpdCacheKey(basedir) {
return `fnpd_${basedir}`;
}
function traverseBetweenDirs(longerDir, shorterDir, cb) {
while (longerDir !== shorterDir) {
cb(longerDir);
longerDir = path$n.dirname(longerDir);
}
}
const createFilter = createFilter$1;
const replaceSlashOrColonRE = /[/:]/g;
const replaceDotRE = /\./g;
const replaceNestedIdRE = /\s*>\s*/g;
const replaceHashRE = /#/g;
const flattenId = (id) => {
const flatId = limitFlattenIdLength(
id.replace(replaceSlashOrColonRE, "_").replace(replaceDotRE, "__").replace(replaceNestedIdRE, "___").replace(replaceHashRE, "____")
);
return flatId;
};
const FLATTEN_ID_HASH_LENGTH = 8;
const FLATTEN_ID_MAX_FILE_LENGTH = 170;
const limitFlattenIdLength = (id, limit = FLATTEN_ID_MAX_FILE_LENGTH) => {
if (id.length <= limit) {
return id;
}
return id.slice(0, limit - (FLATTEN_ID_HASH_LENGTH + 1)) + "_" + getHash(id);
};
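/**
* Illustrative examples of the id flattening above (derived from the replace
* rules; not exhaustive):
*
* @example
*   flattenId('react-dom/client');  // 'react-dom_client'
*   flattenId('lodash.merge');      // 'lodash__merge'
*   flattenId('react > scheduler'); // 'react___scheduler'
*   // Ids longer than FLATTEN_ID_MAX_FILE_LENGTH are truncated and suffixed
*   // with "_" plus an 8-char hash so the result stays a safe file name.
*/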
const normalizeId = (id) => id.replace(replaceNestedIdRE, " > ");
const NODE_BUILTIN_NAMESPACE = "node:";
const NPM_BUILTIN_NAMESPACE = "npm:";
const BUN_BUILTIN_NAMESPACE = "bun:";
const nodeBuiltins = builtinModules.filter((id) => !id.includes(":"));
function isBuiltin(id) {
if (process.versions.deno && id.startsWith(NPM_BUILTIN_NAMESPACE)) return true;
if (process.versions.bun && id.startsWith(BUN_BUILTIN_NAMESPACE)) return true;
return isNodeBuiltin(id);
}
function isNodeBuiltin(id) {
if (id.startsWith(NODE_BUILTIN_NAMESPACE)) return true;
return nodeBuiltins.includes(id);
}
function isInNodeModules$1(id) {
return id.includes("node_modules");
}
function moduleListContains(moduleList, id) {
return moduleList?.some(
(m) => m === id || id.startsWith(withTrailingSlash(m))
);
}
function isOptimizable(id, optimizeDeps) {
const { extensions } = optimizeDeps;
return OPTIMIZABLE_ENTRY_RE.test(id) || (extensions?.some((ext) => id.endsWith(ext)) ?? false);
}
const bareImportRE = /^(?![a-zA-Z]:)[\w@](?!.*:\/\/)/;
const deepImportRE = /^([^@][^/]*)\/|^(@[^/]+\/[^/]+)\//;
const _require$1 = createRequire$1(import.meta.url);
function resolveDependencyVersion(dep, pkgRelativePath = "../../package.json") {
const pkgPath = path$n.resolve(_require$1.resolve(dep), pkgRelativePath);
return JSON.parse(fs__default.readFileSync(pkgPath, "utf-8")).version;
}
const rollupVersion = resolveDependencyVersion("rollup");
const filter = process.env.VITE_DEBUG_FILTER;
const DEBUG = process.env.DEBUG;
function createDebugger(namespace, options = {}) {
const log = debug$i(namespace);
const { onlyWhenFocused } = options;
let enabled = log.enabled;
if (enabled && onlyWhenFocused) {
const ns = typeof onlyWhenFocused === "string" ? onlyWhenFocused : namespace;
enabled = !!DEBUG?.includes(ns);
}
if (enabled) {
return (...args) => {
if (!filter || args.some((a) => a?.includes?.(filter))) {
log(...args);
}
};
}
}
function testCaseInsensitiveFS() {
if (!CLIENT_ENTRY.endsWith("client.mjs")) {
throw new Error(
`cannot test case insensitive FS, CLIENT_ENTRY const doesn't contain client.mjs`
);
}
if (!fs__default.existsSync(CLIENT_ENTRY)) {
throw new Error(
"cannot test case insensitive FS, CLIENT_ENTRY does not point to an existing file: " + CLIENT_ENTRY
);
}
return fs__default.existsSync(CLIENT_ENTRY.replace("client.mjs", "cLiEnT.mjs"));
}
const urlCanParse = (
// eslint-disable-next-line n/no-unsupported-features/node-builtins
URL$3.canParse ?? // URL.canParse is supported from Node.js 18.17.0+, 20.0.0+
((path2, base) => {
try {
new URL$3(path2, base);
return true;
} catch {
return false;
}
})
);
const isCaseInsensitiveFS = testCaseInsensitiveFS();
const VOLUME_RE = /^[A-Z]:/i;
function normalizePath$3(id) {
return path$n.posix.normalize(isWindows$3 ? slash$1(id) : id);
}
function fsPathFromId(id) {
const fsPath = normalizePath$3(
id.startsWith(FS_PREFIX) ? id.slice(FS_PREFIX.length) : id
);
return fsPath[0] === "/" || VOLUME_RE.test(fsPath) ? fsPath : `/${fsPath}`;
}
function fsPathFromUrl(url) {
return fsPathFromId(cleanUrl(url));
}
function isParentDirectory(dir, file) {
dir = withTrailingSlash(dir);
return file.startsWith(dir) || isCaseInsensitiveFS && file.toLowerCase().startsWith(dir.toLowerCase());
}
function isSameFileUri(file1, file2) {
return file1 === file2 || isCaseInsensitiveFS && file1.toLowerCase() === file2.toLowerCase();
}
const externalRE = /^(https?:)?\/\//;
const isExternalUrl = (url) => externalRE.test(url);
const dataUrlRE = /^\s*data:/i;
const isDataUrl = (url) => dataUrlRE.test(url);
const virtualModuleRE = /^virtual-module:.*/;
const virtualModulePrefix = "virtual-module:";
const knownJsSrcRE = /\.(?:[jt]sx?|m[jt]s|vue|marko|svelte|astro|imba|mdx)(?:$|\?)/;
const isJSRequest = (url) => {
url = cleanUrl(url);
if (knownJsSrcRE.test(url)) {
return true;
}
if (!path$n.extname(url) && url[url.length - 1] !== "/") {
return true;
}
return false;
};
const knownTsRE = /\.(?:ts|mts|cts|tsx)(?:$|\?)/;
const isTsRequest = (url) => knownTsRE.test(url);
const importQueryRE = /(\?|&)import=?(?:&|$)/;
const directRequestRE$1 = /(\?|&)direct=?(?:&|$)/;
const internalPrefixes = [
FS_PREFIX,
VALID_ID_PREFIX,
CLIENT_PUBLIC_PATH,
ENV_PUBLIC_PATH
];
const InternalPrefixRE = new RegExp(`^(?:${internalPrefixes.join("|")})`);
const trailingSeparatorRE = /[?&]$/;
const isImportRequest = (url) => importQueryRE.test(url);
const isInternalRequest = (url) => InternalPrefixRE.test(url);
function removeImportQuery(url) {
return url.replace(importQueryRE, "$1").replace(trailingSeparatorRE, "");
}
function removeDirectQuery(url) {
return url.replace(directRequestRE$1, "$1").replace(trailingSeparatorRE, "");
}
const urlRE = /(\?|&)url(?:&|$)/;
const rawRE = /(\?|&)raw(?:&|$)/;
function removeUrlQuery(url) {
return url.replace(urlRE, "$1").replace(trailingSeparatorRE, "");
}
const replacePercentageRE = /%/g;
function injectQuery(url, queryToInject) {
const resolvedUrl = new URL$3(
url.replace(replacePercentageRE, "%25"),
"relative:///"
);
const { search, hash } = resolvedUrl;
let pathname = cleanUrl(url);
pathname = isWindows$3 ? slash$1(pathname) : pathname;
return `${pathname}?${queryToInject}${search ? `&` + search.slice(1) : ""}${hash ?? ""}`;
}
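/**
* Illustrative examples of the query helpers above (computed from the code,
* not part of the original bundle):
*
* @example
*   injectQuery('/src/main.ts?foo=bar', 'import'); // '/src/main.ts?import&foo=bar'
*   injectQuery('/src/main.ts', 'direct');         // '/src/main.ts?direct'
*   removeImportQuery('/src/main.ts?import&foo');  // '/src/main.ts?foo'
*/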
const timestampRE = /\bt=\d{13}&?\b/;
function removeTimestampQuery(url) {
return url.replace(timestampRE, "").replace(trailingSeparatorRE, "");
}
async function asyncReplace(input, re, replacer) {
let match;
let remaining = input;
let rewritten = "";
while (match = re.exec(remaining)) {
rewritten += remaining.slice(0, match.index);
rewritten += await replacer(match);
remaining = remaining.slice(match.index + match[0].length);
}
rewritten += remaining;
return rewritten;
}
function timeFrom(start, subtract = 0) {
const time = performance$1.now() - start - subtract;
const timeString = (time.toFixed(2) + `ms`).padEnd(5, " ");
if (time < 10) {
return colors$1.green(timeString);
} else if (time < 50) {
return colors$1.yellow(timeString);
} else {
return colors$1.red(timeString);
}
}
function prettifyUrl(url, root) {
url = removeTimestampQuery(url);
const isAbsoluteFile = url.startsWith(root);
if (isAbsoluteFile || url.startsWith(FS_PREFIX)) {
const file = path$n.posix.relative(
root,
isAbsoluteFile ? url : fsPathFromId(url)
);
return colors$1.dim(file);
} else {
return colors$1.dim(url);
}
}
function isObject$1(value) {
return Object.prototype.toString.call(value) === "[object Object]";
}
function isDefined(value) {
return value != null;
}
function tryStatSync(file) {
try {
return fs__default.statSync(file, { throwIfNoEntry: false });
} catch {
}
}
function lookupFile(dir, fileNames) {
while (dir) {
for (const fileName of fileNames) {
const fullPath = path$n.join(dir, fileName);
if (tryStatSync(fullPath)?.isFile()) return fullPath;
}
const parentDir = path$n.dirname(dir);
if (parentDir === dir) return;
dir = parentDir;
}
}
function isFilePathESM(filePath, packageCache) {
if (/\.m[jt]s$/.test(filePath)) {
return true;
} else if (/\.c[jt]s$/.test(filePath)) {
return false;
} else {
try {
const pkg = findNearestPackageData(path$n.dirname(filePath), packageCache);
return pkg?.data.type === "module";
} catch {
return false;
}
}
}
const splitRE = /\r?\n/g;
const range = 2;
function pad$1(source, n = 2) {
const lines = source.split(splitRE);
return lines.map((l) => " ".repeat(n) + l).join("\n");
}
function posToNumber(source, pos) {
if (typeof pos === "number") return pos;
const lines = source.split(splitRE);
const { line, column } = pos;
let start = 0;
for (let i = 0; i < line - 1 && i < lines.length; i++) {
start += lines[i].length + 1;
}
return start + column;
}
function numberToPos(source, offset) {
if (typeof offset !== "number") return offset;
if (offset > source.length) {
throw new Error(
`offset is longer than source length! offset ${offset} > length ${source.length}`
);
}
const lines = source.split(splitRE);
let counted = 0;
let line = 0;
let column = 0;
for (; line < lines.length; line++) {
const lineLength = lines[line].length + 1;
if (counted + lineLength >= offset) {
column = offset - counted + 1;
break;
}
counted += lineLength;
}
return { line: line + 1, column };
}
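/**
* Illustrative example of `posToNumber` above (assuming a two-line source
* string); numeric positions are passed through unchanged:
*
* @example
*   const src = 'const a = 1\nconst b = 2';
*   posToNumber(src, { line: 2, column: 0 }); // 12 (offset of the second line)
*   posToNumber(src, 5);                      // 5
*/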
function generateCodeFrame(source, start = 0, end) {
start = Math.max(posToNumber(source, start), 0);
end = Math.min(
end !== void 0 ? posToNumber(source, end) : start,
source.length
);
const lines = source.split(splitRE);
let count = 0;
const res = [];
for (let i = 0; i < lines.length; i++) {
count += lines[i].length;
if (count >= start) {
for (let j = i - range; j <= i + range || end > count; j++) {
if (j < 0 || j >= lines.length) continue;
const line = j + 1;
res.push(
`${line}${" ".repeat(Math.max(3 - String(line).length, 0))}| ${lines[j]}`
);
const lineLength = lines[j].length;
if (j === i) {
const pad2 = Math.max(start - (count - lineLength), 0);
const length = Math.max(
1,
end > count ? lineLength - pad2 : end - start
);
res.push(` | ` + " ".repeat(pad2) + "^".repeat(length));
} else if (j > i) {
if (end > count) {
const length = Math.max(Math.min(end - count, lineLength), 1);
res.push(` | ` + "^".repeat(length));
}
count += lineLength + 1;
}
}
break;
}
count++;
}
return res.join("\n");
}
function isFileReadable(filename) {
if (!tryStatSync(filename)) {
return false;
}
try {
fs__default.accessSync(filename, fs__default.constants.R_OK);
return true;
} catch {
return false;
}
}
const splitFirstDirRE = /(.+?)[\\/](.+)/;
function emptyDir(dir, skip) {
const skipInDir = [];
let nested = null;
if (skip?.length) {
for (const file of skip) {
if (path$n.dirname(file) !== ".") {
const matched = splitFirstDirRE.exec(file);
if (matched) {
nested ??= /* @__PURE__ */ new Map();
const [, nestedDir, skipPath] = matched;
let nestedSkip = nested.get(nestedDir);
if (!nestedSkip) {
nestedSkip = [];
nested.set(nestedDir, nestedSkip);
}
if (!nestedSkip.includes(skipPath)) {
nestedSkip.push(skipPath);
}
}
} else {
skipInDir.push(file);
}
}
}
for (const file of fs__default.readdirSync(dir)) {
if (skipInDir.includes(file)) {
continue;
}
if (nested?.has(file)) {
emptyDir(path$n.resolve(dir, file), nested.get(file));
} else {
fs__default.rmSync(path$n.resolve(dir, file), { recursive: true, force: true });
}
}
}
function copyDir(srcDir, destDir) {
fs__default.mkdirSync(destDir, { recursive: true });
for (const file of fs__default.readdirSync(srcDir)) {
const srcFile = path$n.resolve(srcDir, file);
if (srcFile === destDir) {
continue;
}
const destFile = path$n.resolve(destDir, file);
const stat = fs__default.statSync(srcFile);
if (stat.isDirectory()) {
copyDir(srcFile, destFile);
} else {
fs__default.copyFileSync(srcFile, destFile);
}
}
}
const ERR_SYMLINK_IN_RECURSIVE_READDIR = "ERR_SYMLINK_IN_RECURSIVE_READDIR";
async function recursiveReaddir(dir) {
if (!fs__default.existsSync(dir)) {
return [];
}
let dirents;
try {
dirents = await fsp.readdir(dir, { withFileTypes: true });
} catch (e) {
if (e.code === "EACCES") {
return [];
}
throw e;
}
if (dirents.some((dirent) => dirent.isSymbolicLink())) {
const err = new Error(
"Symbolic links are not supported in recursiveReaddir"
);
err.code = ERR_SYMLINK_IN_RECURSIVE_READDIR;
throw err;
}
const files = await Promise.all(
dirents.map((dirent) => {
const res = path$n.resolve(dir, dirent.name);
return dirent.isDirectory() ? recursiveReaddir(res) : normalizePath$3(res);
})
);
return files.flat(1);
}
let safeRealpathSync = isWindows$3 ? windowsSafeRealPathSync : fs__default.realpathSync.native;
const windowsNetworkMap = /* @__PURE__ */ new Map();
function windowsMappedRealpathSync(path2) {
const realPath = fs__default.realpathSync.native(path2);
if (realPath.startsWith("\\\\")) {
for (const [network, volume] of windowsNetworkMap) {
if (realPath.startsWith(network)) return realPath.replace(network, volume);
}
}
return realPath;
}
const parseNetUseRE = /^\w* +(\w:) +([^ ]+)\s/;
let firstSafeRealPathSyncRun = false;
function windowsSafeRealPathSync(path2) {
if (!firstSafeRealPathSyncRun) {
optimizeSafeRealPathSync();
firstSafeRealPathSyncRun = true;
}
return fs__default.realpathSync(path2);
}
function optimizeSafeRealPathSync() {
const nodeVersion = process.versions.node.split(".").map(Number);
if (nodeVersion[0] < 18 || nodeVersion[0] === 18 && nodeVersion[1] < 10) {
safeRealpathSync = fs__default.realpathSync;
return;
}
try {
fs__default.realpathSync.native(path$n.resolve("./"));
} catch (error) {
if (error.message.includes("EISDIR: illegal operation on a directory")) {
safeRealpathSync = fs__default.realpathSync;
return;
}
}
exec("net use", (error, stdout) => {
if (error) return;
const lines = stdout.split("\n");
for (const line of lines) {
const m = parseNetUseRE.exec(line);
if (m) windowsNetworkMap.set(m[2], m[1]);
}
if (windowsNetworkMap.size === 0) {
safeRealpathSync = fs__default.realpathSync.native;
} else {
safeRealpathSync = windowsMappedRealpathSync;
}
});
}
function ensureWatchedFile(watcher, file, root) {
if (file && // only need to watch if out of root
!file.startsWith(withTrailingSlash(root)) && // some rollup plugins use null bytes for private resolved Ids
!file.includes("\0") && fs__default.existsSync(file)) {
watcher.add(path$n.resolve(file));
}
}
const escapedSpaceCharacters = /(?: |\\t|\\n|\\f|\\r)+/g;
const imageSetUrlRE = /^(?:[\w\-]+\(.*?\)|'.*?'|".*?"|\S*)/;
function joinSrcset(ret) {
return ret.map(({ url, descriptor }) => url + (descriptor ? ` ${descriptor}` : "")).join(", ");
}
function splitSrcSetDescriptor(srcs) {
return splitSrcSet(srcs).map((s) => {
const src = s.replace(escapedSpaceCharacters, " ").trim();
const url = imageSetUrlRE.exec(src)?.[0] ?? "";
return {
url,
descriptor: src.slice(url.length).trim()
};
}).filter(({ url }) => !!url);
}
function processSrcSet(srcs, replacer) {
return Promise.all(
splitSrcSetDescriptor(srcs).map(async ({ url, descriptor }) => ({
url: await replacer({ url, descriptor }),
descriptor
}))
).then(joinSrcset);
}
function processSrcSetSync(srcs, replacer) {
return joinSrcset(
splitSrcSetDescriptor(srcs).map(({ url, descriptor }) => ({
url: replacer({ url, descriptor }),
descriptor
}))
);
}
const cleanSrcSetRE = /(?:url|image|gradient|cross-fade)\([^)]*\)|"([^"]|(?<=\\)")*"|'([^']|(?<=\\)')*'|data:\w+\/[\w.+\-]+;base64,[\w+/=]+|\?\S+,/g;
function splitSrcSet(srcs) {
const parts = [];
const cleanedSrcs = srcs.replace(cleanSrcSetRE, blankReplacer);
let startIndex = 0;
let splitIndex;
do {
splitIndex = cleanedSrcs.indexOf(",", startIndex);
parts.push(
srcs.slice(startIndex, splitIndex !== -1 ? splitIndex : void 0)
);
startIndex = splitIndex + 1;
} while (splitIndex !== -1);
return parts;
}
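/**
* Illustrative example of the srcset helpers above (the file names are
* hypothetical):
*
* @example
*   processSrcSetSync('hero-1x.png 1x, hero-2x.png 2x', ({ url }) => '/assets/' + url);
*   // -> '/assets/hero-1x.png 1x, /assets/hero-2x.png 2x'
*   // `processSrcSet` is the async variant and awaits the replacer per entry.
*/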
const windowsDriveRE = /^[A-Z]:/;
const replaceWindowsDriveRE = /^([A-Z]):\//;
const linuxAbsolutePathRE = /^\/[^/]/;
function escapeToLinuxLikePath(path2) {
if (windowsDriveRE.test(path2)) {
return path2.replace(replaceWindowsDriveRE, "/windows/$1/");
}
if (linuxAbsolutePathRE.test(path2)) {
return `/linux${path2}`;
}
return path2;
}
const revertWindowsDriveRE = /^\/windows\/([A-Z])\//;
function unescapeToLinuxLikePath(path2) {
if (path2.startsWith("/linux/")) {
return path2.slice("/linux".length);
}
if (path2.startsWith("/windows/")) {
return path2.replace(revertWindowsDriveRE, "$1:/");
}
return path2;
}
const nullSourceMap = {
names: [],
sources: [],
mappings: "",
version: 3
};
function combineSourcemaps(filename, sourcemapList) {
if (sourcemapList.length === 0 || sourcemapList.every((m) => m.sources.length === 0)) {
return { ...nullSourceMap };
}
sourcemapList = sourcemapList.map((sourcemap) => {
const newSourcemaps = { ...sourcemap };
newSourcemaps.sources = sourcemap.sources.map(
(source) => source ? escapeToLinuxLikePath(source) : null
);
if (sourcemap.sourceRoot) {
newSourcemaps.sourceRoot = escapeToLinuxLikePath(sourcemap.sourceRoot);
}
return newSourcemaps;
});
let map;
let mapIndex = 1;
const useArrayInterface = sourcemapList.slice(0, -1).find((m) => m.sources.length !== 1) === void 0;
if (useArrayInterface) {
map = remapping(sourcemapList, () => null);
} else {
map = remapping(sourcemapList[0], function loader(sourcefile) {
const mapForSources = sourcemapList.slice(mapIndex).find((s) => s.sources.includes(sourcefile));
if (mapForSources) {
mapIndex++;
return mapForSources;
}
return null;
});
}
if (!map.file) {
delete map.file;
}
map.sources = map.sources.map(
(source) => source ? unescapeToLinuxLikePath(source) : source
);
map.file = filename;
return map;
}
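/**
* Usage sketch (illustrative): combining the maps of two sequential transforms
* of the same file. The list is ordered newest transform first, matching what
* `remapping` expects; the map variables are hypothetical placeholders.
*
* @example
*   const combined = combineSourcemaps('/src/app.ts', [minifyMap, transpileMap]);
*   // Windows drive letters and absolute paths are temporarily rewritten to
*   // "linux-like" paths (see escapeToLinuxLikePath above) so remapping can
*   // resolve relative sources, then restored on the way out.
*/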
function unique(arr) {
return Array.from(new Set(arr));
}
async function getLocalhostAddressIfDiffersFromDNS() {
const [nodeResult, dnsResult] = await Promise.all([
promises.lookup("localhost"),
promises.lookup("localhost", { verbatim: true })
]);
const isSame = nodeResult.family === dnsResult.family && nodeResult.address === dnsResult.address;
return isSame ? void 0 : nodeResult.address;
}
function diffDnsOrderChange(oldUrls, newUrls) {
return !(oldUrls === newUrls || oldUrls && newUrls && arrayEqual(oldUrls.local, newUrls.local) && arrayEqual(oldUrls.network, newUrls.network));
}
async function resolveHostname(optionsHost) {
let host;
if (optionsHost === void 0 || optionsHost === false) {
host = "localhost";
} else if (optionsHost === true) {
host = void 0;
} else {
host = optionsHost;
}
let name = host === void 0 || wildcardHosts.has(host) ? "localhost" : host;
if (host === "localhost") {
const localhostAddr = await getLocalhostAddressIfDiffersFromDNS();
if (localhostAddr) {
name = localhostAddr;
}
}
return { host, name };
}
async function resolveServerUrls(server, options, config) {
const address = server.address();
const isAddressInfo = (x) => x?.address;
if (!isAddressInfo(address)) {
return { local: [], network: [] };
}
const local = [];
const network = [];
const hostname = await resolveHostname(options.host);
const protocol = options.https ? "https" : "http";
const port = address.port;
const base = config.rawBase === "./" || config.rawBase === "" ? "/" : config.rawBase;
if (hostname.host !== void 0 && !wildcardHosts.has(hostname.host)) {
let hostnameName = hostname.name;
if (hostnameName.includes(":")) {
hostnameName = `[${hostnameName}]`;
}
const address2 = `${protocol}://${hostnameName}:${port}${base}`;
if (loopbackHosts.has(hostname.host)) {
local.push(address2);
} else {
network.push(address2);
}
} else {
Object.values(os$5.networkInterfaces()).flatMap((nInterface) => nInterface ?? []).filter(
(detail) => detail && detail.address && (detail.family === "IPv4" || // @ts-expect-error Node 18.0 - 18.3 returns number
detail.family === 4)
).forEach((detail) => {
let host = detail.address.replace("127.0.0.1", hostname.name);
if (host.includes(":")) {
host = `[${host}]`;
}
const url = `${protocol}://${host}:${port}${base}`;
if (detail.address.includes("127.0.0.1")) {
local.push(url);
} else {
network.push(url);
}
});
}
return { local, network };
}
function arraify(target) {
return Array.isArray(target) ? target : [target];
}
const multilineCommentsRE = /\/\*[^*]*\*+(?:[^/*][^*]*\*+)*\//g;
const singlelineCommentsRE = /\/\/.*/g;
const requestQuerySplitRE = /\?(?!.*[/|}])/;
const requestQueryMaybeEscapedSplitRE = /\\?\?(?!.*[/|}])/;
const blankReplacer = (match) => " ".repeat(match.length);
function getHash(text, length = 8) {
const h = createHash$2("sha256").update(text).digest("hex").substring(0, length);
if (length <= 64) return h;
return h.padEnd(length, "_");
}
const _dirname = path$n.dirname(fileURLToPath(import.meta.url));
const requireResolveFromRootWithFallback = (root, id) => {
const found = resolvePackageData(id, root) || resolvePackageData(id, _dirname);
if (!found) {
const error = new Error(`${JSON.stringify(id)} not found.`);
error.code = "MODULE_NOT_FOUND";
throw error;
}
return _require$1.resolve(id, { paths: [root, _dirname] });
};
function emptyCssComments(raw) {
return raw.replace(multilineCommentsRE, blankReplacer);
}
function backwardCompatibleWorkerPlugins(plugins) {
if (Array.isArray(plugins)) {
return plugins;
}
if (typeof plugins === "function") {
return plugins();
}
return [];
}
function mergeConfigRecursively(defaults, overrides, rootPath) {
const merged = { ...defaults };
for (const key in overrides) {
const value = overrides[key];
if (value == null) {
continue;
}
const existing = merged[key];
if (existing == null) {
merged[key] = value;
continue;
}
if (key === "alias" && (rootPath === "resolve" || rootPath === "")) {
merged[key] = mergeAlias(existing, value);
continue;
} else if (key === "assetsInclude" && rootPath === "") {
merged[key] = [].concat(existing, value);
continue;
} else if (key === "noExternal" && rootPath === "ssr" && (existing === true || value === true)) {
merged[key] = true;
continue;
} else if (key === "plugins" && rootPath === "worker") {
merged[key] = () => [
...backwardCompatibleWorkerPlugins(existing),
...backwardCompatibleWorkerPlugins(value)
];
continue;
} else if (key === "server" && rootPath === "server.hmr") {
merged[key] = value;
continue;
}
if (Array.isArray(existing) || Array.isArray(value)) {
merged[key] = [...arraify(existing), ...arraify(value)];
continue;
}
if (isObject$1(existing) && isObject$1(value)) {
merged[key] = mergeConfigRecursively(
existing,
value,
rootPath ? `${rootPath}.${key}` : key
);
continue;
}
merged[key] = value;
}
return merged;
}
function mergeConfig(defaults, overrides, isRoot = true) {
if (typeof defaults === "function" || typeof overrides === "function") {
throw new Error(`Cannot merge config in form of callback`);
}
return mergeConfigRecursively(defaults, overrides, isRoot ? "" : ".");
}
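/**
* Illustrative example of config merging above: objects are merged deeply,
* arrays are concatenated, and a handful of keys (alias, assetsInclude,
* ssr.noExternal, worker.plugins, server.hmr.server) get special handling.
* Here `a` and `b` stand for placeholder plugin objects.
*
* @example
*   mergeConfig(
*     { server: { port: 3000 }, plugins: [a] },
*     { server: { host: true }, plugins: [b] }
*   );
*   // -> { server: { port: 3000, host: true }, plugins: [a, b] }
*/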
function mergeAlias(a, b) {
if (!a) return b;
if (!b) return a;
if (isObject$1(a) && isObject$1(b)) {
return { ...a, ...b };
}
return [...normalizeAlias(b), ...normalizeAlias(a)];
}
function normalizeAlias(o = []) {
return Array.isArray(o) ? o.map(normalizeSingleAlias) : Object.keys(o).map(
(find) => normalizeSingleAlias({
find,
replacement: o[find]
})
);
}
function normalizeSingleAlias({
find,
replacement,
customResolver
}) {
if (typeof find === "string" && find[find.length - 1] === "/" && replacement[replacement.length - 1] === "/") {
find = find.slice(0, find.length - 1);
replacement = replacement.slice(0, replacement.length - 1);
}
const alias = {
find,
replacement
};
if (customResolver) {
alias.customResolver = customResolver;
}
return alias;
}
function transformStableResult(s, id, config) {
return {
code: s.toString(),
map: config.command === "build" && config.build.sourcemap ? s.generateMap({ hires: "boundary", source: id }) : null
};
}
async function asyncFlatten(arr) {
do {
arr = (await Promise.all(arr)).flat(Infinity);
} while (arr.some((v) => v?.then));
return arr;
}
function stripBomTag(content) {
if (content.charCodeAt(0) === 65279) {
return content.slice(1);
}
return content;
}
const windowsDrivePathPrefixRE = /^[A-Za-z]:[/\\]/;
const isNonDriveRelativeAbsolutePath = (p) => {
if (!isWindows$3) return p[0] === "/";
return windowsDrivePathPrefixRE.test(p);
};
function shouldServeFile(filePath, root) {
if (!isCaseInsensitiveFS) return true;
return hasCorrectCase(filePath, root);
}
function hasCorrectCase(file, assets) {
if (file === assets) return true;
const parent = path$n.dirname(file);
if (fs__default.readdirSync(parent).includes(path$n.basename(file))) {
return hasCorrectCase(parent, assets);
}
return false;
}
function joinUrlSegments(a, b) {
if (!a || !b) {
return a || b || "";
}
if (a[a.length - 1] === "/") {
a = a.substring(0, a.length - 1);
}
if (b[0] !== "/") {
b = "/" + b;
}
return a + b;
}
function removeLeadingSlash(str) {
return str[0] === "/" ? str.slice(1) : str;
}
function stripBase(path2, base) {
if (path2 === base) {
return "/";
}
const devBase = withTrailingSlash(base);
return path2.startsWith(devBase) ? path2.slice(devBase.length - 1) : path2;
}
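/**
* Illustrative examples of the URL segment helpers above:
*
* @example
*   joinUrlSegments('/base/', 'assets/app.js'); // '/base/assets/app.js'
*   stripBase('/base/index.html', '/base');     // '/index.html'
*   stripBase('/base', '/base');                // '/'
*/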
function arrayEqual(a, b) {
if (a === b) return true;
if (a.length !== b.length) return false;
for (let i = 0; i < a.length; i++) {
if (a[i] !== b[i]) return false;
}
return true;
}
function evalValue(rawValue) {
const fn = new Function(`
var console, exports, global, module, process, require
return (
${rawValue}
)
`);
return fn();
}
function getNpmPackageName(importPath) {
const parts = importPath.split("/");
if (parts[0][0] === "@") {
if (!parts[1]) return null;
return `${parts[0]}/${parts[1]}`;
} else {
return parts[0];
}
}
const escapeRegexRE = /[-/\\^$*+?.()|[\]{}]/g;
function escapeRegex(str) {
return str.replace(escapeRegexRE, "\\$&");
}
function getPackageManagerCommand(type = "install") {
const packageManager = process.env.npm_config_user_agent?.split(" ")[0].split("/")[0] || "npm";
switch (type) {
case "install":
return packageManager === "npm" ? "npm install" : `${packageManager} add`;
case "uninstall":
return packageManager === "npm" ? "npm uninstall" : `${packageManager} remove`;
case "update":
return packageManager === "yarn" ? "yarn upgrade" : `${packageManager} update`;
default:
throw new TypeError(`Unknown command type: ${type}`);
}
}
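/**
* Illustrative example: the package manager is inferred from
* `npm_config_user_agent` (e.g. a value starting with "pnpm/..."):
*
* @example
*   // with npm_config_user_agent set by pnpm
*   getPackageManagerCommand('install');   // 'pnpm add'
*   getPackageManagerCommand('uninstall'); // 'pnpm remove'
*   // falls back to npm when the variable is unset -> 'npm install'
*/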
function isDevServer(server) {
return "pluginContainer" in server;
}
function promiseWithResolvers() {
let resolve;
let reject;
const promise = new Promise((_resolve, _reject) => {
resolve = _resolve;
reject = _reject;
});
return { promise, resolve, reject };
}
function createSerialPromiseQueue() {
let previousTask;
return {
async run(f) {
const thisTask = f();
const depTasks = Promise.all([previousTask, thisTask]);
previousTask = depTasks;
const [, result] = await depTasks;
if (previousTask === depTasks) {
previousTask = void 0;
}
return result;
}
};
}
function sortObjectKeys(obj) {
const sorted = {};
for (const key of Object.keys(obj).sort()) {
sorted[key] = obj[key];
}
return sorted;
}
function displayTime(time) {
if (time < 1e3) {
return `${time}ms`;
}
time = time / 1e3;
if (time < 60) {
return `${time.toFixed(2)}s`;
}
const mins = parseInt((time / 60).toString());
const seconds = time % 60;
return `${mins}m${seconds < 1 ? "" : ` ${seconds.toFixed(0)}s`}`;
}
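/**
* Illustrative outputs of `displayTime` above (input in milliseconds):
*
* @example
*   displayTime(850);    // '850ms'
*   displayTime(1234);   // '1.23s'
*   displayTime(125000); // '2m 5s'
*/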
function encodeURIPath(uri) {
if (uri.startsWith("data:")) return uri;
const filePath = cleanUrl(uri);
const postfix = filePath !== uri ? uri.slice(filePath.length) : "";
return encodeURI(filePath) + postfix;
}
function partialEncodeURIPath(uri) {
if (uri.startsWith("data:")) return uri;
const filePath = cleanUrl(uri);
const postfix = filePath !== uri ? uri.slice(filePath.length) : "";
return filePath.replaceAll("%", "%25") + postfix;
}
const setupSIGTERMListener = (callback) => {
process.once("SIGTERM", callback);
if (process.env.CI !== "true") {
process.stdin.on("end", callback);
}
};
const teardownSIGTERMListener = (callback) => {
process.off("SIGTERM", callback);
if (process.env.CI !== "true") {
process.stdin.off("end", callback);
}
};
const LogLevels = {
silent: 0,
error: 1,
warn: 2,
info: 3
};
let lastType;
let lastMsg;
let sameCount = 0;
function clearScreen() {
const repeatCount = process.stdout.rows - 2;
const blank = repeatCount > 0 ? "\n".repeat(repeatCount) : "";
console.log(blank);
readline.cursorTo(process.stdout, 0, 0);
readline.clearScreenDown(process.stdout);
}
let timeFormatter;
function getTimeFormatter() {
timeFormatter ??= new Intl.DateTimeFormat(void 0, {
hour: "numeric",
minute: "numeric",
second: "numeric"
});
return timeFormatter;
}
function createLogger(level = "info", options = {}) {
if (options.customLogger) {
return options.customLogger;
}
const loggedErrors = /* @__PURE__ */ new WeakSet();
const { prefix = "[vite]", allowClearScreen = true } = options;
const thresh = LogLevels[level];
const canClearScreen = allowClearScreen && process.stdout.isTTY && !process.env.CI;
const clear = canClearScreen ? clearScreen : () => {
};
function format(type, msg, options2 = {}) {
if (options2.timestamp) {
let tag = "";
if (type === "info") {
tag = colors$1.cyan(colors$1.bold(prefix));
} else if (type === "warn") {
tag = colors$1.yellow(colors$1.bold(prefix));
} else {
tag = colors$1.red(colors$1.bold(prefix));
}
return `${colors$1.dim(getTimeFormatter().format(/* @__PURE__ */ new Date()))} ${tag} ${msg}`;
} else {
return msg;
}
}
function output(type, msg, options2 = {}) {
if (thresh >= LogLevels[type]) {
const method = type === "info" ? "log" : type;
if (options2.error) {
loggedErrors.add(options2.error);
}
if (canClearScreen) {
if (type === lastType && msg === lastMsg) {
sameCount++;
clear();
console[method](
format(type, msg, options2),
colors$1.yellow(`(x${sameCount + 1})`)
);
} else {
sameCount = 0;
lastMsg = msg;
lastType = type;
if (options2.clear) {
clear();
}
console[method](format(type, msg, options2));
}
} else {
console[method](format(type, msg, options2));
}
}
}
const warnedMessages = /* @__PURE__ */ new Set();
const logger = {
hasWarned: false,
info(msg, opts) {
output("info", msg, opts);
},
warn(msg, opts) {
logger.hasWarned = true;
output("warn", msg, opts);
},
warnOnce(msg, opts) {
if (warnedMessages.has(msg)) return;
logger.hasWarned = true;
output("warn", msg, opts);
warnedMessages.add(msg);
},
error(msg, opts) {
logger.hasWarned = true;
output("error", msg, opts);
},
clearScreen(type) {
if (thresh >= LogLevels[type]) {
clear();
}
},
hasErrorLogged(error) {
return loggedErrors.has(error);
}
};
return logger;
}
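/**
* Usage sketch for the logger factory above (illustrative; the prefix is a
* placeholder and the printed timestamp depends on the locale):
*
* @example
*   const logger = createLogger('info', { prefix: '[my-plugin]' });
*   logger.info('ready', { timestamp: true }); // roughly "12:00:00 [my-plugin] ready"
*   logger.warnOnce('deprecated option');      // logged a single time per message
*   // When stdout is a TTY (and not CI), repeated identical messages are
*   // collapsed into a "(xN)" counter instead of new lines.
*/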
function printServerUrls(urls, optionsHost, info) {
const colorUrl = (url) => colors$1.cyan(url.replace(/:(\d+)\//, (_, port) => `:${colors$1.bold(port)}/`));
for (const url of urls.local) {
info(` ${colors$1.green("\u279C")} ${colors$1.bold("Local")}: ${colorUrl(url)}`);
}
for (const url of urls.network) {
info(` ${colors$1.green("\u279C")} ${colors$1.bold("Network")}: ${colorUrl(url)}`);
}
if (urls.network.length === 0 && optionsHost === void 0) {
info(
colors$1.dim(` ${colors$1.green("\u279C")} ${colors$1.bold("Network")}: use `) + colors$1.bold("--host") + colors$1.dim(" to expose")
);
}
}
const groups = [
{ name: "Assets", color: colors$1.green },
{ name: "CSS", color: colors$1.magenta },
{ name: "JS", color: colors$1.cyan }
];
const COMPRESSIBLE_ASSETS_RE = /\.(?:html|json|svg|txt|xml|xhtml)$/;
function buildReporterPlugin(config) {
const compress = promisify$4(gzip);
const chunkLimit = config.build.chunkSizeWarningLimit;
const numberFormatter = new Intl.NumberFormat("en", {
maximumFractionDigits: 2,
minimumFractionDigits: 2
});
const displaySize = (bytes) => {
return `${numberFormatter.format(bytes / 1e3)} kB`;
};
const tty = process.stdout.isTTY && !process.env.CI;
const shouldLogInfo = LogLevels[config.logLevel || "info"] >= LogLevels.info;
let hasTransformed = false;
let hasRenderedChunk = false;
let hasCompressChunk = false;
let transformedCount = 0;
let chunkCount = 0;
let compressedCount = 0;
async function getCompressedSize(code) {
if (config.build.ssr || !config.build.reportCompressedSize) {
return null;
}
if (shouldLogInfo && !hasCompressChunk) {
if (!tty) {
config.logger.info("computing gzip size...");
} else {
writeLine("computing gzip size (0)...");
}
hasCompressChunk = true;
}
const compressed = await compress(
typeof code === "string" ? code : Buffer.from(code)
);
compressedCount++;
if (shouldLogInfo && tty) {
writeLine(`computing gzip size (${compressedCount})...`);
}
return compressed.length;
}
const logTransform = throttle((id) => {
writeLine(
`transforming (${transformedCount}) ${colors$1.dim(
path$n.relative(config.root, id)
)}`
);
});
return {
name: "vite:reporter",
transform(_, id) {
transformedCount++;
if (shouldLogInfo) {
if (!tty) {
if (!hasTransformed) {
config.logger.info(`transforming...`);
}
} else {
if (id.includes(`?`)) return;
logTransform(id);
}
hasTransformed = true;
}
return null;
},
buildStart() {
transformedCount = 0;
},
buildEnd() {
if (shouldLogInfo) {
if (tty) {
clearLine$1();
}
config.logger.info(
`${colors$1.green(`\u2713`)} ${transformedCount} modules transformed.`
);
}
},
renderStart() {
chunkCount = 0;
compressedCount = 0;
},
renderChunk(code, chunk, options) {
if (!options.inlineDynamicImports) {
for (const id of chunk.moduleIds) {
const module = this.getModuleInfo(id);
if (!module) continue;
if (module.importers.length && module.dynamicImporters.length) {
const detectedIneffectiveDynamicImport = module.dynamicImporters.some(
(id2) => !isInNodeModules$1(id2) && chunk.moduleIds.includes(id2)
);
if (detectedIneffectiveDynamicImport) {
this.warn(
`
(!) ${module.id} is dynamically imported by ${module.dynamicImporters.join(
", "
)} but also statically imported by ${module.importers.join(
", "
)}, dynamic import will not move module into another chunk.
`
);
}
}
}
}
chunkCount++;
if (shouldLogInfo) {
if (!tty) {
if (!hasRenderedChunk) {
config.logger.info("rendering chunks...");
}
} else {
writeLine(`rendering chunks (${chunkCount})...`);
}
hasRenderedChunk = true;
}
return null;
},
generateBundle() {
if (shouldLogInfo && tty) clearLine$1();
},
async writeBundle({ dir: outDir }, output) {
let hasLargeChunks = false;
if (shouldLogInfo) {
const entries = (await Promise.all(
Object.values(output).map(
async (chunk) => {
if (chunk.type === "chunk") {
return {
name: chunk.fileName,
group: "JS",
size: chunk.code.length,
compressedSize: await getCompressedSize(chunk.code),
mapSize: chunk.map ? chunk.map.toString().length : null
};
} else {
if (chunk.fileName.endsWith(".map")) return null;
const isCSS = chunk.fileName.endsWith(".css");
const isCompressible = isCSS || COMPRESSIBLE_ASSETS_RE.test(chunk.fileName);
return {
name: chunk.fileName,
group: isCSS ? "CSS" : "Assets",
size: chunk.source.length,
mapSize: null,
// Rollup doesn't support CSS maps?
compressedSize: isCompressible ? await getCompressedSize(chunk.source) : null
};
}
}
)
)).filter(isDefined);
if (tty) clearLine$1();
let longest = 0;
let biggestSize = 0;
let biggestMap = 0;
let biggestCompressSize = 0;
for (const entry of entries) {
if (entry.name.length > longest) longest = entry.name.length;
if (entry.size > biggestSize) biggestSize = entry.size;
if (entry.mapSize && entry.mapSize > biggestMap) {
biggestMap = entry.mapSize;
}
if (entry.compressedSize && entry.compressedSize > biggestCompressSize) {
biggestCompressSize = entry.compressedSize;
}
}
const sizePad = displaySize(biggestSize).length;
const mapPad = displaySize(biggestMap).length;
const compressPad = displaySize(biggestCompressSize).length;
const relativeOutDir = normalizePath$3(
path$n.relative(
config.root,
path$n.resolve(config.root, outDir ?? config.build.outDir)
)
);
const assetsDir = path$n.join(config.build.assetsDir, "/");
for (const group of groups) {
const filtered = entries.filter((e) => e.group === group.name);
if (!filtered.length) continue;
for (const entry of filtered.sort((a, z) => a.size - z.size)) {
const isLarge = group.name === "JS" && entry.size / 1e3 > chunkLimit;
if (isLarge) hasLargeChunks = true;
const sizeColor = isLarge ? colors$1.yellow : colors$1.dim;
let log = colors$1.dim(withTrailingSlash(relativeOutDir));
log += !config.build.lib && entry.name.startsWith(withTrailingSlash(assetsDir)) ? colors$1.dim(assetsDir) + group.color(
entry.name.slice(assetsDir.length).padEnd(longest + 2 - assetsDir.length)
) : group.color(entry.name.padEnd(longest + 2));
log += colors$1.bold(
sizeColor(displaySize(entry.size).padStart(sizePad))
);
if (entry.compressedSize) {
log += colors$1.dim(
` \u2502 gzip: ${displaySize(entry.compressedSize).padStart(
compressPad
)}`
);
}
if (entry.mapSize) {
log += colors$1.dim(
` \u2502 map: ${displaySize(entry.mapSize).padStart(mapPad)}`
);
}
config.logger.info(log);
}
}
} else {
hasLargeChunks = Object.values(output).some((chunk) => {
return chunk.type === "chunk" && chunk.code.length / 1e3 > chunkLimit;
});
}
if (hasLargeChunks && config.build.minify && !config.build.lib && !config.build.ssr) {
config.logger.warn(
colors$1.yellow(
`
(!) Some chunks are larger than ${chunkLimit} kB after minification. Consider:
- Using dynamic import() to code-split the application
- Use build.rollupOptions.output.manualChunks to improve chunking: https://rollupjs.org/configuration-options/#output-manualchunks
- Adjust chunk size limit for this warning via build.chunkSizeWarningLimit.`
)
);
}
}
};
}
function writeLine(output) {
clearLine$1();
if (output.length < process.stdout.columns) {
process.stdout.write(output);
} else {
process.stdout.write(output.substring(0, process.stdout.columns - 1));
}
}
function clearLine$1() {
process.stdout.clearLine(0);
process.stdout.cursorTo(0);
}
function throttle(fn) {
let timerHandle = null;
return (...args) => {
if (timerHandle) return;
fn(...args);
timerHandle = setTimeout(() => {
timerHandle = null;
}, 100);
};
}
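// Usage sketch for throttle(): leading-edge only - the callback fires
// immediately, then further calls are dropped (not deferred) until the 100ms
// timer clears, which is how logTransform above caps progress redraws.
// `logProgress` and `watcher` below are illustrative names:
//
//   const logProgress = throttle((n) => writeLine(`processed ${n} files...`));
//   watcher.on('change', (n) => logProgress(n)); // at most one redraw per 100ms burst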
const POSIX_SEP_RE = new RegExp('\\' + path$n.posix.sep, 'g');
const NATIVE_SEP_RE = new RegExp('\\' + path$n.sep, 'g');
/** @type {Map<string,RegExp>} */
const PATTERN_REGEX_CACHE = new Map();
const GLOB_ALL_PATTERN = `**/*`;
const TS_EXTENSIONS = ['.ts', '.tsx', '.mts', '.cts'];
const JS_EXTENSIONS = ['.js', '.jsx', '.mjs', '.cjs'];
const TSJS_EXTENSIONS = TS_EXTENSIONS.concat(JS_EXTENSIONS);
const TS_EXTENSIONS_RE_GROUP = `\\.(?:${TS_EXTENSIONS.map((ext) => ext.substring(1)).join('|')})`;
const TSJS_EXTENSIONS_RE_GROUP = `\\.(?:${TSJS_EXTENSIONS.map((ext) => ext.substring(1)).join(
'|'
)})`;
const IS_POSIX = path$n.posix.sep === path$n.sep;
/**
* @template T
* @returns {{resolve:(result:T)=>void, reject:(error:any)=>void, promise: Promise<T>}}
*/
function makePromise() {
let resolve, reject;
const promise = new Promise((res, rej) => {
resolve = res;
reject = rej;
});
return { promise, resolve, reject };
}
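// Sketch of the deferred pattern makePromise() returns: the promise can be
// settled from outside its executor, which lets findUp() below store a single
// pending promise in the cache before the filesystem walk resolves it.
//
//   const { promise, resolve } = makePromise();
//   setTimeout(() => resolve('done'), 10);
//   promise.then((value) => console.log(value)); // logs 'done'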
/**
* @param {string} filename
* @param {import('./cache.js').TSConfckCache} [cache]
* @returns {Promise<string|void>}
*/
async function resolveTSConfigJson(filename, cache) {
if (path$n.extname(filename) !== '.json') {
return; // ignore files that are not json
}
const tsconfig = path$n.resolve(filename);
if (cache && (cache.hasParseResult(tsconfig) || cache.hasParseResult(filename))) {
return tsconfig;
}
return promises$1.stat(tsconfig).then((stat) => {
if (stat.isFile() || stat.isFIFO()) {
return tsconfig;
} else {
throw new Error(`${filename} exists but is not a regular file.`);
}
});
}
/**
*
* @param {string} dir an absolute directory path
* @returns {boolean} whether the dir path includes a node_modules segment
*/
const isInNodeModules = IS_POSIX
? (dir) => dir.includes('/node_modules/')
: (dir) => dir.match(/[/\\]node_modules[/\\]/);
/**
* convert posix separator to native separator
*
* eg.
* windows: C:/foo/bar -> c:\foo\bar
* linux: /foo/bar -> /foo/bar
*
* @param {string} filename with posix separators
* @returns {string} filename with native separators
*/
const posix2native = IS_POSIX
? (filename) => filename
: (filename) => filename.replace(POSIX_SEP_RE, path$n.sep);
/**
* convert native separator to posix separator
*
* eg.
* windows: C:\foo\bar -> c:/foo/bar
* linux: /foo/bar -> /foo/bar
*
* @param {string} filename - filename with native separators
* @returns {string} filename with posix separators
*/
const native2posix = IS_POSIX
? (filename) => filename
: (filename) => filename.replace(NATIVE_SEP_RE, path$n.posix.sep);
/**
* converts params to native separator, resolves path and converts native back to posix
*
* needed on windows to handle posix paths in tsconfig
*
* @param dir {string|null} directory to resolve from
* @param filename {string} filename or pattern to resolve
* @returns {string}
*/
const resolve2posix = IS_POSIX
? (dir, filename) => (dir ? path$n.resolve(dir, filename) : path$n.resolve(filename))
: (dir, filename) =>
native2posix(
dir
? path$n.resolve(posix2native(dir), posix2native(filename))
: path$n.resolve(posix2native(filename))
);
/**
*
* @param {import('./public.d.ts').TSConfckParseResult} result
* @param {import('./public.d.ts').TSConfckParseOptions} [options]
* @returns {string[]}
*/
function resolveReferencedTSConfigFiles(result, options) {
const dir = path$n.dirname(result.tsconfigFile);
return result.tsconfig.references.map((ref) => {
const refPath = ref.path.endsWith('.json')
? ref.path
: path$n.join(ref.path, options?.configName ?? 'tsconfig.json');
return resolve2posix(dir, refPath);
});
}
/**
* @param {string} filename
* @param {import('./public.d.ts').TSConfckParseResult} result
* @returns {import('./public.d.ts').TSConfckParseResult}
*/
function resolveSolutionTSConfig(filename, result) {
const allowJs = result.tsconfig.compilerOptions?.allowJs;
const extensions = allowJs ? TSJS_EXTENSIONS : TS_EXTENSIONS;
if (
result.referenced &&
extensions.some((ext) => filename.endsWith(ext)) &&
!isIncluded(filename, result)
) {
const solutionTSConfig = result.referenced.find((referenced) =>
isIncluded(filename, referenced)
);
if (solutionTSConfig) {
return solutionTSConfig;
}
}
return result;
}
/**
*
* @param {string} filename
* @param {import('./public.d.ts').TSConfckParseResult} result
* @returns {boolean}
*/
function isIncluded(filename, result) {
const dir = native2posix(path$n.dirname(result.tsconfigFile));
const files = (result.tsconfig.files || []).map((file) => resolve2posix(dir, file));
const absoluteFilename = resolve2posix(null, filename);
if (files.includes(filename)) {
return true;
}
const allowJs = result.tsconfig.compilerOptions?.allowJs;
const isIncluded = isGlobMatch(
absoluteFilename,
dir,
result.tsconfig.include || (result.tsconfig.files ? [] : [GLOB_ALL_PATTERN]),
allowJs
);
if (isIncluded) {
const isExcluded = isGlobMatch(absoluteFilename, dir, result.tsconfig.exclude || [], allowJs);
return !isExcluded;
}
return false;
}
/**
* test filenames against glob patterns in tsconfig
*
* @param filename {string} posix style absolute path to filename to test
* @param dir {string} posix style absolute path to directory of tsconfig containing patterns
* @param patterns {string[]} glob patterns to match against
* @param allowJs {boolean} allowJs setting in tsconfig to include js extensions in checks
* @returns {boolean} true when at least one pattern matches filename
*/
function isGlobMatch(filename, dir, patterns, allowJs) {
const extensions = allowJs ? TSJS_EXTENSIONS : TS_EXTENSIONS;
return patterns.some((pattern) => {
// filename must end with part of pattern that comes after last wildcard
let lastWildcardIndex = pattern.length;
let hasWildcard = false;
for (let i = pattern.length - 1; i > -1; i--) {
if (pattern[i] === '*' || pattern[i] === '?') {
lastWildcardIndex = i;
hasWildcard = true;
break;
}
}
// if pattern does not end with wildcard, filename must end with pattern after last wildcard
if (
lastWildcardIndex < pattern.length - 1 &&
!filename.endsWith(pattern.slice(lastWildcardIndex + 1))
) {
return false;
}
// if pattern ends with *, filename must end with a default extension
if (pattern.endsWith('*') && !extensions.some((ext) => filename.endsWith(ext))) {
return false;
}
// for **/* , filename must start with the dir
if (pattern === GLOB_ALL_PATTERN) {
return filename.startsWith(`${dir}/`);
}
const resolvedPattern = resolve2posix(dir, pattern);
// filename must start with part of pattern that comes before first wildcard
let firstWildcardIndex = -1;
for (let i = 0; i < resolvedPattern.length; i++) {
if (resolvedPattern[i] === '*' || resolvedPattern[i] === '?') {
firstWildcardIndex = i;
hasWildcard = true;
break;
}
}
if (
firstWildcardIndex > 1 &&
!filename.startsWith(resolvedPattern.slice(0, firstWildcardIndex - 1))
) {
return false;
}
// if no wildcard in pattern, filename must be equal to resolved pattern
if (!hasWildcard) {
return filename === resolvedPattern;
}
// complex pattern, use regex to check it
if (PATTERN_REGEX_CACHE.has(resolvedPattern)) {
return PATTERN_REGEX_CACHE.get(resolvedPattern).test(filename);
}
const regex = pattern2regex(resolvedPattern, allowJs);
PATTERN_REGEX_CACHE.set(resolvedPattern, regex);
return regex.test(filename);
});
}
/**
* @param {string} resolvedPattern
* @param {boolean} allowJs
* @returns {RegExp}
*/
function pattern2regex(resolvedPattern, allowJs) {
let regexStr = '^';
for (let i = 0; i < resolvedPattern.length; i++) {
const char = resolvedPattern[i];
if (char === '?') {
regexStr += '[^\\/]';
continue;
}
if (char === '*') {
if (resolvedPattern[i + 1] === '*' && resolvedPattern[i + 2] === '/') {
i += 2;
regexStr += '(?:[^\\/]*\\/)*'; // zero or more path segments
continue;
}
regexStr += '[^\\/]*';
continue;
}
if ('/.+^${}()|[]\\'.includes(char)) {
regexStr += `\\`;
}
regexStr += char;
}
// add known file endings if pattern ends on *
if (resolvedPattern.endsWith('*')) {
regexStr += allowJs ? TSJS_EXTENSIONS_RE_GROUP : TS_EXTENSIONS_RE_GROUP;
}
regexStr += '$';
return new RegExp(regexStr);
}
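// Worked example of pattern2regex() (the path is illustrative): `**/` expands to
// zero or more path segments, and a trailing `*` also requires a known TS
// extension because allowJs is false here.
//
//   const re = pattern2regex('/proj/src/**/*', false);
//   // -> /^\/proj\/src\/(?:[^\/]*\/)*[^\/]*\.(?:ts|tsx|mts|cts)$/
//   re.test('/proj/src/lib/util.ts'); // true
//   re.test('/proj/src/lib/util.js'); // false (allowJs is false)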
/**
* replace tokens like ${configDir}
* @param {any} tsconfig
* @param {string} configDir
* @returns {any}
*/
function replaceTokens(tsconfig, configDir) {
return JSON.parse(
JSON.stringify(tsconfig)
// replace ${configDir}, accounting for rebaseRelative emitted ../${configDir}
.replaceAll(/"(?:\.\.\/)*\${configDir}/g, `"${native2posix(configDir)}`)
);
}
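// Worked example for replaceTokens() (paths are illustrative): the substitution
// happens on the serialized JSON, so every string value is covered at once.
//
//   replaceTokens({ compilerOptions: { outDir: '${configDir}/dist' } }, '/proj')
//   // -> { compilerOptions: { outDir: '/proj/dist' } }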
/**
* find the closest tsconfig.json file
*
* @param {string} filename - path to file to find tsconfig for (absolute or relative to cwd)
* @param {import('./public.d.ts').TSConfckFindOptions} [options] - options
* @returns {Promise<string|null>} absolute path to closest tsconfig.json or null if not found
*/
async function find(filename, options) {
let dir = path$n.dirname(path$n.resolve(filename));
if (options?.ignoreNodeModules && isInNodeModules(dir)) {
return null;
}
const cache = options?.cache;
const configName = options?.configName ?? 'tsconfig.json';
if (cache?.hasConfigPath(dir, configName)) {
return cache.getConfigPath(dir, configName);
}
const { /** @type {Promise<string|null>} */ promise, resolve, reject } = makePromise();
if (options?.root && !path$n.isAbsolute(options.root)) {
options.root = path$n.resolve(options.root);
}
findUp(dir, { promise, resolve, reject }, options);
return promise;
}
/**
*
* @param {string} dir
* @param {{promise:Promise<string|null>,resolve:(result:string|null)=>void,reject:(err:any)=>void}} madePromise
* @param {import('./public.d.ts').TSConfckFindOptions} [options] - options
*/
function findUp(dir, { resolve, reject, promise }, options) {
const { cache, root, configName } = options ?? {};
if (cache) {
if (cache.hasConfigPath(dir, configName)) {
let cached;
try {
cached = cache.getConfigPath(dir, configName);
} catch (e) {
reject(e);
return;
}
if (cached?.then) {
cached.then(resolve).catch(reject);
} else {
resolve(cached);
}
} else {
cache.setConfigPath(dir, promise, configName);
}
}
const tsconfig = path$n.join(dir, options?.configName ?? 'tsconfig.json');
fs__default.stat(tsconfig, (err, stats) => {
if (stats && (stats.isFile() || stats.isFIFO())) {
resolve(tsconfig);
} else if (err?.code !== 'ENOENT') {
reject(err);
} else {
let parent;
if (root === dir || (parent = path$n.dirname(dir)) === dir) {
resolve(null);
} else {
findUp(parent, { promise, resolve, reject }, options);
}
}
});
}
/*
this file contains code from strip-bom and strip-json-comments by Sindre Sorhus
https://github.com/sindresorhus/strip-json-comments/blob/v4.0.0/index.js
https://github.com/sindresorhus/strip-bom/blob/v5.0.0/index.js
licensed under MIT, see ../LICENSE
*/
/**
* convert content of tsconfig.json to regular json
*
* @param {string} tsconfigJson - content of tsconfig.json
* @returns {string} content as regular json, comments and dangling commas have been replaced with whitespace
*/
function toJson(tsconfigJson) {
const stripped = stripDanglingComma(stripJsonComments(stripBom(tsconfigJson)));
if (stripped.trim() === '') {
// only whitespace left after stripping, return empty object so that JSON.parse still works
return '{}';
} else {
return stripped;
}
}
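// Worked example for toJson(): comments and dangling commas are blanked out with
// whitespace, so the result stays valid JSON with positions preserved.
//
//   JSON.parse(toJson('{ /* jsonc */ "include": ["src"], }'))
//   // -> { include: ['src'] }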
/**
* replace dangling commas from pseudo-json string with single space
* implementation heavily inspired by strip-json-comments
*
* @param {string} pseudoJson
* @returns {string}
*/
function stripDanglingComma(pseudoJson) {
let insideString = false;
let offset = 0;
let result = '';
let danglingCommaPos = null;
for (let i = 0; i < pseudoJson.length; i++) {
const currentCharacter = pseudoJson[i];
if (currentCharacter === '"') {
const escaped = isEscaped(pseudoJson, i);
if (!escaped) {
insideString = !insideString;
}
}
if (insideString) {
danglingCommaPos = null;
continue;
}
if (currentCharacter === ',') {
danglingCommaPos = i;
continue;
}
if (danglingCommaPos) {
if (currentCharacter === '}' || currentCharacter === ']') {
result += pseudoJson.slice(offset, danglingCommaPos) + ' ';
offset = danglingCommaPos + 1;
danglingCommaPos = null;
} else if (!currentCharacter.match(/\s/)) {
danglingCommaPos = null;
}
}
}
return result + pseudoJson.substring(offset);
}
// start strip-json-comments
/**
*
* @param {string} jsonString
* @param {number} quotePosition
* @returns {boolean}
*/
function isEscaped(jsonString, quotePosition) {
let index = quotePosition - 1;
let backslashCount = 0;
while (jsonString[index] === '\\') {
index -= 1;
backslashCount += 1;
}
return Boolean(backslashCount % 2);
}
/**
*
* @param {string} string
* @param {number?} start
* @param {number?} end
*/
function strip(string, start, end) {
return string.slice(start, end).replace(/\S/g, ' ');
}
const singleComment = Symbol('singleComment');
const multiComment = Symbol('multiComment');
/**
* @param {string} jsonString
* @returns {string}
*/
function stripJsonComments(jsonString) {
let isInsideString = false;
/** @type {false | symbol} */
let isInsideComment = false;
let offset = 0;
let result = '';
for (let index = 0; index < jsonString.length; index++) {
const currentCharacter = jsonString[index];
const nextCharacter = jsonString[index + 1];
if (!isInsideComment && currentCharacter === '"') {
const escaped = isEscaped(jsonString, index);
if (!escaped) {
isInsideString = !isInsideString;
}
}
if (isInsideString) {
continue;
}
if (!isInsideComment && currentCharacter + nextCharacter === '//') {
result += jsonString.slice(offset, index);
offset = index;
isInsideComment = singleComment;
index++;
} else if (isInsideComment === singleComment && currentCharacter + nextCharacter === '\r\n') {
index++;
isInsideComment = false;
result += strip(jsonString, offset, index);
offset = index;
} else if (isInsideComment === singleComment && currentCharacter === '\n') {
isInsideComment = false;
result += strip(jsonString, offset, index);
offset = index;
} else if (!isInsideComment && currentCharacter + nextCharacter === '/*') {
result += jsonString.slice(offset, index);
offset = index;
isInsideComment = multiComment;
index++;
} else if (isInsideComment === multiComment && currentCharacter + nextCharacter === '*/') {
index++;
isInsideComment = false;
result += strip(jsonString, offset, index + 1);
offset = index + 1;
}
}
return result + (isInsideComment ? strip(jsonString.slice(offset)) : jsonString.slice(offset));
}
// end strip-json-comments
// start strip-bom
/**
* @param {string} string
* @returns {string}
*/
function stripBom(string) {
// Catches EFBBBF (UTF-8 BOM) because the buffer-to-string
// conversion translates it to FEFF (UTF-16 BOM).
if (string.charCodeAt(0) === 0xfeff) {
return string.slice(1);
}
return string;
}
// end strip-bom
const not_found_result = {
tsconfigFile: null,
tsconfig: {}
};
/**
* parse the closest tsconfig.json file
*
* @param {string} filename - path to a tsconfig.json or a source file or directory (absolute or relative to cwd)
* @param {import('./public.d.ts').TSConfckParseOptions} [options] - options
* @returns {Promise<import('./public.d.ts').TSConfckParseResult>}
* @throws {TSConfckParseError}
*/
async function parse$e(filename, options) {
/** @type {import('./cache.js').TSConfckCache} */
const cache = options?.cache;
if (cache?.hasParseResult(filename)) {
return getParsedDeep(filename, cache, options);
}
const {
resolve,
reject,
/** @type {Promise<import('./public.d.ts').TSConfckParseResult>}*/
promise
} = makePromise();
cache?.setParseResult(filename, promise, true);
try {
let tsconfigFile =
(await resolveTSConfigJson(filename, cache)) || (await find(filename, options));
if (!tsconfigFile) {
resolve(not_found_result);
return promise;
}
let result;
if (filename !== tsconfigFile && cache?.hasParseResult(tsconfigFile)) {
result = await getParsedDeep(tsconfigFile, cache, options);
} else {
result = await parseFile$1(tsconfigFile, cache, filename === tsconfigFile);
await Promise.all([parseExtends(result, cache), parseReferences(result, options)]);
}
result.tsconfig = replaceTokens(result.tsconfig, path$n.dirname(tsconfigFile));
resolve(resolveSolutionTSConfig(filename, result));
} catch (e) {
reject(e);
}
return promise;
}
/**
* ensure extends and references are parsed
*
* @param {string} filename - cached file
* @param {import('./cache.js').TSConfckCache} cache - cache
* @param {import('./public.d.ts').TSConfckParseOptions} options - options
*/
async function getParsedDeep(filename, cache, options) {
const result = await cache.getParseResult(filename);
if (
(result.tsconfig.extends && !result.extended) ||
(result.tsconfig.references && !result.referenced)
) {
const promise = Promise.all([
parseExtends(result, cache),
parseReferences(result, options)
]).then(() => result);
cache.setParseResult(filename, promise, true);
return promise;
}
return result;
}
/**
*
* @param {string} tsconfigFile - path to tsconfig file
* @param {import('./cache.js').TSConfckCache} [cache] - cache
* @param {boolean} [skipCache] - skip cache
* @returns {Promise<import('./public.d.ts').TSConfckParseResult>}
*/
async function parseFile$1(tsconfigFile, cache, skipCache) {
if (
!skipCache &&
cache?.hasParseResult(tsconfigFile) &&
!cache.getParseResult(tsconfigFile)._isRootFile_
) {
return cache.getParseResult(tsconfigFile);
}
const promise = promises$1
.readFile(tsconfigFile, 'utf-8')
.then(toJson)
.then((json) => {
const parsed = JSON.parse(json);
applyDefaults(parsed, tsconfigFile);
return {
tsconfigFile,
tsconfig: normalizeTSConfig(parsed, path$n.dirname(tsconfigFile))
};
})
.catch((e) => {
throw new TSConfckParseError(
`parsing ${tsconfigFile} failed: ${e}`,
'PARSE_FILE',
tsconfigFile,
e
);
});
if (
!skipCache &&
(!cache?.hasParseResult(tsconfigFile) || !cache.getParseResult(tsconfigFile)._isRootFile_)
) {
cache?.setParseResult(tsconfigFile, promise);
}
return promise;
}
/**
* normalize to match the output of ts.parseJsonConfigFileContent
*
* @param {any} tsconfig - typescript tsconfig output
* @param {string} dir - directory
*/
function normalizeTSConfig(tsconfig, dir) {
// set baseUrl to absolute path
const baseUrl = tsconfig.compilerOptions?.baseUrl;
if (baseUrl && !baseUrl.startsWith('${') && !path$n.isAbsolute(baseUrl)) {
tsconfig.compilerOptions.baseUrl = resolve2posix(dir, baseUrl);
}
return tsconfig;
}
/**
*
* @param {import('./public.d.ts').TSConfckParseResult} result
* @param {import('./public.d.ts').TSConfckParseOptions} [options]
* @returns {Promise<void>}
*/
async function parseReferences(result, options) {
if (!result.tsconfig.references) {
return;
}
const referencedFiles = resolveReferencedTSConfigFiles(result, options);
const referenced = await Promise.all(
referencedFiles.map((file) => parseFile$1(file, options?.cache))
);
await Promise.all(referenced.map((ref) => parseExtends(ref, options?.cache)));
referenced.forEach((ref) => {
ref.solution = result;
});
result.referenced = referenced;
}
/**
* @param {import('./public.d.ts').TSConfckParseResult} result
* @param {import('./cache.js').TSConfckCache}[cache]
* @returns {Promise<void>}
*/
async function parseExtends(result, cache) {
if (!result.tsconfig.extends) {
return;
}
// use result as first element in extended
// but deep-clone tsconfig so that extendTSConfig can modify the original without affecting extended[0]
/** @type {import('./public.d.ts').TSConfckParseResult[]} */
const extended = [
{ tsconfigFile: result.tsconfigFile, tsconfig: JSON.parse(JSON.stringify(result.tsconfig)) }
];
// flatten extends graph into extended
let pos = 0;
/** @type {string[]} */
const extendsPath = [];
let currentBranchDepth = 0;
while (pos < extended.length) {
const extending = extended[pos];
extendsPath.push(extending.tsconfigFile);
if (extending.tsconfig.extends) {
// keep following this branch
currentBranchDepth += 1;
/** @type {string[]} */
let resolvedExtends;
if (!Array.isArray(extending.tsconfig.extends)) {
resolvedExtends = [resolveExtends(extending.tsconfig.extends, extending.tsconfigFile)];
} else {
// reverse because typescript 5.0 treats ['a','b','c'] as c extends b extends a
resolvedExtends = extending.tsconfig.extends
.reverse()
.map((ex) => resolveExtends(ex, extending.tsconfigFile));
}
const circularExtends = resolvedExtends.find((tsconfigFile) =>
extendsPath.includes(tsconfigFile)
);
if (circularExtends) {
const circle = extendsPath.concat([circularExtends]).join(' -> ');
throw new TSConfckParseError(
`Circular dependency in "extends": ${circle}`,
'EXTENDS_CIRCULAR',
result.tsconfigFile
);
}
// add new extends to the list directly after current
extended.splice(
pos + 1,
0,
...(await Promise.all(resolvedExtends.map((file) => parseFile$1(file, cache))))
);
} else {
// reached a leaf, backtrack to the last branching point and continue
extendsPath.splice(-currentBranchDepth);
currentBranchDepth = 0;
}
pos = pos + 1;
}
result.extended = extended;
// skip first as it is the original config
for (const ext of result.extended.slice(1)) {
extendTSConfig(result, ext);
}
}
/**
*
* @param {string} extended
* @param {string} from
* @returns {string}
*/
function resolveExtends(extended, from) {
if (extended === '..') {
// see #149
extended = '../tsconfig.json';
}
const req = createRequire$2(from);
let error;
try {
return req.resolve(extended);
} catch (e) {
error = e;
}
if (extended[0] !== '.' && !path$n.isAbsolute(extended)) {
try {
return req.resolve(`${extended}/tsconfig.json`);
} catch (e) {
error = e;
}
}
throw new TSConfckParseError(
`failed to resolve "extends":"${extended}" in ${from}`,
'EXTENDS_RESOLVE',
from,
error
);
}
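// Illustrative resolution behaviour (paths and package names are hypothetical,
// and assume the targets exist on disk):
//
//   resolveExtends('./base.json', '/proj/tsconfig.json')
//   // -> '/proj/base.json' (relative specifiers resolve next to the extending file)
//   resolveExtends('@acme/tsconfig', '/proj/tsconfig.json')
//   // -> node_modules entry, falling back to '@acme/tsconfig/tsconfig.json'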
// references, extends and custom keys are not carried over
const EXTENDABLE_KEYS = [
'compilerOptions',
'files',
'include',
'exclude',
'watchOptions',
'compileOnSave',
'typeAcquisition',
'buildOptions'
];
/**
*
* @param {import('./public.d.ts').TSConfckParseResult} extending
* @param {import('./public.d.ts').TSConfckParseResult} extended
* @returns void
*/
function extendTSConfig(extending, extended) {
const extendingConfig = extending.tsconfig;
const extendedConfig = extended.tsconfig;
const relativePath = native2posix(
path$n.relative(path$n.dirname(extending.tsconfigFile), path$n.dirname(extended.tsconfigFile))
);
for (const key of Object.keys(extendedConfig).filter((key) => EXTENDABLE_KEYS.includes(key))) {
if (key === 'compilerOptions') {
if (!extendingConfig.compilerOptions) {
extendingConfig.compilerOptions = {};
}
for (const option of Object.keys(extendedConfig.compilerOptions)) {
if (Object.prototype.hasOwnProperty.call(extendingConfig.compilerOptions, option)) {
continue; // already set
}
extendingConfig.compilerOptions[option] = rebaseRelative(
option,
extendedConfig.compilerOptions[option],
relativePath
);
}
} else if (extendingConfig[key] === undefined) {
if (key === 'watchOptions') {
extendingConfig.watchOptions = {};
for (const option of Object.keys(extendedConfig.watchOptions)) {
extendingConfig.watchOptions[option] = rebaseRelative(
option,
extendedConfig.watchOptions[option],
relativePath
);
}
} else {
extendingConfig[key] = rebaseRelative(key, extendedConfig[key], relativePath);
}
}
}
}
const REBASE_KEYS = [
// root
'files',
'include',
'exclude',
// compilerOptions
'baseUrl',
'rootDir',
'rootDirs',
'typeRoots',
'outDir',
'outFile',
'declarationDir',
// watchOptions
'excludeDirectories',
'excludeFiles'
];
/** @typedef {string | string[]} PathValue */
/**
*
* @param {string} key
* @param {PathValue} value
* @param {string} prependPath
* @returns {PathValue}
*/
function rebaseRelative(key, value, prependPath) {
if (!REBASE_KEYS.includes(key)) {
return value;
}
if (Array.isArray(value)) {
return value.map((x) => rebasePath(x, prependPath));
} else {
return rebasePath(value, prependPath);
}
}
/**
*
* @param {string} value
* @param {string} prependPath
* @returns {string}
*/
function rebasePath(value, prependPath) {
if (path$n.isAbsolute(value)) {
return value;
} else {
// relative paths use posix syntax in tsconfig
return path$n.posix.normalize(path$n.posix.join(prependPath, value));
}
}
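// Worked examples for rebaseRelative()/rebasePath() (the '../base' prefix is
// what extendTSConfig computes between the two tsconfig directories):
//
//   rebaseRelative('include', ['src/**/*'], '../base') // -> ['../base/src/**/*']
//   rebaseRelative('strict', true, '../base')          // -> true (not a path key)
//   rebasePath('/abs/out', '../base')                  // -> '/abs/out' (absolute paths kept)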
class TSConfckParseError extends Error {
/**
* error code
* @type {string}
*/
code;
/**
* error cause
* @type { Error | undefined}
*/
cause;
/**
* absolute path of tsconfig file where the error happened
* @type {string}
*/
tsconfigFile;
/**
*
* @param {string} message - error message
* @param {string} code - error code
* @param {string} tsconfigFile - path to tsconfig file
* @param {Error?} cause - cause of this error
*/
constructor(message, code, tsconfigFile, cause) {
super(message);
// Set the prototype explicitly.
Object.setPrototypeOf(this, TSConfckParseError.prototype);
this.name = TSConfckParseError.name;
this.code = code;
this.cause = cause;
this.tsconfigFile = tsconfigFile;
}
}
/**
*
* @param {any} tsconfig
* @param {string} tsconfigFile
*/
function applyDefaults(tsconfig, tsconfigFile) {
if (isJSConfig(tsconfigFile)) {
tsconfig.compilerOptions = {
...DEFAULT_JSCONFIG_COMPILER_OPTIONS,
...tsconfig.compilerOptions
};
}
}
const DEFAULT_JSCONFIG_COMPILER_OPTIONS = {
allowJs: true,
maxNodeModuleJsDepth: 2,
allowSyntheticDefaultImports: true,
skipLibCheck: true,
noEmit: true
};
/**
* @param {string} configFileName
*/
function isJSConfig(configFileName) {
return path$n.basename(configFileName) === 'jsconfig.json';
}
/** @template T */
class TSConfckCache {
/**
* clear cache, use this if you have a long running process and tsconfig files have been added, changed or deleted
*/
clear() {
this.#configPaths.clear();
this.#parsed.clear();
}
/**
* has cached closest config for files in dir
* @param {string} dir
* @param {string} [configName=tsconfig.json]
* @returns {boolean}
*/
hasConfigPath(dir, configName = 'tsconfig.json') {
return this.#configPaths.has(`${dir}/${configName}`);
}
/**
* get cached closest tsconfig for files in dir
* @param {string} dir
* @param {string} [configName=tsconfig.json]
* @returns {Promise<string|null>|string|null}
* @throws {unknown} if cached value is an error
*/
getConfigPath(dir, configName = 'tsconfig.json') {
const key = `${dir}/${configName}`;
const value = this.#configPaths.get(key);
if (value == null || value.length || value.then) {
return value;
} else {
throw value;
}
}
/**
* has parsed tsconfig for file
* @param {string} file
* @returns {boolean}
*/
hasParseResult(file) {
return this.#parsed.has(file);
}
/**
* get parsed tsconfig for file
* @param {string} file
* @returns {Promise<T>|T}
* @throws {unknown} if cached value is an error
*/
getParseResult(file) {
const value = this.#parsed.get(file);
if (value.then || value.tsconfig) {
return value;
} else {
throw value; // cached error, rethrow
}
}
/**
* @internal
* @private
* @param {string} file
* @param {Promise<T>} result
* @param {boolean} isRootFile whether this file is the one the parse() API was invoked for, used to distinguish it from the regular cache entries created by parseFile()
*/
setParseResult(file, result, isRootFile = false) {
// _isRootFile_ is a temporary property on the Promise result, used to prevent a deadlock with the cache
Object.defineProperty(result, '_isRootFile_', {
value: isRootFile,
writable: false,
enumerable: false,
configurable: false
});
this.#parsed.set(file, result);
result
.then((parsed) => {
if (this.#parsed.get(file) === result) {
this.#parsed.set(file, parsed);
}
})
.catch((e) => {
if (this.#parsed.get(file) === result) {
this.#parsed.set(file, e);
}
});
}
/**
* @internal
* @private
* @param {string} dir
* @param {Promise<string|null>} configPath
* @param {string} [configName=tsconfig.json]
*/
setConfigPath(dir, configPath, configName = 'tsconfig.json') {
const key = `${dir}/${configName}`;
this.#configPaths.set(key, configPath);
configPath
.then((path) => {
if (this.#configPaths.get(key) === configPath) {
this.#configPaths.set(key, path);
}
})
.catch((e) => {
if (this.#configPaths.get(key) === configPath) {
this.#configPaths.set(key, e);
}
});
}
/**
* map directories to their closest tsconfig.json
* @internal
* @private
* @type {Map<string,(Promise<string|null>|string|null)>}
*/
#configPaths = new Map();
/**
* map files to their parsed tsconfig result
* @internal
* @private
* @type {Map<string,(Promise<T>|T)> }
*/
#parsed = new Map();
}
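// Minimal usage sketch tying the cache to parse$e() (the file path is
// illustrative; run inside an async function). Repeated parses for files under
// the same project reuse the cached find and parse promises.
//
//   const cache = new TSConfckCache();
//   const { tsconfigFile, tsconfig } = await parse$e('/proj/src/main.ts', { cache });
//   console.log(tsconfigFile, tsconfig.compilerOptions?.target);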
const debug$h = createDebugger("vite:esbuild");
const IIFE_BEGIN_RE = /(?:const|var)\s+\S+\s*=\s*function\([^()]*\)\s*\{\s*"use strict";/;
const validExtensionRE = /\.\w+$/;
const jsxExtensionsRE = /\.(?:j|t)sx\b/;
const defaultEsbuildSupported = {
"dynamic-import": true,
"import-meta": true
};
let server;
async function transformWithEsbuild(code, filename, options, inMap) {
let loader = options?.loader;
if (!loader) {
const ext = path$n.extname(validExtensionRE.test(filename) ? filename : cleanUrl(filename)).slice(1);
if (ext === "cjs" || ext === "mjs") {
loader = "js";
} else if (ext === "cts" || ext === "mts") {
loader = "ts";
} else {
loader = ext;
}
}
let tsconfigRaw = options?.tsconfigRaw;
if (typeof tsconfigRaw !== "string") {
const meaningfulFields = [
"alwaysStrict",
"experimentalDecorators",
"importsNotUsedAsValues",
"jsx",
"jsxFactory",
"jsxFragmentFactory",
"jsxImportSource",
"preserveValueImports",
"target",
"useDefineForClassFields",
"verbatimModuleSyntax"
];
const compilerOptionsForFile = {};
if (loader === "ts" || loader === "tsx") {
const loadedTsconfig = await loadTsconfigJsonForFile(filename);
const loadedCompilerOptions = loadedTsconfig.compilerOptions ?? {};
for (const field of meaningfulFields) {
if (field in loadedCompilerOptions) {
compilerOptionsForFile[field] = loadedCompilerOptions[field];
}
}
}
const compilerOptions = {
...compilerOptionsForFile,
...tsconfigRaw?.compilerOptions
};
if (compilerOptions.useDefineForClassFields === void 0 && compilerOptions.target === void 0) {
compilerOptions.useDefineForClassFields = false;
}
if (options) {
options.jsx && (compilerOptions.jsx = void 0);
options.jsxFactory && (compilerOptions.jsxFactory = void 0);
options.jsxFragment && (compilerOptions.jsxFragmentFactory = void 0);
options.jsxImportSource && (compilerOptions.jsxImportSource = void 0);
}
tsconfigRaw = {
...tsconfigRaw,
compilerOptions
};
}
const resolvedOptions = {
sourcemap: true,
// ensure source file name contains full query
sourcefile: filename,
...options,
loader,
tsconfigRaw
};
delete resolvedOptions.include;
delete resolvedOptions.exclude;
delete resolvedOptions.jsxInject;
try {
const result = await transform$1(code, resolvedOptions);
let map;
if (inMap && resolvedOptions.sourcemap) {
const nextMap = JSON.parse(result.map);
nextMap.sourcesContent = [];
map = combineSourcemaps(filename, [
nextMap,
inMap
]);
} else {
map = resolvedOptions.sourcemap && resolvedOptions.sourcemap !== "inline" ? JSON.parse(result.map) : { mappings: "" };
}
return {
...result,
map
};
} catch (e) {
debug$h?.(`esbuild error with options used: `, resolvedOptions);
if (e.errors) {
e.frame = "";
e.errors.forEach((m) => {
if (m.text === "Experimental decorators are not currently enabled" || m.text === "Parameter decorators only work when experimental decorators are enabled") {
m.text += '. Vite 5 now uses esbuild 0.18 and you need to enable them by adding "experimentalDecorators": true in your "tsconfig.json" file.';
}
e.frame += `
` + prettifyMessage(m, code);
});
e.loc = e.errors[0].location;
}
throw e;
}
}
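// Usage sketch (run inside an async function; the snippet and filename are
// illustrative). The loader is inferred from the extension and compiler options
// are merged in from the nearest tsconfig via loadTsconfigJsonForFile():
//
//   const { code, map } = await transformWithEsbuild('const n: number = 1', '/proj/src/n.ts');
//   // code -> roughly 'const n = 1;\n', map -> a standard sourcemap object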
function esbuildPlugin(config) {
const options = config.esbuild;
const { jsxInject, include, exclude, ...esbuildTransformOptions } = options;
const filter = createFilter(include || /\.(m?ts|[jt]sx)$/, exclude || /\.js$/);
const transformOptions = {
target: "esnext",
charset: "utf8",
...esbuildTransformOptions,
minify: false,
minifyIdentifiers: false,
minifySyntax: false,
minifyWhitespace: false,
treeShaking: false,
// keepNames is not needed when minify is disabled.
// Also transforming multiple times with keepNames enabled breaks
// tree-shaking. (#9164)
keepNames: false,
supported: {
...defaultEsbuildSupported,
...esbuildTransformOptions.supported
}
};
return {
name: "vite:esbuild",
configureServer(_server) {
server = _server;
server.watcher.on("add", reloadOnTsconfigChange).on("change", reloadOnTsconfigChange).on("unlink", reloadOnTsconfigChange);
},
buildEnd() {
server = null;
},
async transform(code, id) {
if (filter(id) || filter(cleanUrl(id))) {
const result = await transformWithEsbuild(code, id, transformOptions);
if (result.warnings.length) {
result.warnings.forEach((m) => {
this.warn(prettifyMessage(m, code));
});
}
if (jsxInject && jsxExtensionsRE.test(id)) {
result.code = jsxInject + ";" + result.code;
}
return {
code: result.code,
map: result.map
};
}
}
};
}
const rollupToEsbuildFormatMap = {
es: "esm",
cjs: "cjs",
// passing `var Lib = (() => {})()` to esbuild with format = "iife"
// will turn it into `(() => { var Lib = (() => {})() })()`,
// so we remove the format config to tell esbuild not to do this
//
// although esbuild doesn't change the format, there is still a possibility
// that `{ treeShaking: true }` removes a top-level no-side-effect variable
// like `var Lib = 1`, which becomes `` after esbuild transforms it,
// but thankfully rollup does not do this optimization now
iife: void 0
};
const buildEsbuildPlugin = (config) => {
return {
name: "vite:esbuild-transpile",
async renderChunk(code, chunk, opts) {
if (opts.__vite_skip_esbuild__) {
return null;
}
const options = resolveEsbuildTranspileOptions(config, opts.format);
if (!options) {
return null;
}
const res = await transformWithEsbuild(code, chunk.fileName, options);
if (config.build.lib) {
const esbuildCode = res.code;
const contentIndex = opts.format === "iife" ? Math.max(esbuildCode.search(IIFE_BEGIN_RE), 0) : opts.format === "umd" ? esbuildCode.indexOf(`(function(`) : 0;
if (contentIndex > 0) {
const esbuildHelpers = esbuildCode.slice(0, contentIndex);
res.code = esbuildCode.slice(contentIndex).replace(`"use strict";`, `"use strict";` + esbuildHelpers);
}
}
return res;
}
};
};
function resolveEsbuildTranspileOptions(config, format) {
const target = config.build.target;
const minify = config.build.minify === "esbuild";
if ((!target || target === "esnext") && !minify) {
return null;
}
const isEsLibBuild = config.build.lib && format === "es";
const esbuildOptions = config.esbuild || {};
const options = {
charset: "utf8",
...esbuildOptions,
loader: "js",
target: target || void 0,
format: rollupToEsbuildFormatMap[format],
supported: {
...defaultEsbuildSupported,
...esbuildOptions.supported
}
};
if (!minify) {
return {
...options,
minify: false,
minifyIdentifiers: false,
minifySyntax: false,
minifyWhitespace: false,
treeShaking: false
};
}
if (options.minifyIdentifiers != null || options.minifySyntax != null || options.minifyWhitespace != null) {
if (isEsLibBuild) {
return {
...options,
minify: false,
minifyIdentifiers: options.minifyIdentifiers ?? true,
minifySyntax: options.minifySyntax ?? true,
minifyWhitespace: false,
treeShaking: true
};
} else {
return {
...options,
minify: false,
minifyIdentifiers: options.minifyIdentifiers ?? true,
minifySyntax: options.minifySyntax ?? true,
minifyWhitespace: options.minifyWhitespace ?? true,
treeShaking: true
};
}
}
if (isEsLibBuild) {
return {
...options,
minify: false,
minifyIdentifiers: true,
minifySyntax: true,
minifyWhitespace: false,
treeShaking: true
};
} else {
return {
...options,
minify: true,
treeShaking: true
};
}
}
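// Worked example (config objects are abbreviated): with the default target and
// no esbuild minification there is nothing to do, so renderChunk skips esbuild
// entirely.
//
//   resolveEsbuildTranspileOptions(
//     { build: { target: 'esnext', minify: false, lib: false }, esbuild: {} },
//     'es'
//   );
//   // -> null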
function prettifyMessage(m, code) {
let res = colors$1.yellow(m.text);
if (m.location) {
res += `
` + generateCodeFrame(code, m.location);
}
return res + `
`;
}
let tsconfckCache;
async function loadTsconfigJsonForFile(filename) {
try {
if (!tsconfckCache) {
tsconfckCache = new TSConfckCache();
}
const result = await parse$e(filename, {
cache: tsconfckCache,
ignoreNodeModules: true
});
if (server && result.tsconfigFile) {
ensureWatchedFile(server.watcher, result.tsconfigFile, server.config.root);
}
return result.tsconfig;
} catch (e) {
if (e instanceof TSConfckParseError) {
if (server && e.tsconfigFile) {
ensureWatchedFile(server.watcher, e.tsconfigFile, server.config.root);
}
}
throw e;
}
}
async function reloadOnTsconfigChange(changedFile) {
if (!server) return;
if (path$n.basename(changedFile) === "tsconfig.json" || changedFile.endsWith(".json") && tsconfckCache?.hasParseResult(changedFile)) {
server.config.logger.info(
`changed tsconfig file detected: ${changedFile} - Clearing cache and forcing full-reload to ensure TypeScript is compiled with updated config values.`,
{ clear: server.config.clearScreen, timestamp: true }
);
server.moduleGraph.invalidateAll();
tsconfckCache?.clear();
if (server) {
server.hot.send({
type: "full-reload",
path: "*"
});
}
}
}
// src/realWorker.ts
var Worker = class {
/** @internal */
_code;
/** @internal */
_parentFunctions;
/** @internal */
_max;
/** @internal */
_pool;
/** @internal */
_idlePool;
/** @internal */
_queue;
constructor(fn, options = {}) {
this._code = genWorkerCode(fn, options.parentFunctions ?? {});
this._parentFunctions = options.parentFunctions ?? {};
const defaultMax = Math.max(
1,
// os.availableParallelism is available from Node.js 18.14.0
(os$5.availableParallelism?.() ?? os$5.cpus().length) - 1
);
this._max = options.max || defaultMax;
this._pool = [];
this._idlePool = [];
this._queue = [];
}
async run(...args) {
const worker = await this._getAvailableWorker();
return new Promise((resolve, reject) => {
worker.currentResolve = resolve;
worker.currentReject = reject;
worker.postMessage({ type: "run", args });
});
}
stop() {
this._pool.forEach((w) => w.unref());
this._queue.forEach(
([, reject]) => reject(
new Error("Main worker pool stopped before a worker was available.")
)
);
this._pool = [];
this._idlePool = [];
this._queue = [];
}
/** @internal */
async _getAvailableWorker() {
if (this._idlePool.length) {
return this._idlePool.shift();
}
if (this._pool.length < this._max) {
const worker = new Worker$1(this._code, { eval: true });
worker.on("message", async (args) => {
if (args.type === "run") {
if ("result" in args) {
worker.currentResolve && worker.currentResolve(args.result);
worker.currentResolve = null;
} else {
if (args.error instanceof ReferenceError) {
args.error.message += ". Maybe you forgot to pass the function to parentFunction?";
}
worker.currentReject && worker.currentReject(args.error);
worker.currentReject = null;
}
this._assignDoneWorker(worker);
} else if (args.type === "parentFunction") {
try {
const result = await this._parentFunctions[args.name](...args.args);
worker.postMessage({ type: "parentFunction", id: args.id, result });
} catch (e) {
worker.postMessage({
type: "parentFunction",
id: args.id,
error: e
});
}
}
});
worker.on("error", (err) => {
worker.currentReject && worker.currentReject(err);
worker.currentReject = null;
});
worker.on("exit", (code) => {
const i = this._pool.indexOf(worker);
if (i > -1)
this._pool.splice(i, 1);
if (code !== 0 && worker.currentReject) {
worker.currentReject(
new Error(`Worker stopped with non-0 exit code ${code}`)
);
worker.currentReject = null;
}
});
this._pool.push(worker);
return worker;
}
let resolve;
let reject;
const onWorkerAvailablePromise = new Promise((r, rj) => {
resolve = r;
reject = rj;
});
this._queue.push([resolve, reject]);
return onWorkerAvailablePromise;
}
/** @internal */
_assignDoneWorker(worker) {
if (this._queue.length) {
const [resolve] = this._queue.shift();
resolve(worker);
return;
}
this._idlePool.push(worker);
}
};
function genWorkerCode(fn, parentFunctions) {
const createParentFunctionCaller = (parentPort) => {
let id = 0;
const resolvers = /* @__PURE__ */ new Map();
const call = (key) => async (...args) => {
id++;
let resolve, reject;
const promise = new Promise((res, rej) => {
resolve = res;
reject = rej;
});
resolvers.set(id, { resolve, reject });
parentPort.postMessage({ type: "parentFunction", id, name: key, args });
return await promise;
};
const receive = (id2, args) => {
if (resolvers.has(id2)) {
const { resolve, reject } = resolvers.get(id2);
resolvers.delete(id2);
if ("result" in args) {
resolve(args.result);
} else {
reject(args.error);
}
}
};
return { call, receive };
};
return `
const { parentPort } = require('worker_threads')
const parentFunctionCaller = (${createParentFunctionCaller.toString()})(parentPort)
const doWork = (() => {
${Object.keys(parentFunctions).map(
(key) => `const ${key} = parentFunctionCaller.call(${JSON.stringify(key)});`
).join("\n")}
return (${fn.toString()})()
})()
parentPort.on('message', async (args) => {
if (args.type === 'run') {
try {
const res = await doWork(...args.args)
parentPort.postMessage({ type: 'run', result: res })
} catch (e) {
parentPort.postMessage({ type: 'run', error: e })
}
} else if (args.type === 'parentFunction') {
parentFunctionCaller.receive(args.id, args)
}
})
`;
}
var FakeWorker = class {
/** @internal */
_fn;
constructor(fn, options = {}) {
const argsAndCode = genFakeWorkerArgsAndCode(
fn,
options.parentFunctions ?? {}
);
const require2 = createRequire$1(import.meta.url);
this._fn = new Function(...argsAndCode)(require2, options.parentFunctions);
}
async run(...args) {
try {
return await this._fn(...args);
} catch (err) {
if (err instanceof ReferenceError) {
err.message += ". Maybe you forgot to pass the function to parentFunction?";
}
throw err;
}
}
stop() {
}
};
function genFakeWorkerArgsAndCode(fn, parentFunctions) {
return [
"require",
"parentFunctions",
`
${Object.keys(parentFunctions).map((key) => `const ${key} = parentFunctions[${JSON.stringify(key)}];`).join("\n")}
return (${fn.toString()})()
`
];
}
// src/workerWithFallback.ts
var WorkerWithFallback = class {
/** @internal */
_disableReal;
/** @internal */
_realWorker;
/** @internal */
_fakeWorker;
/** @internal */
_shouldUseFake;
constructor(fn, options) {
this._disableReal = options.max !== void 0 && options.max <= 0;
this._realWorker = new Worker(fn, options);
this._fakeWorker = new FakeWorker(fn, options);
this._shouldUseFake = options.shouldUseFake;
}
async run(...args) {
const useFake = this._disableReal || this._shouldUseFake(...args);
return this[useFake ? "_fakeWorker" : "_realWorker"].run(...args);
}
stop() {
this._realWorker.stop();
this._fakeWorker.stop();
}
};
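// Usage sketch (the doubling task is illustrative; run inside an async
// function). The factory must return the actual work function, because its
// source is stringified and re-evaluated inside each worker thread:
//
//   const pool = new WorkerWithFallback(() => async (n) => n * 2, {
//     max: 2,
//     shouldUseFake: () => false
//   });
//   const doubled = await pool.run(21); // 42
//   pool.stop();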
let terserPath;
const loadTerserPath = (root) => {
if (terserPath) return terserPath;
try {
terserPath = requireResolveFromRootWithFallback(root, "terser");
} catch (e) {
if (e.code === "MODULE_NOT_FOUND") {
throw new Error(
"terser not found. Since Vite v3, terser has become an optional dependency. You need to install it."
);
} else {
const message = new Error(`terser failed to load:
${e.message}`);
message.stack = e.stack + "\n" + message.stack;
throw message;
}
}
return terserPath;
};
function terserPlugin(config) {
const { maxWorkers, ...terserOptions } = config.build.terserOptions;
const makeWorker = () => new Worker(
() => async (terserPath2, code, options) => {
const terser = require(terserPath2);
return terser.minify(code, options);
},
{
max: maxWorkers
}
);
let worker;
return {
name: "vite:terser",
async renderChunk(code, _chunk, outputOptions) {
if (config.build.minify !== "terser" && // @ts-expect-error injected by @vitejs/plugin-legacy
!outputOptions.__vite_force_terser__) {
return null;
}
if (config.build.lib && outputOptions.format === "es") {
return null;
}
worker ||= makeWorker();
const terserPath2 = loadTerserPath(config.root);
const res = await worker.run(terserPath2, code, {
safari10: true,
...terserOptions,
sourceMap: !!outputOptions.sourcemap,
module: outputOptions.format.startsWith("es"),
toplevel: outputOptions.format === "cjs"
});
return {
code: res.code,
map: res.map
};
},
closeBundle() {
worker?.stop();
}
};
}
const mimes = {
"3g2": "video/3gpp2",
"3gp": "video/3gpp",
"3gpp": "video/3gpp",
"3mf": "model/3mf",
"aac": "audio/aac",
"ac": "application/pkix-attr-cert",
"adp": "audio/adpcm",
"adts": "audio/aac",
"ai": "application/postscript",
"aml": "application/automationml-aml+xml",
"amlx": "application/automationml-amlx+zip",
"amr": "audio/amr",
"apng": "image/apng",
"appcache": "text/cache-manifest",
"appinstaller": "application/appinstaller",
"appx": "application/appx",
"appxbundle": "application/appxbundle",
"asc": "application/pgp-keys",
"atom": "application/atom+xml",
"atomcat": "application/atomcat+xml",
"atomdeleted": "application/atomdeleted+xml",
"atomsvc": "application/atomsvc+xml",
"au": "audio/basic",
"avci": "image/avci",
"avcs": "image/avcs",
"avif": "image/avif",
"aw": "application/applixware",
"bdoc": "application/bdoc",
"bin": "application/octet-stream",
"bmp": "image/bmp",
"bpk": "application/octet-stream",
"btf": "image/prs.btif",
"btif": "image/prs.btif",
"buffer": "application/octet-stream",
"ccxml": "application/ccxml+xml",
"cdfx": "application/cdfx+xml",
"cdmia": "application/cdmi-capability",
"cdmic": "application/cdmi-container",
"cdmid": "application/cdmi-domain",
"cdmio": "application/cdmi-object",
"cdmiq": "application/cdmi-queue",
"cer": "application/pkix-cert",
"cgm": "image/cgm",
"cjs": "application/node",
"class": "application/java-vm",
"coffee": "text/coffeescript",
"conf": "text/plain",
"cpl": "application/cpl+xml",
"cpt": "application/mac-compactpro",
"crl": "application/pkix-crl",
"css": "text/css",
"csv": "text/csv",
"cu": "application/cu-seeme",
"cwl": "application/cwl",
"cww": "application/prs.cww",
"davmount": "application/davmount+xml",
"dbk": "application/docbook+xml",
"deb": "application/octet-stream",
"def": "text/plain",
"deploy": "application/octet-stream",
"dib": "image/bmp",
"disposition-notification": "message/disposition-notification",
"dist": "application/octet-stream",
"distz": "application/octet-stream",
"dll": "application/octet-stream",
"dmg": "application/octet-stream",
"dms": "application/octet-stream",
"doc": "application/msword",
"dot": "application/msword",
"dpx": "image/dpx",
"drle": "image/dicom-rle",
"dsc": "text/prs.lines.tag",
"dssc": "application/dssc+der",
"dtd": "application/xml-dtd",
"dump": "application/octet-stream",
"dwd": "application/atsc-dwd+xml",
"ear": "application/java-archive",
"ecma": "application/ecmascript",
"elc": "application/octet-stream",
"emf": "image/emf",
"eml": "message/rfc822",
"emma": "application/emma+xml",
"emotionml": "application/emotionml+xml",
"eps": "application/postscript",
"epub": "application/epub+zip",
"exe": "application/octet-stream",
"exi": "application/exi",
"exp": "application/express",
"exr": "image/aces",
"ez": "application/andrew-inset",
"fdf": "application/fdf",
"fdt": "application/fdt+xml",
"fits": "image/fits",
"g3": "image/g3fax",
"gbr": "application/rpki-ghostbusters",
"geojson": "application/geo+json",
"gif": "image/gif",
"glb": "model/gltf-binary",
"gltf": "model/gltf+json",
"gml": "application/gml+xml",
"gpx": "application/gpx+xml",
"gram": "application/srgs",
"grxml": "application/srgs+xml",
"gxf": "application/gxf",
"gz": "application/gzip",
"h261": "video/h261",
"h263": "video/h263",
"h264": "video/h264",
"heic": "image/heic",
"heics": "image/heic-sequence",
"heif": "image/heif",
"heifs": "image/heif-sequence",
"hej2": "image/hej2k",
"held": "application/atsc-held+xml",
"hjson": "application/hjson",
"hlp": "application/winhlp",
"hqx": "application/mac-binhex40",
"hsj2": "image/hsj2",
"htm": "text/html",
"html": "text/html",
"ics": "text/calendar",
"ief": "image/ief",
"ifb": "text/calendar",
"iges": "model/iges",
"igs": "model/iges",
"img": "application/octet-stream",
"in": "text/plain",
"ini": "text/plain",
"ink": "application/inkml+xml",
"inkml": "application/inkml+xml",
"ipfix": "application/ipfix",
"iso": "application/octet-stream",
"its": "application/its+xml",
"jade": "text/jade",
"jar": "application/java-archive",
"jhc": "image/jphc",
"jls": "image/jls",
"jp2": "image/jp2",
"jpe": "image/jpeg",
"jpeg": "image/jpeg",
"jpf": "image/jpx",
"jpg": "image/jpeg",
"jpg2": "image/jp2",
"jpgm": "image/jpm",
"jpgv": "video/jpeg",
"jph": "image/jph",
"jpm": "image/jpm",
"jpx": "image/jpx",
"js": "text/javascript",
"json": "application/json",
"json5": "application/json5",
"jsonld": "application/ld+json",
"jsonml": "application/jsonml+json",
"jsx": "text/jsx",
"jt": "model/jt",
"jxr": "image/jxr",
"jxra": "image/jxra",
"jxrs": "image/jxrs",
"jxs": "image/jxs",
"jxsc": "image/jxsc",
"jxsi": "image/jxsi",
"jxss": "image/jxss",
"kar": "audio/midi",
"ktx": "image/ktx",
"ktx2": "image/ktx2",
"less": "text/less",
"lgr": "application/lgr+xml",
"list": "text/plain",
"litcoffee": "text/coffeescript",
"log": "text/plain",
"lostxml": "application/lost+xml",
"lrf": "application/octet-stream",
"m1v": "video/mpeg",
"m21": "application/mp21",
"m2a": "audio/mpeg",
"m2v": "video/mpeg",
"m3a": "audio/mpeg",
"m4a": "audio/mp4",
"m4p": "application/mp4",
"m4s": "video/iso.segment",
"ma": "application/mathematica",
"mads": "application/mads+xml",
"maei": "application/mmt-aei+xml",
"man": "text/troff",
"manifest": "text/cache-manifest",
"map": "application/json",
"mar": "application/octet-stream",
"markdown": "text/markdown",
"mathml": "application/mathml+xml",
"mb": "application/mathematica",
"mbox": "application/mbox",
"md": "text/markdown",
"mdx": "text/mdx",
"me": "text/troff",
"mesh": "model/mesh",
"meta4": "application/metalink4+xml",
"metalink": "application/metalink+xml",
"mets": "application/mets+xml",
"mft": "application/rpki-manifest",
"mid": "audio/midi",
"midi": "audio/midi",
"mime": "message/rfc822",
"mj2": "video/mj2",
"mjp2": "video/mj2",
"mjs": "text/javascript",
"mml": "text/mathml",
"mods": "application/mods+xml",
"mov": "video/quicktime",
"mp2": "audio/mpeg",
"mp21": "application/mp21",
"mp2a": "audio/mpeg",
"mp3": "audio/mpeg",
"mp4": "video/mp4",
"mp4a": "audio/mp4",
"mp4s": "application/mp4",
"mp4v": "video/mp4",
"mpd": "application/dash+xml",
"mpe": "video/mpeg",
"mpeg": "video/mpeg",
"mpf": "application/media-policy-dataset+xml",
"mpg": "video/mpeg",
"mpg4": "video/mp4",
"mpga": "audio/mpeg",
"mpp": "application/dash-patch+xml",
"mrc": "application/marc",
"mrcx": "application/marcxml+xml",
"ms": "text/troff",
"mscml": "application/mediaservercontrol+xml",
"msh": "model/mesh",
"msi": "application/octet-stream",
"msix": "application/msix",
"msixbundle": "application/msixbundle",
"msm": "application/octet-stream",
"msp": "application/octet-stream",
"mtl": "model/mtl",
"musd": "application/mmt-usd+xml",
"mxf": "application/mxf",
"mxmf": "audio/mobile-xmf",
"mxml": "application/xv+xml",
"n3": "text/n3",
"nb": "application/mathematica",
"nq": "application/n-quads",
"nt": "application/n-triples",
"obj": "model/obj",
"oda": "application/oda",
"oga": "audio/ogg",
"ogg": "audio/ogg",
"ogv": "video/ogg",
"ogx": "application/ogg",
"omdoc": "application/omdoc+xml",
"onepkg": "application/onenote",
"onetmp": "application/onenote",
"onetoc": "application/onenote",
"onetoc2": "application/onenote",
"opf": "application/oebps-package+xml",
"opus": "audio/ogg",
"otf": "font/otf",
"owl": "application/rdf+xml",
"oxps": "application/oxps",
"p10": "application/pkcs10",
"p7c": "application/pkcs7-mime",
"p7m": "application/pkcs7-mime",
"p7s": "application/pkcs7-signature",
"p8": "application/pkcs8",
"pdf": "application/pdf",
"pfr": "application/font-tdpfr",
"pgp": "application/pgp-encrypted",
"pkg": "application/octet-stream",
"pki": "application/pkixcmp",
"pkipath": "application/pkix-pkipath",
"pls": "application/pls+xml",
"png": "image/png",
"prc": "model/prc",
"prf": "application/pics-rules",
"provx": "application/provenance+xml",
"ps": "application/postscript",
"pskcxml": "application/pskc+xml",
"pti": "image/prs.pti",
"qt": "video/quicktime",
"raml": "application/raml+yaml",
"rapd": "application/route-apd+xml",
"rdf": "application/rdf+xml",
"relo": "application/p2p-overlay+xml",
"rif": "application/reginfo+xml",
"rl": "application/resource-lists+xml",
"rld": "application/resource-lists-diff+xml",
"rmi": "audio/midi",
"rnc": "application/relax-ng-compact-syntax",
"rng": "application/xml",
"roa": "application/rpki-roa",
"roff": "text/troff",
"rq": "application/sparql-query",
"rs": "application/rls-services+xml",
"rsat": "application/atsc-rsat+xml",
"rsd": "application/rsd+xml",
"rsheet": "application/urc-ressheet+xml",
"rss": "application/rss+xml",
"rtf": "text/rtf",
"rtx": "text/richtext",
"rusd": "application/route-usd+xml",
"s3m": "audio/s3m",
"sbml": "application/sbml+xml",
"scq": "application/scvp-cv-request",
"scs": "application/scvp-cv-response",
"sdp": "application/sdp",
"senmlx": "application/senml+xml",
"sensmlx": "application/sensml+xml",
"ser": "application/java-serialized-object",
"setpay": "application/set-payment-initiation",
"setreg": "application/set-registration-initiation",
"sgi": "image/sgi",
"sgm": "text/sgml",
"sgml": "text/sgml",
"shex": "text/shex",
"shf": "application/shf+xml",
"shtml": "text/html",
"sieve": "application/sieve",
"sig": "application/pgp-signature",
"sil": "audio/silk",
"silo": "model/mesh",
"siv": "application/sieve",
"slim": "text/slim",
"slm": "text/slim",
"sls": "application/route-s-tsid+xml",
"smi": "application/smil+xml",
"smil": "application/smil+xml",
"snd": "audio/basic",
"so": "application/octet-stream",
"spdx": "text/spdx",
"spp": "application/scvp-vp-response",
"spq": "application/scvp-vp-request",
"spx": "audio/ogg",
"sql": "application/sql",
"sru": "application/sru+xml",
"srx": "application/sparql-results+xml",
"ssdl": "application/ssdl+xml",
"ssml": "application/ssml+xml",
"stk": "application/hyperstudio",
"stl": "model/stl",
"stpx": "model/step+xml",
"stpxz": "model/step-xml+zip",
"stpz": "model/step+zip",
"styl": "text/stylus",
"stylus": "text/stylus",
"svg": "image/svg+xml",
"svgz": "image/svg+xml",
"swidtag": "application/swid+xml",
"t": "text/troff",
"t38": "image/t38",
"td": "application/urc-targetdesc+xml",
"tei": "application/tei+xml",
"teicorpus": "application/tei+xml",
"text": "text/plain",
"tfi": "application/thraud+xml",
"tfx": "image/tiff-fx",
"tif": "image/tiff",
"tiff": "image/tiff",
"toml": "application/toml",
"tr": "text/troff",
"trig": "application/trig",
"ts": "video/mp2t",
"tsd": "application/timestamped-data",
"tsv": "text/tab-separated-values",
"ttc": "font/collection",
"ttf": "font/ttf",
"ttl": "text/turtle",
"ttml": "application/ttml+xml",
"txt": "text/plain",
"u3d": "model/u3d",
"u8dsn": "message/global-delivery-status",
"u8hdr": "message/global-headers",
"u8mdn": "message/global-disposition-notification",
"u8msg": "message/global",
"ubj": "application/ubjson",
"uri": "text/uri-list",
"uris": "text/uri-list",
"urls": "text/uri-list",
"vcard": "text/vcard",
"vrml": "model/vrml",
"vtt": "text/vtt",
"vxml": "application/voicexml+xml",
"war": "application/java-archive",
"wasm": "application/wasm",
"wav": "audio/wav",
"weba": "audio/webm",
"webm": "video/webm",
"webmanifest": "application/manifest+json",
"webp": "image/webp",
"wgsl": "text/wgsl",
"wgt": "application/widget",
"wif": "application/watcherinfo+xml",
"wmf": "image/wmf",
"woff": "font/woff",
"woff2": "font/woff2",
"wrl": "model/vrml",
"wsdl": "application/wsdl+xml",
"wspolicy": "application/wspolicy+xml",
"x3d": "model/x3d+xml",
"x3db": "model/x3d+fastinfoset",
"x3dbz": "model/x3d+binary",
"x3dv": "model/x3d-vrml",
"x3dvz": "model/x3d+vrml",
"x3dz": "model/x3d+xml",
"xaml": "application/xaml+xml",
"xav": "application/xcap-att+xml",
"xca": "application/xcap-caps+xml",
"xcs": "application/calendar+xml",
"xdf": "application/xcap-diff+xml",
"xdssc": "application/dssc+xml",
"xel": "application/xcap-el+xml",
"xenc": "application/xenc+xml",
"xer": "application/patch-ops-error+xml",
"xfdf": "application/xfdf",
"xht": "application/xhtml+xml",
"xhtml": "application/xhtml+xml",
"xhvml": "application/xv+xml",
"xlf": "application/xliff+xml",
"xm": "audio/xm",
"xml": "text/xml",
"xns": "application/xcap-ns+xml",
"xop": "application/xop+xml",
"xpl": "application/xproc+xml",
"xsd": "application/xml",
"xsf": "application/prs.xsf+xml",
"xsl": "application/xml",
"xslt": "application/xml",
"xspf": "application/xspf+xml",
"xvm": "application/xv+xml",
"xvml": "application/xv+xml",
"yaml": "text/yaml",
"yang": "application/yang",
"yin": "application/yin+xml",
"yml": "text/yaml",
"zip": "application/zip"
};
function lookup(extn) {
let tmp = ('' + extn).trim().toLowerCase();
let idx = tmp.lastIndexOf('.');
return mimes[!~idx ? tmp : tmp.substring(++idx)];
}
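/*
 * Illustrative sketch (not part of the original bundle): how the extension-based
 * MIME lookup above behaves. `lookup` lowercases its input and keys the `mimes`
 * table by the text after the last dot, so a bare extension and a full filename
 * resolve the same way.
 *
 *   lookup('png');        // => 'image/png'
 *   lookup('Photo.PNG');  // => 'image/png' (case-insensitive, last extension wins)
 *   lookup('clip.mp4');   // => 'video/mp4'
 *   lookup('file.xyz');   // => undefined (unknown extensions fall through)
 */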
const publicFilesMap = /* @__PURE__ */ new WeakMap();
async function initPublicFiles(config) {
let fileNames;
try {
fileNames = await recursiveReaddir(config.publicDir);
} catch (e) {
if (e.code === ERR_SYMLINK_IN_RECURSIVE_READDIR) {
return;
}
throw e;
}
const publicFiles = new Set(
fileNames.map((fileName) => fileName.slice(config.publicDir.length))
);
publicFilesMap.set(config, publicFiles);
return publicFiles;
}
function getPublicFiles(config) {
return publicFilesMap.get(config);
}
function checkPublicFile(url, config) {
const { publicDir } = config;
if (!publicDir || url[0] !== "/") {
return;
}
const fileName = cleanUrl(url);
const publicFiles = getPublicFiles(config);
if (publicFiles) {
return publicFiles.has(fileName) ? normalizePath$3(path$n.join(publicDir, fileName)) : void 0;
}
const publicFile = normalizePath$3(path$n.join(publicDir, fileName));
if (!publicFile.startsWith(withTrailingSlash(publicDir))) {
return;
}
return fs__default.existsSync(publicFile) ? publicFile : void 0;
}
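/*
 * Illustrative sketch (not part of the original bundle), assuming a hypothetical
 * resolved config whose `publicDir` is '/proj/public' containing 'favicon.ico',
 * and relying on the `cleanUrl`/`normalizePath$3` helpers defined elsewhere in
 * this bundle to strip the query string and normalize separators:
 *
 *   checkPublicFile('/favicon.ico?v=1', config); // => '/proj/public/favicon.ico'
 *   checkPublicFile('favicon.ico', config);      // => undefined (URL must start with '/')
 *   checkPublicFile('/missing.txt', config);     // => undefined (not in the public file set)
 */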
const assetUrlRE = /__VITE_ASSET__([\w$]+)__(?:\$_(.*?)__)?/g;
const jsSourceMapRE = /\.[cm]?js\.map$/;
const assetCache = /* @__PURE__ */ new WeakMap();
const generatedAssets = /* @__PURE__ */ new WeakMap();
function registerCustomMime() {
mimes["ico"] = "image/x-icon";
mimes["flac"] = "audio/flac";
mimes["eot"] = "application/vnd.ms-fontobject";
}
function renderAssetUrlInJS(ctx, config, chunk, opts, code) {
const toRelativeRuntime = createToImportMetaURLBasedRelativeRuntime(
opts.format,
config.isWorker
);
let match;
let s;
assetUrlRE.lastIndex = 0;
while (match = assetUrlRE.exec(code)) {
s ||= new MagicString(code);
const [full, referenceId, postfix = ""] = match;
const file = ctx.getFileName(referenceId);
chunk.viteMetadata.importedAssets.add(cleanUrl(file));
const filename = file + postfix;
const replacement = toOutputFilePathInJS(
filename,
"asset",
chunk.fileName,
"js",
config,
toRelativeRuntime
);
const replacementString = typeof replacement === "string" ? JSON.stringify(encodeURIPath(replacement)).slice(1, -1) : `"+${replacement.runtime}+"`;
s.update(match.index, match.index + full.length, replacementString);
}
const publicAssetUrlMap = publicAssetUrlCache.get(config);
publicAssetUrlRE.lastIndex = 0;
while (match = publicAssetUrlRE.exec(code)) {
s ||= new MagicString(code);
const [full, hash] = match;
const publicUrl = publicAssetUrlMap.get(hash).slice(1);
const replacement = toOutputFilePathInJS(
publicUrl,
"public",
chunk.fileName,
"js",
config,
toRelativeRuntime
);
const replacementString = typeof replacement === "string" ? JSON.stringify(encodeURIPath(replacement)).slice(1, -1) : `"+${replacement.runtime}+"`;
s.update(match.index, match.index + full.length, replacementString);
}
return s;
}
function assetPlugin(config) {
registerCustomMime();
let moduleGraph;
return {
name: "vite:asset",
buildStart() {
assetCache.set(config, /* @__PURE__ */ new Map());
generatedAssets.set(config, /* @__PURE__ */ new Map());
},
configureServer(server) {
moduleGraph = server.moduleGraph;
},
resolveId(id) {
if (!config.assetsInclude(cleanUrl(id)) && !urlRE.test(id)) {
return;
}
const publicFile = checkPublicFile(id, config);
if (publicFile) {
return id;
}
},
async load(id) {
if (id[0] === "\0") {
return;
}
if (rawRE.test(id)) {
const file = checkPublicFile(id, config) || cleanUrl(id);
this.addWatchFile(file);
return `export default ${JSON.stringify(
await fsp.readFile(file, "utf-8")
)}`;
}
if (!urlRE.test(id) && !config.assetsInclude(cleanUrl(id))) {
return;
}
id = removeUrlQuery(id);
let url = await fileToUrl$1(id, config, this);
if (moduleGraph) {
const mod = moduleGraph.getModuleById(id);
if (mod && mod.lastHMRTimestamp > 0) {
url = injectQuery(url, `t=${mod.lastHMRTimestamp}`);
}
}
return {
code: `export default ${JSON.stringify(encodeURIPath(url))}`,
// Force rollup to keep this module from being shared between other entry points if it's an entrypoint.
// If the resulting chunk is empty, it will be removed in generateBundle.
moduleSideEffects: config.command === "build" && this.getModuleInfo(id)?.isEntry ? "no-treeshake" : false
};
},
renderChunk(code, chunk, opts) {
const s = renderAssetUrlInJS(this, config, chunk, opts, code);
if (s) {
return {
code: s.toString(),
map: config.build.sourcemap ? s.generateMap({ hires: "boundary" }) : null
};
} else {
return null;
}
},
generateBundle(_, bundle) {
for (const file in bundle) {
const chunk = bundle[file];
if (chunk.type === "chunk" && chunk.isEntry && chunk.moduleIds.length === 1 && config.assetsInclude(chunk.moduleIds[0])) {
delete bundle[file];
}
}
if (config.command === "build" && config.build.ssr && !config.build.ssrEmitAssets) {
for (const file in bundle) {
if (bundle[file].type === "asset" && !file.endsWith("ssr-manifest.json") && !jsSourceMapRE.test(file)) {
delete bundle[file];
}
}
}
}
};
}
async function fileToUrl$1(id, config, ctx) {
if (config.command === "serve") {
return fileToDevUrl(id, config);
} else {
return fileToBuiltUrl(id, config, ctx);
}
}
function fileToDevUrl(id, config, skipBase = false) {
let rtn;
if (checkPublicFile(id, config)) {
rtn = id;
} else if (id.startsWith(withTrailingSlash(config.root))) {
rtn = "/" + path$n.posix.relative(config.root, id);
} else {
rtn = path$n.posix.join(FS_PREFIX, id);
}
if (skipBase) {
return rtn;
}
const base = joinUrlSegments(config.server?.origin ?? "", config.decodedBase);
return joinUrlSegments(base, removeLeadingSlash(rtn));
}
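/*
 * Illustrative sketch (not part of the original bundle), assuming a hypothetical
 * dev config with `root` '/proj', `decodedBase` '/' and no `server.origin`, and
 * relying on `joinUrlSegments`/`removeLeadingSlash` defined elsewhere in this
 * bundle:
 *
 *   fileToDevUrl('/proj/src/logo.png', config);  // => '/src/logo.png' (inside root: served root-relative)
 *   fileToDevUrl('/other/lib/icon.svg', config); // => '/@fs/other/lib/icon.svg' (outside root: FS_PREFIX url)
 *
 * Files found in `publicDir` keep their URL as-is before the base is applied.
 */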
function getPublicAssetFilename(hash, config) {
return publicAssetUrlCache.get(config)?.get(hash);
}
const publicAssetUrlCache = /* @__PURE__ */ new WeakMap();
const publicAssetUrlRE = /__VITE_PUBLIC_ASSET__([a-z\d]{8})__/g;
function publicFileToBuiltUrl(url, config) {
if (config.command !== "build") {
return joinUrlSegments(config.decodedBase, url);
}
const hash = getHash(url);
let cache = publicAssetUrlCache.get(config);
if (!cache) {
cache = /* @__PURE__ */ new Map();
publicAssetUrlCache.set(config, cache);
}
if (!cache.get(hash)) {
cache.set(hash, url);
}
return `__VITE_PUBLIC_ASSET__${hash}__`;
}
const GIT_LFS_PREFIX = Buffer$1.from("version https://git-lfs.github.com");
function isGitLfsPlaceholder(content) {
if (content.length < GIT_LFS_PREFIX.length) return false;
return GIT_LFS_PREFIX.compare(content, 0, GIT_LFS_PREFIX.length) === 0;
}
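/*
 * Illustrative sketch (not part of the original bundle): Git LFS pointer files
 * are small text stubs that begin with "version https://git-lfs.github.com", so
 * the prefix comparison above is enough to detect an asset that was never
 * actually downloaded.
 *
 *   isGitLfsPlaceholder(Buffer.from('version https://git-lfs.github.com/spec/v1\noid sha256:abc')); // => true
 *   isGitLfsPlaceholder(Buffer.from('<svg xmlns="http://www.w3.org/2000/svg"/>'));                  // => false
 */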
async function fileToBuiltUrl(id, config, pluginContext, skipPublicCheck = false, forceInline) {
if (!skipPublicCheck && checkPublicFile(id, config)) {
return publicFileToBuiltUrl(id, config);
}
const cache = assetCache.get(config);
const cached = cache.get(id);
if (cached) {
return cached;
}
const file = cleanUrl(id);
const content = await fsp.readFile(file);
let url;
if (shouldInline(config, file, id, content, pluginContext, forceInline)) {
if (config.build.lib && isGitLfsPlaceholder(content)) {
config.logger.warn(
colors$1.yellow(`Inlined file ${id} was not downloaded via Git LFS`)
);
}
if (file.endsWith(".svg")) {
url = svgToDataURL(content);
} else {
const mimeType = lookup(file) ?? "application/octet-stream";
url = `data:${mimeType};base64,${content.toString("base64")}`;
}
} else {
const { search, hash } = parse$h(id);
const postfix = (search || "") + (hash || "");
const originalFileName = normalizePath$3(path$n.relative(config.root, file));
const referenceId = pluginContext.emitFile({
type: "asset",
// Ignore directory structure for asset file names
name: path$n.basename(file),
originalFileName,
source: content
});
generatedAssets.get(config).set(referenceId, { originalFileName });
url = `__VITE_ASSET__${referenceId}__${postfix ? `$_${postfix}__` : ``}`;
}
cache.set(id, url);
return url;
}
async function urlToBuiltUrl(url, importer, config, pluginContext, forceInline) {
if (checkPublicFile(url, config)) {
return publicFileToBuiltUrl(url, config);
}
const file = url[0] === "/" ? path$n.join(config.root, url) : path$n.join(path$n.dirname(importer), url);
return fileToBuiltUrl(
file,
config,
pluginContext,
// skip public check since we just did it above
true,
forceInline
);
}
const shouldInline = (config, file, id, content, pluginContext, forceInline) => {
if (config.build.lib) return true;
if (pluginContext.getModuleInfo(id)?.isEntry) return false;
if (forceInline !== void 0) return forceInline;
let limit;
if (typeof config.build.assetsInlineLimit === "function") {
const userShouldInline = config.build.assetsInlineLimit(file, content);
if (userShouldInline != null) return userShouldInline;
limit = DEFAULT_ASSETS_INLINE_LIMIT;
} else {
limit = Number(config.build.assetsInlineLimit);
}
if (file.endsWith(".html")) return false;
if (file.endsWith(".svg") && id.includes("#")) return false;
return content.length < limit && !isGitLfsPlaceholder(content);
};
const nestedQuotesRE = /"[^"']*'[^"]*"|'[^'"]*"[^']*'/;
function svgToDataURL(content) {
const stringContent = content.toString();
if (stringContent.includes("<text") || stringContent.includes("<foreignObject") || nestedQuotesRE.test(stringContent)) {
return `data:image/svg+xml;base64,${content.toString("base64")}`;
} else {
return "data:image/svg+xml," + stringContent.trim().replaceAll(/>\s+</g, "><").replaceAll('"', "'").replaceAll("%", "%25").replaceAll("#", "%23").replaceAll("<", "%3c").replaceAll(">", "%3e").replaceAll(/\s+/g, "%20");
}
}
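/*
 * Illustrative sketch (not part of the original bundle): simple SVGs are inlined
 * as a readable, percent-escaped data URI, while SVGs containing <text>,
 * <foreignObject> or nested quotes fall back to base64.
 *
 *   svgToDataURL(Buffer.from('<svg fill="red"/>'));
 *   // => "data:image/svg+xml,%3csvg%20fill='red'/%3e"
 *   svgToDataURL(Buffer.from('<svg><text>hi</text></svg>'));
 *   // => 'data:image/svg+xml;base64,...' (base64 of the raw content)
 */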
const endsWithJSRE = /\.[cm]?js$/;
function manifestPlugin(config) {
const manifest = {};
let outputCount;
return {
name: "vite:manifest",
buildStart() {
outputCount = 0;
},
generateBundle({ format }, bundle) {
function getChunkName(chunk) {
return getChunkOriginalFileName(chunk, config.root, format);
}
function getInternalImports(imports) {
const filteredImports = [];
for (const file of imports) {
if (bundle[file] === void 0) {
continue;
}
filteredImports.push(getChunkName(bundle[file]));
}
return filteredImports;
}
function createChunk(chunk) {
const manifestChunk = {
file: chunk.fileName,
name: chunk.name
};
if (chunk.facadeModuleId) {
manifestChunk.src = getChunkName(chunk);
}
if (chunk.isEntry) {
manifestChunk.isEntry = true;
}
if (chunk.isDynamicEntry) {
manifestChunk.isDynamicEntry = true;
}
if (chunk.imports.length) {
const internalImports = getInternalImports(chunk.imports);
if (internalImports.length > 0) {
manifestChunk.imports = internalImports;
}
}
if (chunk.dynamicImports.length) {
const internalImports = getInternalImports(chunk.dynamicImports);
if (internalImports.length > 0) {
manifestChunk.dynamicImports = internalImports;
}
}
if (chunk.viteMetadata?.importedCss.size) {
manifestChunk.css = [...chunk.viteMetadata.importedCss];
}
if (chunk.viteMetadata?.importedAssets.size) {
manifestChunk.assets = [...chunk.viteMetadata.importedAssets];
}
return manifestChunk;
}
function createAsset(asset, src, isEntry) {
const manifestChunk = {
file: asset.fileName,
src
};
if (isEntry) manifestChunk.isEntry = true;
return manifestChunk;
}
const assets = generatedAssets.get(config);
const entryCssAssetFileNames = /* @__PURE__ */ new Set();
for (const [id, asset] of assets.entries()) {
if (asset.isEntry) {
try {
const fileName = this.getFileName(id);
entryCssAssetFileNames.add(fileName);
} catch (error) {
assets.delete(id);
}
}
}
const fileNameToAsset = /* @__PURE__ */ new Map();
for (const file in bundle) {
const chunk = bundle[file];
if (chunk.type === "chunk") {
manifest[getChunkName(chunk)] = createChunk(chunk);
} else if (chunk.type === "asset" && typeof chunk.name === "string") {
const src = chunk.originalFileName ?? chunk.name;
const isEntry = entryCssAssetFileNames.has(chunk.fileName);
const asset = createAsset(chunk, src, isEntry);
const file2 = manifest[src]?.file;
if (file2 && endsWithJSRE.test(file2)) continue;
manifest[src] = asset;
fileNameToAsset.set(chunk.fileName, asset);
}
}
for (const [referenceId, { originalFileName }] of assets.entries()) {
if (!manifest[originalFileName]) {
const fileName = this.getFileName(referenceId);
const asset = fileNameToAsset.get(fileName);
if (asset) {
manifest[originalFileName] = asset;
}
}
}
outputCount++;
const output = config.build.rollupOptions?.output;
const outputLength = Array.isArray(output) ? output.length : 1;
if (outputCount >= outputLength) {
this.emitFile({
fileName: typeof config.build.manifest === "string" ? config.build.manifest : ".vite/manifest.json",
type: "asset",
source: JSON.stringify(sortObjectKeys(manifest), void 0, 2)
});
}
}
};
}
function getChunkOriginalFileName(chunk, root, format) {
if (chunk.facadeModuleId) {
let name = normalizePath$3(path$n.relative(root, chunk.facadeModuleId));
if (format === "system" && !chunk.name.includes("-legacy")) {
const ext = path$n.extname(name);
const endPos = ext.length !== 0 ? -ext.length : void 0;
name = name.slice(0, endPos) + `-legacy` + ext;
}
return name.replace(/\0/g, "");
} else {
return `_` + path$n.basename(chunk.fileName);
}
}
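/*
 * Illustrative sketch (not part of the original bundle), with hypothetical paths
 * shown POSIX-style and relying on `normalizePath$3` defined elsewhere in this
 * bundle: manifest keys are derived from the chunk's facade module relative to
 * the project root, with a '-legacy' suffix injected for SystemJS outputs whose
 * chunk name does not already carry it.
 *
 *   getChunkOriginalFileName({ facadeModuleId: '/proj/src/main.ts', name: 'main' }, '/proj', 'es');
 *   // => 'src/main.ts'
 *   getChunkOriginalFileName({ facadeModuleId: '/proj/src/main.ts', name: 'main' }, '/proj', 'system');
 *   // => 'src/main-legacy.ts'
 *   getChunkOriginalFileName({ facadeModuleId: null, fileName: 'assets/vendor-abc123.js' }, '/proj', 'es');
 *   // => '_vendor-abc123.js'
 */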
const dataUriRE = /^([^/]+\/[^;,]+)(;base64)?,([\s\S]*)$/;
const base64RE = /base64/i;
const dataUriPrefix = `\0/@data-uri/`;
function dataURIPlugin() {
let resolved;
return {
name: "vite:data-uri",
buildStart() {
resolved = /* @__PURE__ */ new Map();
},
resolveId(id) {
if (!dataUriRE.test(id)) {
return;
}
const uri = new URL$3(id);
if (uri.protocol !== "data:") {
return;
}
const match = dataUriRE.exec(uri.pathname);
if (!match) {
return;
}
const [, mime, format, data] = match;
if (mime !== "text/javascript") {
throw new Error(
`data URI with non-JavaScript mime type is not supported. If you're using legacy JavaScript MIME types (such as 'application/javascript'), please use 'text/javascript' instead.`
);
}
const base64 = format && base64RE.test(format.substring(1));
const content = base64 ? Buffer.from(data, "base64").toString("utf-8") : data;
resolved.set(id, content);
return dataUriPrefix + id;
},
load(id) {
if (id.startsWith(dataUriPrefix)) {
return resolved.get(id.slice(dataUriPrefix.length));
}
}
};
}
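/*
 * Illustrative sketch (not part of the original bundle): how the plugin above
 * resolves JavaScript data: URIs. Only the 'text/javascript' MIME type is
 * accepted; anything else throws during resolveId.
 *
 *   // import answer from 'data:text/javascript;base64,ZXhwb3J0IGRlZmF1bHQgNDI='
 *   //   resolveId returns '\0/@data-uri/data:text/javascript;base64,ZXhwb3J0IGRlZmF1bHQgNDI='
 *   //   load returns the decoded source: 'export default 42'
 *   // import x from 'data:application/javascript,...'  -> error: non-JavaScript mime type
 */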
/* es-module-lexer 1.5.4 */
var ImportType;!function(A){A[A.Static=1]="Static",A[A.Dynamic=2]="Dynamic",A[A.ImportMeta=3]="ImportMeta",A[A.StaticSourcePhase=4]="StaticSourcePhase",A[A.DynamicSourcePhase=5]="DynamicSourcePhase";}(ImportType||(ImportType={}));const A=1===new Uint8Array(new Uint16Array([1]).buffer)[0];function parse$d(E,g="@"){if(!C)return init.then((()=>parse$d(E)));const I=E.length+1,w=(C.__heap_base.value||C.__heap_base)+4*I-C.memory.buffer.byteLength;w>0&&C.memory.grow(Math.ceil(w/65536));const K=C.sa(I-1);if((A?B:Q)(E,new Uint16Array(C.memory.buffer,K,I)),!C.parse())throw Object.assign(new Error(`Parse error ${g}:${E.slice(0,C.e()).split("\n").length}:${C.e()-E.lastIndexOf("\n",C.e()-1)}`),{idx:C.e()});const D=[],o=[];for(;C.ri();){const A=C.is(),Q=C.ie(),B=C.it(),g=C.ai(),I=C.id(),w=C.ss(),K=C.se();let o;C.ip()&&(o=k(E.slice(-1===I?A-1:A,-1===I?Q+1:Q))),D.push({n:o,t:B,s:A,e:Q,ss:w,se:K,d:I,a:g});}for(;C.re();){const A=C.es(),Q=C.ee(),B=C.els(),g=C.ele(),I=E.slice(A,Q),w=I[0],K=B<0?void 0:E.slice(B,g),D=K?K[0]:"";o.push({s:A,e:Q,ls:B,le:g,n:'"'===w||"'"===w?k(I):I,ln:'"'===D||"'"===D?k(K):K});}function k(A){try{return (0, eval)(A)}catch(A){}}return [D,o,!!C.f(),!!C.ms()]}function Q(A,Q){const B=A.length;let C=0;for(;C<B;){const B=A.charCodeAt(C);Q[C++]=(255&B)<<8|B>>>8;}}function B(A,Q){const B=A.length;let C=0;for(;C<B;)Q[C]=A.charCodeAt(C++);}let C;const init=WebAssembly.compile((E="AGFzbQEAAAABKwhgAX8Bf2AEf39/fwBgAAF/YAAAYAF/AGADf39/AX9gAn9/AX9gA39/fwADMTAAAQECAgICAgICAgICAgICAgICAgIAAwMDBAQAAAUAAAAAAAMDAwAGAAAABwAGAgUEBQFwAQEBBQMBAAEGDwJ/AUHA8gALfwBBwPIACwd6FQZtZW1vcnkCAAJzYQAAAWUAAwJpcwAEAmllAAUCc3MABgJzZQAHAml0AAgCYWkACQJpZAAKAmlwAAsCZXMADAJlZQANA2VscwAOA2VsZQAPAnJpABACcmUAEQFmABICbXMAEwVwYXJzZQAUC19faGVhcF9iYXNlAwEKm0EwaAEBf0EAIAA2AoAKQQAoAtwJIgEgAEEBdGoiAEEAOwEAQQAgAEECaiIANgKECkEAIAA2AogKQQBBADYC4AlBAEEANgLwCUEAQQA2AugJQQBBADYC5AlBAEEANgL4CUEAQQA2AuwJIAEL0wEBA39BACgC8AkhBEEAQQAoAogKIgU2AvAJQQAgBDYC9AlBACAFQSRqNgKICiAEQSBqQeAJIAQbIAU2AgBBACgC1AkhBEEAKALQCSEGIAUgATYCACAFIAA2AgggBSACIAJBAmpBACAGIANGIgAbIAQgA0YiBBs2AgwgBSADNgIUIAVBADYCECAFIAI2AgQgBUEANgIgIAVBA0EBQQIgABsgBBs2AhwgBUEAKALQCSADRiICOgAYAkACQCACDQBBACgC1AkgA0cNAQtBAEEBOgCMCgsLXgEBf0EAKAL4CSIEQRBqQeQJIAQbQQAoAogKIgQ2AgBBACAENgL4CUEAIARBFGo2AogKQQBBAToAjAogBEEANgIQIAQgAzYCDCAEIAI2AgggBCABNgIEIAQgADYCAAsIAEEAKAKQCgsVAEEAKALoCSgCAEEAKALcCWtBAXULHgEBf0EAKALoCSgCBCIAQQAoAtwJa0EBdUF/IAAbCxUAQQAoAugJKAIIQQAoAtwJa0EBdQseAQF/QQAoAugJKAIMIgBBACgC3AlrQQF1QX8gABsLCwBBACgC6AkoAhwLHgEBf0EAKALoCSgCECIAQQAoAtwJa0EBdUF/IAAbCzsBAX8CQEEAKALoCSgCFCIAQQAoAtAJRw0AQX8PCwJAIABBACgC1AlHDQBBfg8LIABBACgC3AlrQQF1CwsAQQAoAugJLQAYCxUAQQAoAuwJKAIAQQAoAtwJa0EBdQsVAEEAKALsCSgCBEEAKALcCWtBAXULHgEBf0EAKALsCSgCCCIAQQAoAtwJa0EBdUF/IAAbCx4BAX9BACgC7AkoAgwiAEEAKALcCWtBAXVBfyAAGwslAQF/QQBBACgC6AkiAEEgakHgCSAAGygCACIANgLoCSAAQQBHCyUBAX9BAEEAKALsCSIAQRBqQeQJIAAbKAIAIgA2AuwJIABBAEcLCABBAC0AlAoLCABBAC0AjAoL3Q0BBX8jAEGA0ABrIgAkAEEAQQE6AJQKQQBBACgC2Ak2ApwKQQBBACgC3AlBfmoiATYCsApBACABQQAoAoAKQQF0aiICNgK0CkEAQQA6AIwKQQBBADsBlgpBAEEAOwGYCkEAQQA6AKAKQQBBADYCkApBAEEAOgD8CUEAIABBgBBqNgKkCkEAIAA2AqgKQQBBADoArAoCQAJAAkACQANAQQAgAUECaiIDNgKwCiABIAJPDQECQCADLwEAIgJBd2pBBUkNAAJAAkACQAJAAkAgAkGbf2oOBQEICAgCAAsgAkEgRg0EIAJBL0YNAyACQTtGDQIMBwtBAC8BmAoNASADEBVFDQEgAUEEakGCCEEKEC8NARAWQQAtAJQKDQFBAEEAKAKwCiIBNgKcCgwHCyADEBVFDQAgAUEEakGMCEEKEC8NABAXC0EAQQAoArAKNgKcCgwBCwJAIAEvAQQiA0EqRg0AIANBL0cNBBAYDAELQQEQGQtBACgCtAohAkEAKAKwCiEBDAALC0EAIQIgAyEBQQAtAPwJDQIMAQtBACABNgKwCkEAQQA6AJQKCwNAQQAgAUECaiIDNgKwCgJAAkACQAJAAkACQAJAIAFBACgCtApPDQAgAy8BACICQXdqQQVJDQYCQAJAAkACQAJAAkACQAJAAkACQCACQWBqDgoQDwY
PDw8PBQECAAsCQAJAAkACQCACQaB/ag4KCxISAxIBEhISAgALIAJBhX9qDgMFEQYJC0EALwGYCg0QIAMQFUUNECABQQRqQYIIQQoQLw0QEBYMEAsgAxAVRQ0PIAFBBGpBjAhBChAvDQ8QFwwPCyADEBVFDQ4gASkABELsgISDsI7AOVINDiABLwEMIgNBd2oiAUEXSw0MQQEgAXRBn4CABHFFDQwMDQtBAEEALwGYCiIBQQFqOwGYCkEAKAKkCiABQQN0aiIBQQE2AgAgAUEAKAKcCjYCBAwNC0EALwGYCiIDRQ0JQQAgA0F/aiIDOwGYCkEALwGWCiICRQ0MQQAoAqQKIANB//8DcUEDdGooAgBBBUcNDAJAIAJBAnRBACgCqApqQXxqKAIAIgMoAgQNACADQQAoApwKQQJqNgIEC0EAIAJBf2o7AZYKIAMgAUEEajYCDAwMCwJAQQAoApwKIgEvAQBBKUcNAEEAKALwCSIDRQ0AIAMoAgQgAUcNAEEAQQAoAvQJIgM2AvAJAkAgA0UNACADQQA2AiAMAQtBAEEANgLgCQtBAEEALwGYCiIDQQFqOwGYCkEAKAKkCiADQQN0aiIDQQZBAkEALQCsChs2AgAgAyABNgIEQQBBADoArAoMCwtBAC8BmAoiAUUNB0EAIAFBf2oiATsBmApBACgCpAogAUH//wNxQQN0aigCAEEERg0EDAoLQScQGgwJC0EiEBoMCAsgAkEvRw0HAkACQCABLwEEIgFBKkYNACABQS9HDQEQGAwKC0EBEBkMCQsCQAJAAkACQEEAKAKcCiIBLwEAIgMQG0UNAAJAAkAgA0FVag4EAAkBAwkLIAFBfmovAQBBK0YNAwwICyABQX5qLwEAQS1GDQIMBwsgA0EpRw0BQQAoAqQKQQAvAZgKIgJBA3RqKAIEEBxFDQIMBgsgAUF+ai8BAEFQakH//wNxQQpPDQULQQAvAZgKIQILAkACQCACQf//A3EiAkUNACADQeYARw0AQQAoAqQKIAJBf2pBA3RqIgQoAgBBAUcNACABQX5qLwEAQe8ARw0BIAQoAgRBlghBAxAdRQ0BDAULIANB/QBHDQBBACgCpAogAkEDdGoiAigCBBAeDQQgAigCAEEGRg0ECyABEB8NAyADRQ0DIANBL0ZBAC0AoApBAEdxDQMCQEEAKAL4CSICRQ0AIAEgAigCAEkNACABIAIoAgRNDQQLIAFBfmohAUEAKALcCSECAkADQCABQQJqIgQgAk0NAUEAIAE2ApwKIAEvAQAhAyABQX5qIgQhASADECBFDQALIARBAmohBAsCQCADQf//A3EQIUUNACAEQX5qIQECQANAIAFBAmoiAyACTQ0BQQAgATYCnAogAS8BACEDIAFBfmoiBCEBIAMQIQ0ACyAEQQJqIQMLIAMQIg0EC0EAQQE6AKAKDAcLQQAoAqQKQQAvAZgKIgFBA3QiA2pBACgCnAo2AgRBACABQQFqOwGYCkEAKAKkCiADakEDNgIACxAjDAULQQAtAPwJQQAvAZYKQQAvAZgKcnJFIQIMBwsQJEEAQQA6AKAKDAMLECVBACECDAULIANBoAFHDQELQQBBAToArAoLQQBBACgCsAo2ApwKC0EAKAKwCiEBDAALCyAAQYDQAGokACACCxoAAkBBACgC3AkgAEcNAEEBDwsgAEF+ahAmC/4KAQZ/QQBBACgCsAoiAEEMaiIBNgKwCkEAKAL4CSECQQEQKSEDAkACQAJAAkACQAJAAkACQAJAQQAoArAKIgQgAUcNACADEChFDQELAkACQAJAAkACQAJAAkAgA0EqRg0AIANB+wBHDQFBACAEQQJqNgKwCkEBECkhA0EAKAKwCiEEA0ACQAJAIANB//8DcSIDQSJGDQAgA0EnRg0AIAMQLBpBACgCsAohAwwBCyADEBpBAEEAKAKwCkECaiIDNgKwCgtBARApGgJAIAQgAxAtIgNBLEcNAEEAQQAoArAKQQJqNgKwCkEBECkhAwsgA0H9AEYNA0EAKAKwCiIFIARGDQ8gBSEEIAVBACgCtApNDQAMDwsLQQAgBEECajYCsApBARApGkEAKAKwCiIDIAMQLRoMAgtBAEEAOgCUCgJAAkACQAJAAkACQCADQZ9/ag4MAgsEAQsDCwsLCwsFAAsgA0H2AEYNBAwKC0EAIARBDmoiAzYCsAoCQAJAAkBBARApQZ9/ag4GABICEhIBEgtBACgCsAoiBSkAAkLzgOSD4I3AMVINESAFLwEKECFFDRFBACAFQQpqNgKwCkEAECkaC0EAKAKwCiIFQQJqQbIIQQ4QLw0QIAUvARAiAkF3aiIBQRdLDQ1BASABdEGfgIAEcUUNDQwOC0EAKAKwCiIFKQACQuyAhIOwjsA5Ug0PIAUvAQoiAkF3aiIBQRdNDQYMCgtBACAEQQpqNgKwCkEAECkaQQAoArAKIQQLQQAgBEEQajYCsAoCQEEBECkiBEEqRw0AQQBBACgCsApBAmo2ArAKQQEQKSEEC0EAKAKwCiEDIAQQLBogA0EAKAKwCiIEIAMgBBACQQBBACgCsApBfmo2ArAKDwsCQCAEKQACQuyAhIOwjsA5Ug0AIAQvAQoQIEUNAEEAIARBCmo2ArAKQQEQKSEEQQAoArAKIQMgBBAsGiADQQAoArAKIgQgAyAEEAJBAEEAKAKwCkF+ajYCsAoPC0EAIARBBGoiBDYCsAoLQQAgBEEGajYCsApBAEEAOgCUCkEBECkhBEEAKAKwCiEDIAQQLCEEQQAoArAKIQIgBEHf/wNxIgFB2wBHDQNBACACQQJqNgKwCkEBECkhBUEAKAKwCiEDQQAhBAwEC0EAQQE6AIwKQQBBACgCsApBAmo2ArAKC0EBECkhBEEAKAKwCiEDAkAgBEHmAEcNACADQQJqQawIQQYQLw0AQQAgA0EIajYCsAogAEEBEClBABArIAJBEGpB5AkgAhshAwNAIAMoAgAiA0UNBSADQgA3AgggA0EQaiEDDAALC0EAIANBfmo2ArAKDAMLQQEgAXRBn4CABHFFDQMMBAtBASEECwNAAkACQCAEDgIAAQELIAVB//8DcRAsGkEBIQQMAQsCQAJAQQAoArAKIgQgA0YNACADIAQgAyAEEAJBARApIQQCQCABQdsARw0AIARBIHJB/QBGDQQLQQAoArAKIQMCQCAEQSxHDQBBACADQQJqNgKwCkEBECkhBUEAKAKwCiEDIAVBIHJB+wBHDQILQQAgA0F+ajYCsAoLIAFB2wBHDQJBACACQX5qNgKwCg8LQQAhBAwACwsPCyACQaABRg0AIAJB+wBHDQQLQQAgBUEKajYCsApBARApIgVB+wBGDQMMAgsCQCACQVhqDgMBAwEACyACQaABRw0CC0EAIAVBEGo2ArAKAkBBARApIgVBKkcNAEEAQQAoArAKQQJqNgKwCkEBECkhBQsgBUEoRg0BC0EAKAKwCiEBIAUQLBpBACgCsAoiBSABTQ0AIAQgAyABIAUQAkEAQQAoArAKQX5qNgKwCg8LIAQgA0EAQQAQAk
EAIARBDGo2ArAKDwsQJQvcCAEGf0EAIQBBAEEAKAKwCiIBQQxqIgI2ArAKQQEQKSEDQQAoArAKIQQCQAJAAkACQAJAAkACQAJAIANBLkcNAEEAIARBAmo2ArAKAkBBARApIgNB8wBGDQAgA0HtAEcNB0EAKAKwCiIDQQJqQZwIQQYQLw0HAkBBACgCnAoiBBAqDQAgBC8BAEEuRg0ICyABIAEgA0EIakEAKALUCRABDwtBACgCsAoiA0ECakGiCEEKEC8NBgJAQQAoApwKIgQQKg0AIAQvAQBBLkYNBwsgA0EMaiEDDAELIANB8wBHDQEgBCACTQ0BQQYhAEEAIQIgBEECakGiCEEKEC8NAiAEQQxqIQMCQCAELwEMIgVBd2oiBEEXSw0AQQEgBHRBn4CABHENAQsgBUGgAUcNAgtBACADNgKwCkEBIQBBARApIQMLAkACQAJAAkAgA0H7AEYNACADQShHDQFBACgCpApBAC8BmAoiA0EDdGoiBEEAKAKwCjYCBEEAIANBAWo7AZgKIARBBTYCAEEAKAKcCi8BAEEuRg0HQQBBACgCsAoiBEECajYCsApBARApIQMgAUEAKAKwCkEAIAQQAQJAAkAgAA0AQQAoAvAJIQQMAQtBACgC8AkiBEEFNgIcC0EAQQAvAZYKIgBBAWo7AZYKQQAoAqgKIABBAnRqIAQ2AgACQCADQSJGDQAgA0EnRg0AQQBBACgCsApBfmo2ArAKDwsgAxAaQQBBACgCsApBAmoiAzYCsAoCQAJAAkBBARApQVdqDgQBAgIAAgtBAEEAKAKwCkECajYCsApBARApGkEAKALwCSIEIAM2AgQgBEEBOgAYIARBACgCsAoiAzYCEEEAIANBfmo2ArAKDwtBACgC8AkiBCADNgIEIARBAToAGEEAQQAvAZgKQX9qOwGYCiAEQQAoArAKQQJqNgIMQQBBAC8BlgpBf2o7AZYKDwtBAEEAKAKwCkF+ajYCsAoPCyAADQJBACgCsAohA0EALwGYCg0BA0ACQAJAAkAgA0EAKAK0Ck8NAEEBECkiA0EiRg0BIANBJ0YNASADQf0ARw0CQQBBACgCsApBAmo2ArAKC0EBECkhBEEAKAKwCiEDAkAgBEHmAEcNACADQQJqQawIQQYQLw0JC0EAIANBCGo2ArAKAkBBARApIgNBIkYNACADQSdHDQkLIAEgA0EAECsPCyADEBoLQQBBACgCsApBAmoiAzYCsAoMAAsLIAANAUEGIQBBACECAkAgA0FZag4EBAMDBAALIANBIkYNAwwCC0EAIANBfmo2ArAKDwtBDCEAQQEhAgtBACgCsAoiAyABIABBAXRqRw0AQQAgA0F+ajYCsAoPC0EALwGYCg0CQQAoArAKIQNBACgCtAohAANAIAMgAE8NAQJAAkAgAy8BACIEQSdGDQAgBEEiRw0BCyABIAQgAhArDwtBACADQQJqIgM2ArAKDAALCxAlCw8LQQBBACgCsApBfmo2ArAKC0cBA39BACgCsApBAmohAEEAKAK0CiEBAkADQCAAIgJBfmogAU8NASACQQJqIQAgAi8BAEF2ag4EAQAAAQALC0EAIAI2ArAKC5gBAQN/QQBBACgCsAoiAUECajYCsAogAUEGaiEBQQAoArQKIQIDQAJAAkACQCABQXxqIAJPDQAgAUF+ai8BACEDAkACQCAADQAgA0EqRg0BIANBdmoOBAIEBAIECyADQSpHDQMLIAEvAQBBL0cNAkEAIAFBfmo2ArAKDAELIAFBfmohAQtBACABNgKwCg8LIAFBAmohAQwACwuIAQEEf0EAKAKwCiEBQQAoArQKIQICQAJAA0AgASIDQQJqIQEgAyACTw0BIAEvAQAiBCAARg0CAkAgBEHcAEYNACAEQXZqDgQCAQECAQsgA0EEaiEBIAMvAQRBDUcNACADQQZqIAEgAy8BBkEKRhshAQwACwtBACABNgKwChAlDwtBACABNgKwCgtsAQF/AkACQCAAQV9qIgFBBUsNAEEBIAF0QTFxDQELIABBRmpB//8DcUEGSQ0AIABBKUcgAEFYakH//wNxQQdJcQ0AAkAgAEGlf2oOBAEAAAEACyAAQf0ARyAAQYV/akH//wNxQQRJcQ8LQQELLgEBf0EBIQECQCAAQaYJQQUQHQ0AIABBlghBAxAdDQAgAEGwCUECEB0hAQsgAQtGAQN/QQAhAwJAIAAgAkEBdCICayIEQQJqIgBBACgC3AkiBUkNACAAIAEgAhAvDQACQCAAIAVHDQBBAQ8LIAQQJiEDCyADC4MBAQJ/QQEhAQJAAkACQAJAAkACQCAALwEAIgJBRWoOBAUEBAEACwJAIAJBm39qDgQDBAQCAAsgAkEpRg0EIAJB+QBHDQMgAEF+akG8CUEGEB0PCyAAQX5qLwEAQT1GDwsgAEF+akG0CUEEEB0PCyAAQX5qQcgJQQMQHQ8LQQAhAQsgAQu0AwECf0EAIQECQAJAAkACQAJAAkACQAJAAkACQCAALwEAQZx/ag4UAAECCQkJCQMJCQQFCQkGCQcJCQgJCwJAAkAgAEF+ai8BAEGXf2oOBAAKCgEKCyAAQXxqQcoIQQIQHQ8LIABBfGpBzghBAxAdDwsCQAJAAkAgAEF+ai8BAEGNf2oOAwABAgoLAkAgAEF8ai8BACICQeEARg0AIAJB7ABHDQogAEF6akHlABAnDwsgAEF6akHjABAnDwsgAEF8akHUCEEEEB0PCyAAQXxqQdwIQQYQHQ8LIABBfmovAQBB7wBHDQYgAEF8ai8BAEHlAEcNBgJAIABBemovAQAiAkHwAEYNACACQeMARw0HIABBeGpB6AhBBhAdDwsgAEF4akH0CEECEB0PCyAAQX5qQfgIQQQQHQ8LQQEhASAAQX5qIgBB6QAQJw0EIABBgAlBBRAdDwsgAEF+akHkABAnDwsgAEF+akGKCUEHEB0PCyAAQX5qQZgJQQQQHQ8LAkAgAEF+ai8BACICQe8ARg0AIAJB5QBHDQEgAEF8akHuABAnDwsgAEF8akGgCUEDEB0hAQsgAQs0AQF/QQEhAQJAIABBd2pB//8DcUEFSQ0AIABBgAFyQaABRg0AIABBLkcgABAocSEBCyABCzABAX8CQAJAIABBd2oiAUEXSw0AQQEgAXRBjYCABHENAQsgAEGgAUYNAEEADwtBAQtOAQJ/QQAhAQJAAkAgAC8BACICQeUARg0AIAJB6wBHDQEgAEF+akH4CEEEEB0PCyAAQX5qLwEAQfUARw0AIABBfGpB3AhBBhAdIQELIAEL3gEBBH9BACgCsAohAEEAKAK0CiEBAkACQAJAA0AgACICQQJqIQAgAiABTw0BAkACQAJAIAAvAQAiA0Gkf2oOBQIDAwMBAAsgA0EkRw0CIAIvAQRB+wBHDQJBACACQQRqIgA2ArAKQQBBAC8BmAoiAkEBajsBmApBACgCpAogAkEDdGoiAkEENgIAIAIgADYCBA8LQQAgADYCsApBAEEALwGYCkF/aiIAOwGYCkEAKAKkCiAAQf//A3FBA3RqK
AIAQQNHDQMMBAsgAkEEaiEADAALC0EAIAA2ArAKCxAlCwtwAQJ/AkACQANAQQBBACgCsAoiAEECaiIBNgKwCiAAQQAoArQKTw0BAkACQAJAIAEvAQAiAUGlf2oOAgECAAsCQCABQXZqDgQEAwMEAAsgAUEvRw0CDAQLEC4aDAELQQAgAEEEajYCsAoMAAsLECULCzUBAX9BAEEBOgD8CUEAKAKwCiEAQQBBACgCtApBAmo2ArAKQQAgAEEAKALcCWtBAXU2ApAKC0MBAn9BASEBAkAgAC8BACICQXdqQf//A3FBBUkNACACQYABckGgAUYNAEEAIQEgAhAoRQ0AIAJBLkcgABAqcg8LIAELPQECf0EAIQICQEEAKALcCSIDIABLDQAgAC8BACABRw0AAkAgAyAARw0AQQEPCyAAQX5qLwEAECAhAgsgAgtoAQJ/QQEhAQJAAkAgAEFfaiICQQVLDQBBASACdEExcQ0BCyAAQfj/A3FBKEYNACAAQUZqQf//A3FBBkkNAAJAIABBpX9qIgJBA0sNACACQQFHDQELIABBhX9qQf//A3FBBEkhAQsgAQucAQEDf0EAKAKwCiEBAkADQAJAAkAgAS8BACICQS9HDQACQCABLwECIgFBKkYNACABQS9HDQQQGAwCCyAAEBkMAQsCQAJAIABFDQAgAkF3aiIBQRdLDQFBASABdEGfgIAEcUUNAQwCCyACECFFDQMMAQsgAkGgAUcNAgtBAEEAKAKwCiIDQQJqIgE2ArAKIANBACgCtApJDQALCyACCzEBAX9BACEBAkAgAC8BAEEuRw0AIABBfmovAQBBLkcNACAAQXxqLwEAQS5GIQELIAELnAQBAX8CQCABQSJGDQAgAUEnRg0AECUPC0EAKAKwCiEDIAEQGiAAIANBAmpBACgCsApBACgC0AkQAQJAIAJFDQBBACgC8AlBBDYCHAtBAEEAKAKwCkECajYCsAoCQAJAAkACQEEAECkiAUHhAEYNACABQfcARg0BQQAoArAKIQEMAgtBACgCsAoiAUECakHACEEKEC8NAUEGIQAMAgtBACgCsAoiAS8BAkHpAEcNACABLwEEQfQARw0AQQQhACABLwEGQegARg0BC0EAIAFBfmo2ArAKDwtBACABIABBAXRqNgKwCgJAQQEQKUH7AEYNAEEAIAE2ArAKDwtBACgCsAoiAiEAA0BBACAAQQJqNgKwCgJAAkACQEEBECkiAEEiRg0AIABBJ0cNAUEnEBpBAEEAKAKwCkECajYCsApBARApIQAMAgtBIhAaQQBBACgCsApBAmo2ArAKQQEQKSEADAELIAAQLCEACwJAIABBOkYNAEEAIAE2ArAKDwtBAEEAKAKwCkECajYCsAoCQEEBECkiAEEiRg0AIABBJ0YNAEEAIAE2ArAKDwsgABAaQQBBACgCsApBAmo2ArAKAkACQEEBECkiAEEsRg0AIABB/QBGDQFBACABNgKwCg8LQQBBACgCsApBAmo2ArAKQQEQKUH9AEYNAEEAKAKwCiEADAELC0EAKALwCSIBIAI2AhAgAUEAKAKwCkECajYCDAttAQJ/AkACQANAAkAgAEH//wNxIgFBd2oiAkEXSw0AQQEgAnRBn4CABHENAgsgAUGgAUYNASAAIQIgARAoDQJBACECQQBBACgCsAoiAEECajYCsAogAC8BAiIADQAMAgsLIAAhAgsgAkH//wNxC6sBAQR/AkACQEEAKAKwCiICLwEAIgNB4QBGDQAgASEEIAAhBQwBC0EAIAJBBGo2ArAKQQEQKSECQQAoArAKIQUCQAJAIAJBIkYNACACQSdGDQAgAhAsGkEAKAKwCiEEDAELIAIQGkEAQQAoArAKQQJqIgQ2ArAKC0EBECkhA0EAKAKwCiECCwJAIAIgBUYNACAFIARBACAAIAAgAUYiAhtBACABIAIbEAILIAMLcgEEf0EAKAKwCiEAQQAoArQKIQECQAJAA0AgAEECaiECIAAgAU8NAQJAAkAgAi8BACIDQaR/ag4CAQQACyACIQAgA0F2ag4EAgEBAgELIABBBGohAAwACwtBACACNgKwChAlQQAPC0EAIAI2ArAKQd0AC0kBA39BACEDAkAgAkUNAAJAA0AgAC0AACIEIAEtAAAiBUcNASABQQFqIQEgAEEBaiEAIAJBf2oiAg0ADAILCyAEIAVrIQMLIAMLC+wBAgBBgAgLzgEAAHgAcABvAHIAdABtAHAAbwByAHQAZgBvAHIAZQB0AGEAbwB1AHIAYwBlAHIAbwBtAHUAbgBjAHQAaQBvAG4AcwBzAGUAcgB0AHYAbwB5AGkAZQBkAGUAbABlAGMAbwBuAHQAaQBuAGkAbgBzAHQAYQBuAHQAeQBiAHIAZQBhAHIAZQB0AHUAcgBkAGUAYgB1AGcAZwBlAGEAdwBhAGkAdABoAHIAdwBoAGkAbABlAGkAZgBjAGEAdABjAGYAaQBuAGEAbABsAGUAbABzAABB0AkLEAEAAAACAAAAAAQAAEA5AAA=","undefined"!=typeof Buffer?Buffer.from(E,"base64"):Uint8Array.from(atob(E),(A=>A.charCodeAt(0))))).then(WebAssembly.instantiate).then((({exports:A})=>{C=A;}));var E;
var convertSourceMap$1 = {};
(function (exports) {
Object.defineProperty(exports, 'commentRegex', {
get: function getCommentRegex () {
// Groups: 1: media type, 2: MIME type, 3: charset, 4: encoding, 5: data.
return /^\s*?\/[\/\*][@#]\s+?sourceMappingURL=data:(((?:application|text)\/json)(?:;charset=([^;,]+?)?)?)?(?:;(base64))?,(.*?)$/mg;
}
});
Object.defineProperty(exports, 'mapFileCommentRegex', {
get: function getMapFileCommentRegex () {
// Matches sourceMappingURL in either // or /* comment styles.
return /(?:\/\/[@#][ \t]+?sourceMappingURL=([^\s'"`]+?)[ \t]*?$)|(?:\/\*[@#][ \t]+sourceMappingURL=([^*]+?)[ \t]*?(?:\*\/){1}[ \t]*?$)/mg;
}
});
var decodeBase64;
if (typeof Buffer !== 'undefined') {
if (typeof Buffer.from === 'function') {
decodeBase64 = decodeBase64WithBufferFrom;
} else {
decodeBase64 = decodeBase64WithNewBuffer;
}
} else {
decodeBase64 = decodeBase64WithAtob;
}
function decodeBase64WithBufferFrom(base64) {
return Buffer.from(base64, 'base64').toString();
}
function decodeBase64WithNewBuffer(base64) {
if (typeof base64 === 'number') {
throw new TypeError('The value to decode must not be of type number.');
}
return new Buffer(base64, 'base64').toString();
}
function decodeBase64WithAtob(base64) {
return decodeURIComponent(escape(atob(base64)));
}
function stripComment(sm) {
return sm.split(',').pop();
}
function readFromFileMap(sm, read) {
var r = exports.mapFileCommentRegex.exec(sm);
// for some odd reason //# .. captures in 1 and /* .. */ in 2
var filename = r[1] || r[2];
try {
var sm = read(filename);
if (sm != null && typeof sm.catch === 'function') {
return sm.catch(throwError);
} else {
return sm;
}
} catch (e) {
throwError(e);
}
function throwError(e) {
throw new Error('An error occurred while trying to read the map file at ' + filename + '\n' + e.stack);
}
}
function Converter (sm, opts) {
opts = opts || {};
if (opts.hasComment) {
sm = stripComment(sm);
}
if (opts.encoding === 'base64') {
sm = decodeBase64(sm);
} else if (opts.encoding === 'uri') {
sm = decodeURIComponent(sm);
}
if (opts.isJSON || opts.encoding) {
sm = JSON.parse(sm);
}
this.sourcemap = sm;
}
Converter.prototype.toJSON = function (space) {
return JSON.stringify(this.sourcemap, null, space);
};
if (typeof Buffer !== 'undefined') {
if (typeof Buffer.from === 'function') {
Converter.prototype.toBase64 = encodeBase64WithBufferFrom;
} else {
Converter.prototype.toBase64 = encodeBase64WithNewBuffer;
}
} else {
Converter.prototype.toBase64 = encodeBase64WithBtoa;
}
function encodeBase64WithBufferFrom() {
var json = this.toJSON();
return Buffer.from(json, 'utf8').toString('base64');
}
function encodeBase64WithNewBuffer() {
var json = this.toJSON();
if (typeof json === 'number') {
throw new TypeError('The json to encode must not be of type number.');
}
return new Buffer(json, 'utf8').toString('base64');
}
function encodeBase64WithBtoa() {
var json = this.toJSON();
return btoa(unescape(encodeURIComponent(json)));
}
Converter.prototype.toURI = function () {
var json = this.toJSON();
return encodeURIComponent(json);
};
Converter.prototype.toComment = function (options) {
var encoding, content, data;
if (options != null && options.encoding === 'uri') {
encoding = '';
content = this.toURI();
} else {
encoding = ';base64';
content = this.toBase64();
}
data = 'sourceMappingURL=data:application/json;charset=utf-8' + encoding + ',' + content;
return options != null && options.multiline ? '/*# ' + data + ' */' : '//# ' + data;
};
// returns copy instead of original
Converter.prototype.toObject = function () {
return JSON.parse(this.toJSON());
};
Converter.prototype.addProperty = function (key, value) {
if (this.sourcemap.hasOwnProperty(key)) throw new Error('property "' + key + '" already exists on the sourcemap, use set property instead');
return this.setProperty(key, value);
};
Converter.prototype.setProperty = function (key, value) {
this.sourcemap[key] = value;
return this;
};
Converter.prototype.getProperty = function (key) {
return this.sourcemap[key];
};
exports.fromObject = function (obj) {
return new Converter(obj);
};
exports.fromJSON = function (json) {
return new Converter(json, { isJSON: true });
};
exports.fromURI = function (uri) {
return new Converter(uri, { encoding: 'uri' });
};
exports.fromBase64 = function (base64) {
return new Converter(base64, { encoding: 'base64' });
};
exports.fromComment = function (comment) {
var m, encoding;
comment = comment
.replace(/^\/\*/g, '//')
.replace(/\*\/$/g, '');
m = exports.commentRegex.exec(comment);
encoding = m && m[4] || 'uri';
return new Converter(comment, { encoding: encoding, hasComment: true });
};
function makeConverter(sm) {
return new Converter(sm, { isJSON: true });
}
exports.fromMapFileComment = function (comment, read) {
if (typeof read === 'string') {
throw new Error(
'String directory paths are no longer supported with `fromMapFileComment`\n' +
'Please review the Upgrading documentation at https://github.com/thlorenz/convert-source-map#upgrading'
)
}
var sm = readFromFileMap(comment, read);
if (sm != null && typeof sm.then === 'function') {
return sm.then(makeConverter);
} else {
return makeConverter(sm);
}
};
// Finds last sourcemap comment in file or returns null if none was found
exports.fromSource = function (content) {
var m = content.match(exports.commentRegex);
return m ? exports.fromComment(m.pop()) : null;
};
// Finds last sourcemap comment in file or returns null if none was found
exports.fromMapFileSource = function (content, read) {
if (typeof read === 'string') {
throw new Error(
'String directory paths are no longer supported with `fromMapFileSource`\n' +
'Please review the Upgrading documentation at https://github.com/thlorenz/convert-source-map#upgrading'
)
}
var m = content.match(exports.mapFileCommentRegex);
return m ? exports.fromMapFileComment(m.pop(), read) : null;
};
exports.removeComments = function (src) {
return src.replace(exports.commentRegex, '');
};
exports.removeMapFileComments = function (src) {
return src.replace(exports.mapFileCommentRegex, '');
};
exports.generateMapFileComment = function (file, options) {
var data = 'sourceMappingURL=' + file;
return options && options.multiline ? '/*# ' + data + ' */' : '//# ' + data;
};
} (convertSourceMap$1));
var convertSourceMap = /*@__PURE__*/getDefaultExportFromCjs(convertSourceMap$1);
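/*
 * Illustrative sketch (not part of the original bundle): the convert-source-map
 * API exposed above converts between map objects, JSON, base64 comments and
 * data-URI comments.
 *
 *   const cm = convertSourceMap.fromObject({ version: 3, sources: ['a.js'], names: [], mappings: '' });
 *   cm.toJSON();    // => '{"version":3,"sources":["a.js"],"names":[],"mappings":""}'
 *   cm.toComment(); // => '//# sourceMappingURL=data:application/json;charset=utf-8;base64,' + base64 of that JSON
 *
 *   // fromSource(code) finds the last inline sourceMappingURL comment in `code`,
 *   // so convertSourceMap.fromSource(code)?.toObject() recovers the map object.
 */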
const debug$g = createDebugger("vite:sourcemap", {
onlyWhenFocused: true
});
const virtualSourceRE = /^(?:dep:|browser-external:|virtual:)|\0/;
async function computeSourceRoute(map, file) {
let sourceRoot;
try {
sourceRoot = await fsp.realpath(
path$n.resolve(path$n.dirname(file), map.sourceRoot || "")
);
} catch {
}
return sourceRoot;
}
async function injectSourcesContent(map, file, logger) {
let sourceRootPromise;
const missingSources = [];
const sourcesContent = map.sourcesContent || [];
const sourcesContentPromises = [];
for (let index = 0; index < map.sources.length; index++) {
const sourcePath = map.sources[index];
if (sourcesContent[index] == null && sourcePath && !virtualSourceRE.test(sourcePath)) {
sourcesContentPromises.push(
(async () => {
sourceRootPromise ??= computeSourceRoute(map, file);
const sourceRoot = await sourceRootPromise;
let resolvedSourcePath = cleanUrl(decodeURI(sourcePath));
if (sourceRoot) {
resolvedSourcePath = path$n.resolve(sourceRoot, resolvedSourcePath);
}
sourcesContent[index] = await fsp.readFile(resolvedSourcePath, "utf-8").catch(() => {
missingSources.push(resolvedSourcePath);
return null;
});
})()
);
}
}
await Promise.all(sourcesContentPromises);
map.sourcesContent = sourcesContent;
if (missingSources.length) {
logger.warnOnce(`Sourcemap for "${file}" points to missing source files`);
debug$g?.(`Missing sources:
` + missingSources.join(`
`));
}
}
function genSourceMapUrl(map) {
if (typeof map !== "string") {
map = JSON.stringify(map);
}
return `data:application/json;base64,${Buffer.from(map).toString("base64")}`;
}
function getCodeWithSourcemap(type, code, map) {
if (debug$g) {
code += `
/*${JSON.stringify(map, null, 2).replace(/\*\//g, "*\\/")}*/
`;
}
if (type === "js") {
code += `
//# sourceMappingURL=${genSourceMapUrl(map)}`;
} else if (type === "css") {
code += `
/*# sourceMappingURL=${genSourceMapUrl(map)} */`;
}
return code;
}
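/*
 * Illustrative sketch (not part of the original bundle): the map is appended as
 * an inline data URI, using the comment style that matches the content type
 * (closing comment marker escaped as *\/ here).
 *
 *   getCodeWithSourcemap('js', 'console.log(1)', { version: 3, mappings: '' });
 *   // => 'console.log(1)\n//# sourceMappingURL=data:application/json;base64,...'
 *   getCodeWithSourcemap('css', 'body{color:red}', { version: 3, mappings: '' });
 *   // => 'body{color:red}\n/*# sourceMappingURL=data:application/json;base64,... *\/'
 *
 * When the vite:sourcemap debug logger is active, the pretty-printed map is also
 * embedded in a block comment before the sourceMappingURL line.
 */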
function applySourcemapIgnoreList(map, sourcemapPath, sourcemapIgnoreList, logger) {
let { x_google_ignoreList } = map;
if (x_google_ignoreList === void 0) {
x_google_ignoreList = [];
}
for (let sourcesIndex = 0; sourcesIndex < map.sources.length; ++sourcesIndex) {
const sourcePath = map.sources[sourcesIndex];
if (!sourcePath) continue;
const ignoreList = sourcemapIgnoreList(
path$n.isAbsolute(sourcePath) ? sourcePath : path$n.resolve(path$n.dirname(sourcemapPath), sourcePath),
sourcemapPath
);
if (logger && typeof ignoreList !== "boolean") {
logger.warn("sourcemapIgnoreList function must return a boolean.");
}
if (ignoreList && !x_google_ignoreList.includes(sourcesIndex)) {
x_google_ignoreList.push(sourcesIndex);
}
}
if (x_google_ignoreList.length > 0) {
if (!map.x_google_ignoreList) map.x_google_ignoreList = x_google_ignoreList;
}
}
async function extractSourcemapFromFile(code, filePath) {
const map = (convertSourceMap.fromSource(code) || await convertSourceMap.fromMapFileSource(
code,
createConvertSourceMapReadMap(filePath)
))?.toObject();
if (map) {
return {
code: code.replace(convertSourceMap.mapFileCommentRegex, blankReplacer),
map
};
}
}
function createConvertSourceMapReadMap(originalFileName) {
return (filename) => {
return fsp.readFile(
path$n.resolve(path$n.dirname(originalFileName), filename),
"utf-8"
);
};
}
var tasks = {};
var utils$g = {};
var array$1 = {};
Object.defineProperty(array$1, "__esModule", { value: true });
array$1.splitWhen = array$1.flatten = void 0;
function flatten$1(items) {
return items.reduce((collection, item) => [].concat(collection, item), []);
}
array$1.flatten = flatten$1;
function splitWhen(items, predicate) {
const result = [[]];
let groupIndex = 0;
for (const item of items) {
if (predicate(item)) {
groupIndex++;
result[groupIndex] = [];
}
else {
result[groupIndex].push(item);
}
}
return result;
}
array$1.splitWhen = splitWhen;
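/*
 * Illustrative sketch (not part of the original bundle): `flatten$1` collapses
 * one level of nesting and `splitWhen` breaks an array into groups, dropping the
 * elements that match the predicate.
 *
 *   flatten$1([[1, 2], [3], 4]);                   // => [1, 2, 3, 4]
 *   splitWhen(['a', 'b', '', 'c'], s => s === ''); // => [['a', 'b'], ['c']]
 */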
var errno$1 = {};
Object.defineProperty(errno$1, "__esModule", { value: true });
errno$1.isEnoentCodeError = void 0;
function isEnoentCodeError(error) {
return error.code === 'ENOENT';
}
errno$1.isEnoentCodeError = isEnoentCodeError;
var fs$i = {};
Object.defineProperty(fs$i, "__esModule", { value: true });
fs$i.createDirentFromStats = void 0;
let DirentFromStats$1 = class DirentFromStats {
constructor(name, stats) {
this.name = name;
this.isBlockDevice = stats.isBlockDevice.bind(stats);
this.isCharacterDevice = stats.isCharacterDevice.bind(stats);
this.isDirectory = stats.isDirectory.bind(stats);
this.isFIFO = stats.isFIFO.bind(stats);
this.isFile = stats.isFile.bind(stats);
this.isSocket = stats.isSocket.bind(stats);
this.isSymbolicLink = stats.isSymbolicLink.bind(stats);
}
};
function createDirentFromStats$1(name, stats) {
return new DirentFromStats$1(name, stats);
}
fs$i.createDirentFromStats = createDirentFromStats$1;
var path$i = {};
Object.defineProperty(path$i, "__esModule", { value: true });
path$i.convertPosixPathToPattern = path$i.convertWindowsPathToPattern = path$i.convertPathToPattern = path$i.escapePosixPath = path$i.escapeWindowsPath = path$i.escape = path$i.removeLeadingDotSegment = path$i.makeAbsolute = path$i.unixify = void 0;
const os$4 = require$$2;
const path$h = require$$0$4;
const IS_WINDOWS_PLATFORM = os$4.platform() === 'win32';
const LEADING_DOT_SEGMENT_CHARACTERS_COUNT = 2; // ./ or .\\
/**
* All non-escaped special characters.
* Posix: ()*?[]{|}, !+@ before (, ! at the beginning, \\ before non-special characters.
* Windows: (){}[], !+@ before (, ! at the beginning.
*/
const POSIX_UNESCAPED_GLOB_SYMBOLS_RE = /(\\?)([()*?[\]{|}]|^!|[!+@](?=\()|\\(?![!()*+?@[\]{|}]))/g;
const WINDOWS_UNESCAPED_GLOB_SYMBOLS_RE = /(\\?)([()[\]{}]|^!|[!+@](?=\())/g;
/**
* The device path (\\.\ or \\?\).
* https://learn.microsoft.com/en-us/dotnet/standard/io/file-path-formats#dos-device-paths
*/
const DOS_DEVICE_PATH_RE = /^\\\\([.?])/;
/**
* All backslashes except those escaping special characters.
* Windows: !()+@{}
* https://learn.microsoft.com/en-us/windows/win32/fileio/naming-a-file#naming-conventions
*/
const WINDOWS_BACKSLASHES_RE = /\\(?![!()+@[\]{}])/g;
/**
* Designed to work only with simple paths: `dir\\file`.
*/
function unixify(filepath) {
return filepath.replace(/\\/g, '/');
}
path$i.unixify = unixify;
function makeAbsolute(cwd, filepath) {
return path$h.resolve(cwd, filepath);
}
path$i.makeAbsolute = makeAbsolute;
function removeLeadingDotSegment(entry) {
// We do not use `startsWith` because this is 10x slower than current implementation for some cases.
// eslint-disable-next-line @typescript-eslint/prefer-string-starts-ends-with
if (entry.charAt(0) === '.') {
const secondCharacter = entry.charAt(1);
if (secondCharacter === '/' || secondCharacter === '\\') {
return entry.slice(LEADING_DOT_SEGMENT_CHARACTERS_COUNT);
}
}
return entry;
}
path$i.removeLeadingDotSegment = removeLeadingDotSegment;
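/*
 * Illustrative sketch (not part of the original bundle): only a single leading
 * './' (or '.\') segment is stripped; other leading dots are preserved.
 *
 *   removeLeadingDotSegment('./src/index.ts');  // => 'src/index.ts'
 *   removeLeadingDotSegment('../src/index.ts'); // => '../src/index.ts' (unchanged)
 *   removeLeadingDotSegment('.gitignore');      // => '.gitignore' (unchanged)
 */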
path$i.escape = IS_WINDOWS_PLATFORM ? escapeWindowsPath : escapePosixPath;
function escapeWindowsPath(pattern) {
return pattern.replace(WINDOWS_UNESCAPED_GLOB_SYMBOLS_RE, '\\$2');
}
path$i.escapeWindowsPath = escapeWindowsPath;
function escapePosixPath(pattern) {
return pattern.replace(POSIX_UNESCAPED_GLOB_SYMBOLS_RE, '\\$2');
}
path$i.escapePosixPath = escapePosixPath;
path$i.convertPathToPattern = IS_WINDOWS_PLATFORM ? convertWindowsPathToPattern : convertPosixPathToPattern;
function convertWindowsPathToPattern(filepath) {
return escapeWindowsPath(filepath)
.replace(DOS_DEVICE_PATH_RE, '//$1')
.replace(WINDOWS_BACKSLASHES_RE, '/');
}
path$i.convertWindowsPathToPattern = convertWindowsPathToPattern;
function convertPosixPathToPattern(filepath) {
return escapePosixPath(filepath);
}
path$i.convertPosixPathToPattern = convertPosixPathToPattern;
var pattern$1 = {};
/*!
* is-extglob <https://github.com/jonschlinkert/is-extglob>
*
* Copyright (c) 2014-2016, Jon Schlinkert.
* Licensed under the MIT License.
*/
var isExtglob$1 = function isExtglob(str) {
if (typeof str !== 'string' || str === '') {
return false;
}
var match;
while ((match = /(\\).|([@?!+*]\(.*\))/g.exec(str))) {
if (match[2]) return true;
str = str.slice(match.index + match[0].length);
}
return false;
};
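/*
 * Illustrative sketch (not part of the original bundle): an extglob is one of
 * the @( ), ?( ), !( ), +( ), *( ) forms; escaped occurrences do not count.
 *
 *   isExtglob$1('?(foo|bar).js'); // => true
 *   isExtglob$1('src/*.js');      // => false (plain glob, no extglob group)
 *   isExtglob$1('\\?(foo).js');   // => false (the extglob is escaped)
 */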
/*!
* is-glob <https://github.com/jonschlinkert/is-glob>
*
* Copyright (c) 2014-2017, Jon Schlinkert.
* Released under the MIT License.
*/
var isExtglob = isExtglob$1;
var chars = { '{': '}', '(': ')', '[': ']'};
var strictCheck = function(str) {
if (str[0] === '!') {
return true;
}
var index = 0;
var pipeIndex = -2;
var closeSquareIndex = -2;
var closeCurlyIndex = -2;
var closeParenIndex = -2;
var backSlashIndex = -2;
while (index < str.length) {
if (str[index] === '*') {
return true;
}
if (str[index + 1] === '?' && /[\].+)]/.test(str[index])) {
return true;
}
if (closeSquareIndex !== -1 && str[index] === '[' && str[index + 1] !== ']') {
if (closeSquareIndex < index) {
closeSquareIndex = str.indexOf(']', index);
}
if (closeSquareIndex > index) {
if (backSlashIndex === -1 || backSlashIndex > closeSquareIndex) {
return true;
}
backSlashIndex = str.indexOf('\\', index);
if (backSlashIndex === -1 || backSlashIndex > closeSquareIndex) {
return true;
}
}
}
if (closeCurlyIndex !== -1 && str[index] === '{' && str[index + 1] !== '}') {
closeCurlyIndex = str.indexOf('}', index);
if (closeCurlyIndex > index) {
backSlashIndex = str.indexOf('\\', index);
if (backSlashIndex === -1 || backSlashIndex > closeCurlyIndex) {
return true;
}
}
}
if (closeParenIndex !== -1 && str[index] === '(' && str[index + 1] === '?' && /[:!=]/.test(str[index + 2]) && str[index + 3] !== ')') {
closeParenIndex = str.indexOf(')', index);
if (closeParenIndex > index) {
backSlashIndex = str.indexOf('\\', index);
if (backSlashIndex === -1 || backSlashIndex > closeParenIndex) {
return true;
}
}
}
if (pipeIndex !== -1 && str[index] === '(' && str[index + 1] !== '|') {
if (pipeIndex < index) {
pipeIndex = str.indexOf('|', index);
}
if (pipeIndex !== -1 && str[pipeIndex + 1] !== ')') {
closeParenIndex = str.indexOf(')', pipeIndex);
if (closeParenIndex > pipeIndex) {
backSlashIndex = str.indexOf('\\', pipeIndex);
if (backSlashIndex === -1 || backSlashIndex > closeParenIndex) {
return true;
}
}
}
}
if (str[index] === '\\') {
var open = str[index + 1];
index += 2;
var close = chars[open];
if (close) {
var n = str.indexOf(close, index);
if (n !== -1) {
index = n + 1;
}
}
if (str[index] === '!') {
return true;
}
} else {
index++;
}
}
return false;
};
var relaxedCheck = function(str) {
if (str[0] === '!') {
return true;
}
var index = 0;
while (index < str.length) {
if (/[*?{}()[\]]/.test(str[index])) {
return true;
}
if (str[index] === '\\') {
var open = str[index + 1];
index += 2;
var close = chars[open];
if (close) {
var n = str.indexOf(close, index);
if (n !== -1) {
index = n + 1;
}
}
if (str[index] === '!') {
return true;
}
} else {
index++;
}
}
return false;
};
var isGlob$2 = function isGlob(str, options) {
if (typeof str !== 'string' || str === '') {
return false;
}
if (isExtglob(str)) {
return true;
}
var check = strictCheck;
// optionally relax check
if (options && options.strict === false) {
check = relaxedCheck;
}
return check(str);
};
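/*
 * Illustrative sketch (not part of the original bundle): `isGlob$2` treats a
 * string as a glob when it contains an unescaped wildcard, brace, bracket,
 * extglob or leading '!' negation; `{ strict: false }` switches to the looser
 * character-class check.
 *
 *   isGlob$2('src/*.ts');                       // => true
 *   isGlob$2('src/index.ts');                   // => false
 *   isGlob$2('!important.txt');                 // => true (negation)
 *   isGlob$2('a/{b,c}.js', { strict: false });  // => true
 */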
var isGlob$1 = isGlob$2;
var pathPosixDirname = require$$0$4.posix.dirname;
var isWin32 = require$$2.platform() === 'win32';
var slash = '/';
var backslash = /\\/g;
var enclosure = /[\{\[].*[\}\]]$/;
var globby = /(^|[^\\])([\{\[]|\([^\)]+$)/;
var escaped = /\\([\!\*\?\|\[\]\(\)\{\}])/g;
/**
* @param {string} str
* @param {Object} opts
* @param {boolean} [opts.flipBackslashes=true]
* @returns {string}
*/
var globParent$2 = function globParent(str, opts) {
var options = Object.assign({ flipBackslashes: true }, opts);
// flip windows path separators
if (options.flipBackslashes && isWin32 && str.indexOf(slash) < 0) {
str = str.replace(backslash, slash);
}
// special case for strings ending in enclosure containing path separator
if (enclosure.test(str)) {
str += slash;
}
// preserves full path in case of trailing path separator
str += 'a';
// remove path parts that are globby
do {
str = pathPosixDirname(str);
} while (isGlob$1(str) || globby.test(str));
// remove escape chars and return result
return str.replace(escaped, '$1');
};
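/*
 * Illustrative sketch (not part of the original bundle): the glob parent is the
 * deepest path prefix that contains no glob magic; trailing segments are
 * stripped until the remainder is a plain directory path.
 *
 *   globParent$2('src/pages/*.vue');             // => 'src/pages'
 *   globParent$2('/static/{img,fonts}/*.woff2'); // => '/static'
 *   globParent$2('src/index.ts');                // => 'src' (no glob: the dirname is returned)
 */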
var utils$f = {};
(function (exports) {
exports.isInteger = num => {
if (typeof num === 'number') {
return Number.isInteger(num);
}
if (typeof num === 'string' && num.trim() !== '') {
return Number.isInteger(Number(num));
}
return false;
};
/**
* Find a node of the given type
*/
exports.find = (node, type) => node.nodes.find(node => node.type === type);
/**
 * Returns true if expanding the range between `min` and `max` with the given step would produce at least `limit` results
 */
exports.exceedsLimit = (min, max, step = 1, limit) => {
if (limit === false) return false;
if (!exports.isInteger(min) || !exports.isInteger(max)) return false;
return ((Number(max) - Number(min)) / Number(step)) >= limit;
};
/**
* Escape the given node with '\\' before node.value
*/
exports.escapeNode = (block, n = 0, type) => {
const node = block.nodes[n];
if (!node) return;
if ((type && node.type === type) || node.type === 'open' || node.type === 'close') {
if (node.escaped !== true) {
node.value = '\\' + node.value;
node.escaped = true;
}
}
};
/**
* Returns true if the given brace node should be enclosed in literal braces
*/
exports.encloseBrace = node => {
if (node.type !== 'brace') return false;
if ((node.commas >> 0 + node.ranges >> 0) === 0) {
node.invalid = true;
return true;
}
return false;
};
/**
* Returns true if a brace node is invalid.
*/
exports.isInvalidBrace = block => {
if (block.type !== 'brace') return false;
if (block.invalid === true || block.dollar) return true;
if ((block.commas >> 0 + block.ranges >> 0) === 0) {
block.invalid = true;
return true;
}
if (block.open !== true || block.close !== true) {
block.invalid = true;
return true;
}
return false;
};
/**
* Returns true if a node is an open or close node
*/
exports.isOpenOrClose = node => {
if (node.type === 'open' || node.type === 'close') {
return true;
}
return node.open === true || node.close === true;
};
/**
* Reduce an array of text nodes.
*/
exports.reduce = nodes => nodes.reduce((acc, node) => {
if (node.type === 'text') acc.push(node.value);
if (node.type === 'range') node.type = 'text';
return acc;
}, []);
/**
* Flatten an array
*/
exports.flatten = (...args) => {
const result = [];
const flat = arr => {
for (let i = 0; i < arr.length; i++) {
const ele = arr[i];
if (Array.isArray(ele)) {
flat(ele);
continue;
}
if (ele !== undefined) {
result.push(ele);
}
}
return result;
};
flat(args);
return result;
};
} (utils$f));
const utils$e = utils$f;
var stringify$7 = (ast, options = {}) => {
const stringify = (node, parent = {}) => {
const invalidBlock = options.escapeInvalid && utils$e.isInvalidBrace(parent);
const invalidNode = node.invalid === true && options.escapeInvalid === true;
let output = '';
if (node.value) {
if ((invalidBlock || invalidNode) && utils$e.isOpenOrClose(node)) {
return '\\' + node.value;
}
return node.value;
}
if (node.nodes) {
for (const child of node.nodes) {
output += stringify(child);
}
}
return output;
};
return stringify(ast);
};
/*!
* is-number <https://github.com/jonschlinkert/is-number>
*
* Copyright (c) 2014-present, Jon Schlinkert.
* Released under the MIT License.
*/
var isNumber$2 = function(num) {
if (typeof num === 'number') {
return num - num === 0;
}
if (typeof num === 'string' && num.trim() !== '') {
return Number.isFinite ? Number.isFinite(+num) : isFinite(+num);
}
return false;
};
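/*
 * Illustrative sketch (not part of the original bundle): numeric strings count,
 * empty/whitespace strings and NaN do not.
 *
 *   isNumber$2(5);     // => true
 *   isNumber$2('5');   // => true
 *   isNumber$2('1e3'); // => true
 *   isNumber$2('');    // => false
 *   isNumber$2(NaN);   // => false
 */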
/*!
* to-regex-range <https://github.com/micromatch/to-regex-range>
*
* Copyright (c) 2015-present, Jon Schlinkert.
* Released under the MIT License.
*/
const isNumber$1 = isNumber$2;
const toRegexRange$1 = (min, max, options) => {
if (isNumber$1(min) === false) {
throw new TypeError('toRegexRange: expected the first argument to be a number');
}
if (max === void 0 || min === max) {
return String(min);
}
if (isNumber$1(max) === false) {
throw new TypeError('toRegexRange: expected the second argument to be a number.');
}
let opts = { relaxZeros: true, ...options };
if (typeof opts.strictZeros === 'boolean') {
opts.relaxZeros = opts.strictZeros === false;
}
let relax = String(opts.relaxZeros);
let shorthand = String(opts.shorthand);
let capture = String(opts.capture);
let wrap = String(opts.wrap);
let cacheKey = min + ':' + max + '=' + relax + shorthand + capture + wrap;
if (toRegexRange$1.cache.hasOwnProperty(cacheKey)) {
return toRegexRange$1.cache[cacheKey].result;
}
let a = Math.min(min, max);
let b = Math.max(min, max);
if (Math.abs(a - b) === 1) {
let result = min + '|' + max;
if (opts.capture) {
return `(${result})`;
}
if (opts.wrap === false) {
return result;
}
return `(?:${result})`;
}
let isPadded = hasPadding(min) || hasPadding(max);
let state = { min, max, a, b };
let positives = [];
let negatives = [];
if (isPadded) {
state.isPadded = isPadded;
state.maxLen = String(state.max).length;
}
if (a < 0) {
let newMin = b < 0 ? Math.abs(b) : 1;
negatives = splitToPatterns(newMin, Math.abs(a), state, opts);
a = state.a = 0;
}
if (b >= 0) {
positives = splitToPatterns(a, b, state, opts);
}
state.negatives = negatives;
state.positives = positives;
state.result = collatePatterns(negatives, positives);
if (opts.capture === true) {
state.result = `(${state.result})`;
} else if (opts.wrap !== false && (positives.length + negatives.length) > 1) {
state.result = `(?:${state.result})`;
}
toRegexRange$1.cache[cacheKey] = state;
return state.result;
};
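/*
 * Illustrative sketch (not part of the original bundle): adjacent or equal
 * bounds short-circuit, larger ranges are split into digit patterns; results
 * are memoized on toRegexRange$1.cache.
 *
 *   toRegexRange$1(7, 7);    // => '7'
 *   toRegexRange$1(5, 6);    // => '(?:5|6)'
 *   toRegexRange$1(99, 100); // => '(?:99|100)'
 *   toRegexRange$1(1, 100);  // => a single '(?:...)' alternation covering 1-100
 */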
function collatePatterns(neg, pos, options) {
let onlyNegative = filterPatterns(neg, pos, '-', false) || [];
let onlyPositive = filterPatterns(pos, neg, '', false) || [];
let intersected = filterPatterns(neg, pos, '-?', true) || [];
let subpatterns = onlyNegative.concat(intersected).concat(onlyPositive);
return subpatterns.join('|');
}
function splitToRanges(min, max) {
let nines = 1;
let zeros = 1;
let stop = countNines(min, nines);
let stops = new Set([max]);
while (min <= stop && stop <= max) {
stops.add(stop);
nines += 1;
stop = countNines(min, nines);
}
stop = countZeros(max + 1, zeros) - 1;
while (min < stop && stop <= max) {
stops.add(stop);
zeros += 1;
stop = countZeros(max + 1, zeros) - 1;
}
stops = [...stops];
stops.sort(compare);
return stops;
}
/**
* Convert a range to a regex pattern
* @param {Number} `start`
* @param {Number} `stop`
* @return {String}
*/
function rangeToPattern(start, stop, options) {
if (start === stop) {
return { pattern: start, count: [], digits: 0 };
}
let zipped = zip(start, stop);
let digits = zipped.length;
let pattern = '';
let count = 0;
for (let i = 0; i < digits; i++) {
let [startDigit, stopDigit] = zipped[i];
if (startDigit === stopDigit) {
pattern += startDigit;
} else if (startDigit !== '0' || stopDigit !== '9') {
pattern += toCharacterClass(startDigit, stopDigit);
} else {
count++;
}
}
if (count) {
pattern += options.shorthand === true ? '\\d' : '[0-9]';
}
return { pattern, count: [count], digits };
}
function splitToPatterns(min, max, tok, options) {
let ranges = splitToRanges(min, max);
let tokens = [];
let start = min;
let prev;
for (let i = 0; i < ranges.length; i++) {
let max = ranges[i];
let obj = rangeToPattern(String(start), String(max), options);
let zeros = '';
if (!tok.isPadded && prev && prev.pattern === obj.pattern) {
if (prev.count.length > 1) {
prev.count.pop();
}
prev.count.push(obj.count[0]);
prev.string = prev.pattern + toQuantifier(prev.count);
start = max + 1;
continue;
}
if (tok.isPadded) {
zeros = padZeros(max, tok, options);
}
obj.string = zeros + obj.pattern + toQuantifier(obj.count);
tokens.push(obj);
start = max + 1;
prev = obj;
}
return tokens;
}
function filterPatterns(arr, comparison, prefix, intersection, options) {
let result = [];
for (let ele of arr) {
let { string } = ele;
// only push if _both_ are negative...
if (!intersection && !contains(comparison, 'string', string)) {
result.push(prefix + string);
}
// or _both_ are positive
if (intersection && contains(comparison, 'string', string)) {
result.push(prefix + string);
}
}
return result;
}
/**
* Zip strings
*/
function zip(a, b) {
let arr = [];
for (let i = 0; i < a.length; i++) arr.push([a[i], b[i]]);
return arr;
}
function compare(a, b) {
return a > b ? 1 : b > a ? -1 : 0;
}
function contains(arr, key, val) {
return arr.some(ele => ele[key] === val);
}
function countNines(min, len) {
return Number(String(min).slice(0, -len) + '9'.repeat(len));
}
function countZeros(integer, zeros) {
return integer - (integer % Math.pow(10, zeros));
}
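// Example values (derived from the two helpers above):
// countNines(123, 1)  //=> 129  ('12' + '9')
// countNines(123, 2)  //=> 199  ('1' + '99')
// countZeros(347, 2)  //=> 300  (347 - 347 % 100)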
function toQuantifier(digits) {
let [start = 0, stop = ''] = digits;
if (stop || start > 1) {
return `{${start + (stop ? ',' + stop : '')}}`;
}
return '';
}
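// Example values for toQuantifier: [1] => '', [3] => '{3}', [2, 3] => '{2,3}'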
function toCharacterClass(a, b, options) {
return `[${a}${(b - a === 1) ? '' : '-'}${b}]`;
}
function hasPadding(str) {
return /^-?(0+)\d/.test(str);
}
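// hasPadding('007') //=> true, hasPadding('-01') //=> true, hasPadding('0') //=> false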
function padZeros(value, tok, options) {
if (!tok.isPadded) {
return value;
}
let diff = Math.abs(tok.maxLen - String(value).length);
let relax = options.relaxZeros !== false;
switch (diff) {
case 0:
return '';
case 1:
return relax ? '0?' : '0';
case 2:
return relax ? '0{0,2}' : '00';
default: {
return relax ? `0{0,${diff}}` : `0{${diff}}`;
}
}
}
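// With tok.maxLen = 3 and relaxZeros enabled (the default), padZeros('7', tok, opts)
// yields '0{0,2}' (up to two optional leading zeros); with relaxZeros: false it yields '00'.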
/**
* Cache
*/
toRegexRange$1.cache = {};
toRegexRange$1.clearCache = () => (toRegexRange$1.cache = {});
/**
* Expose `toRegexRange`
*/
var toRegexRange_1 = toRegexRange$1;
/*!
* fill-range <https://github.com/jonschlinkert/fill-range>
*
* Copyright (c) 2014-present, Jon Schlinkert.
* Licensed under the MIT License.
*/
const util$1 = require$$0$5;
const toRegexRange = toRegexRange_1;
const isObject = val => val !== null && typeof val === 'object' && !Array.isArray(val);
const transform = toNumber => {
return value => toNumber === true ? Number(value) : String(value);
};
const isValidValue = value => {
return typeof value === 'number' || (typeof value === 'string' && value !== '');
};
const isNumber = num => Number.isInteger(+num);
const zeros = input => {
let value = `${input}`;
let index = -1;
if (value[0] === '-') value = value.slice(1);
if (value === '0') return false;
while (value[++index] === '0');
return index > 0;
};
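// zeros('01') //=> true (has leading zeros), zeros('1') //=> false, zeros('-05') //=> true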
const stringify$6 = (start, end, options) => {
if (typeof start === 'string' || typeof end === 'string') {
return true;
}
return options.stringify === true;
};
const pad = (input, maxLength, toNumber) => {
if (maxLength > 0) {
let dash = input[0] === '-' ? '-' : '';
if (dash) input = input.slice(1);
input = (dash + input.padStart(dash ? maxLength - 1 : maxLength, '0'));
}
if (toNumber === false) {
return String(input);
}
return input;
};
const toMaxLen = (input, maxLength) => {
let negative = input[0] === '-' ? '-' : '';
if (negative) {
input = input.slice(1);
maxLength--;
}
while (input.length < maxLength) input = '0' + input;
return negative ? ('-' + input) : input;
};
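// toMaxLen('7', 3) //=> '007', toMaxLen('-7', 3) //=> '-07' (the sign counts toward the width)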
const toSequence = (parts, options, maxLen) => {
parts.negatives.sort((a, b) => a < b ? -1 : a > b ? 1 : 0);
parts.positives.sort((a, b) => a < b ? -1 : a > b ? 1 : 0);
let prefix = options.capture ? '' : '?:';
let positives = '';
let negatives = '';
let result;
if (parts.positives.length) {
positives = parts.positives.map(v => toMaxLen(String(v), maxLen)).join('|');
}
if (parts.negatives.length) {
negatives = `-(${prefix}${parts.negatives.map(v => toMaxLen(String(v), maxLen)).join('|')})`;
}
if (positives && negatives) {
result = `${positives}|${negatives}`;
} else {
result = positives || negatives;
}
if (options.wrap) {
return `(${prefix}${result})`;
}
return result;
};
const toRange = (a, b, isNumbers, options) => {
if (isNumbers) {
return toRegexRange(a, b, { wrap: false, ...options });
}
let start = String.fromCharCode(a);
if (a === b) return start;
let stop = String.fromCharCode(b);
return `[${start}-${stop}]`;
};
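// toRange(5, 10, true, {})  //=> regex source for the numeric range 5-10 (via toRegexRange)
// toRange(97, 99, false)    //=> '[a-c]' (character-code range)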
const toRegex = (start, end, options) => {
if (Array.isArray(start)) {
let wrap = options.wrap === true;
let prefix = options.capture ? '' : '?:';
return wrap ? `(${prefix}${start.join('|')})` : start.join('|');
}
return toRegexRange(start, end, options);
};
const rangeError = (...args) => {
return new RangeError('Invalid range arguments: ' + util$1.inspect(...args));
};
const invalidRange = (start, end, options) => {
if (options.strictRanges === true) throw rangeError([start, end]);
return [];
};
const invalidStep = (step, options) => {
if (options.strictRanges === true) {
throw new TypeError(`Expected step "${step}" to be a number`);
}
return [];
};
const fillNumbers = (start, end, step = 1, options = {}) => {
let a = Number(start);
let b = Number(end);
if (!Number.isInteger(a) || !Number.isInteger(b)) {
if (options.strictRanges === true) throw rangeError([start, end]);
return [];
}
// fix negative zero
if (a === 0) a = 0;
if (b === 0) b = 0;
let descending = a > b;
let startString = String(start);
let endString = String(end);
let stepString = String(step);
step = Math.max(Math.abs(step), 1);
let padded = zeros(startString) || zeros(endString) || zeros(stepString);
let maxLen = padded ? Math.max(startString.length, endString.length, stepString.length) : 0;
let toNumber = padded === false && stringify$6(start, end, options) === false;
let format = options.transform || transform(toNumber);
if (options.toRegex && step === 1) {
return toRange(toMaxLen(start, maxLen), toMaxLen(end, maxLen), true, options);
}
let parts = { negatives: [], positives: [] };
let push = num => parts[num < 0 ? 'negatives' : 'positives'].push(Math.abs(num));
let range = [];
let index = 0;
while (descending ? a >= b : a <= b) {
if (options.toRegex === true && step > 1) {
push(a);
} else {
range.push(pad(format(a, index), maxLen, toNumber));
}
a = descending ? a - step : a + step;
index++;
}
if (options.toRegex === true) {
return step > 1
? toSequence(parts, options, maxLen)
: toRegex(range, null, { wrap: false, ...options });
}
return range;
};
const fillLetters = (start, end, step = 1, options = {}) => {
if ((!isNumber(start) && start.length > 1) || (!isNumber(end) && end.length > 1)) {
return invalidRange(start, end, options);
}
let format = options.transform || (val => String.fromCharCode(val));
let a = `${start}`.charCodeAt(0);
let b = `${end}`.charCodeAt(0);
let descending = a > b;
let min = Math.min(a, b);
let max = Math.max(a, b);
if (options.toRegex && step === 1) {
return toRange(min, max, false, options);
}
let range = [];
let index = 0;
while (descending ? a >= b : a <= b) {
range.push(format(a, index));
a = descending ? a - step : a + step;
index++;
}
if (options.toRegex === true) {
return toRegex(range, null, { wrap: false, ...options });
}
return range;
};
const fill$2 = (start, end, step, options = {}) => {
if (end == null && isValidValue(start)) {
return [start];
}
if (!isValidValue(start) || !isValidValue(end)) {
return invalidRange(start, end, options);
}
if (typeof step === 'function') {
return fill$2(start, end, 1, { transform: step });
}
if (isObject(step)) {
return fill$2(start, end, 0, step);
}
let opts = { ...options };
if (opts.capture === true) opts.wrap = true;
step = step || opts.step || 1;
if (!isNumber(step)) {
if (step != null && !isObject(step)) return invalidStep(step, opts);
return fill$2(start, end, 1, step);
}
if (isNumber(start) && isNumber(end)) {
return fillNumbers(start, end, step, opts);
}
return fillLetters(start, end, Math.max(Math.abs(step), 1), opts);
};
var fillRange = fill$2;
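// Typical fill-range usage (illustrative):
// fill$2(1, 5)        //=> [1, 2, 3, 4, 5]
// fill$2(1, 10, 2)    //=> [1, 3, 5, 7, 9]
// fill$2('a', 'e')    //=> ['a', 'b', 'c', 'd', 'e']
// fill$2('01', '03')  //=> ['01', '02', '03'] (zero-padding is preserved)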
const fill$1 = fillRange;
const utils$d = utils$f;
const compile$1 = (ast, options = {}) => {
const walk = (node, parent = {}) => {
const invalidBlock = utils$d.isInvalidBrace(parent);
const invalidNode = node.invalid === true && options.escapeInvalid === true;
const invalid = invalidBlock === true || invalidNode === true;
const prefix = options.escapeInvalid === true ? '\\' : '';
let output = '';
if (node.isOpen === true) {
return prefix + node.value;
}
if (node.isClose === true) {
return prefix + node.value;
}
if (node.type === 'open') {
return invalid ? prefix + node.value : '(';
}
if (node.type === 'close') {
return invalid ? prefix + node.value : ')';
}
if (node.type === 'comma') {
return node.prev.type === 'comma' ? '' : invalid ? node.value : '|';
}
if (node.value) {
return node.value;
}
if (node.nodes && node.ranges > 0) {
const args = utils$d.reduce(node.nodes);
const range = fill$1(...args, { ...options, wrap: false, toRegex: true, strictZeros: true });
if (range.length !== 0) {
return args.length > 1 && range.length > 1 ? `(${range})` : range;
}
}
if (node.nodes) {
for (const child of node.nodes) {
output += walk(child, node);
}
}
return output;
};
return walk(ast);
};
var compile_1 = compile$1;
const fill = fillRange;
const stringify$5 = stringify$7;
const utils$c = utils$f;
const append$1 = (queue = '', stash = '', enclose = false) => {
const result = [];
queue = [].concat(queue);
stash = [].concat(stash);
if (!stash.length) return queue;
if (!queue.length) {
return enclose ? utils$c.flatten(stash).map(ele => `{${ele}}`) : stash;
}
for (const item of queue) {
if (Array.isArray(item)) {
for (const value of item) {
result.push(append$1(value, stash, enclose));
}
} else {
for (let ele of stash) {
if (enclose === true && typeof ele === 'string') ele = `{${ele}}`;
result.push(Array.isArray(ele) ? append$1(item, ele, enclose) : item + ele);
}
}
}
return utils$c.flatten(result);
};
const expand$2 = (ast, options = {}) => {
const rangeLimit = options.rangeLimit === undefined ? 1000 : options.rangeLimit;
const walk = (node, parent = {}) => {
node.queue = [];
let p = parent;
let q = parent.queue;
while (p.type !== 'brace' && p.type !== 'root' && p.parent) {
p = p.parent;
q = p.queue;
}
if (node.invalid || node.dollar) {
q.push(append$1(q.pop(), stringify$5(node, options)));
return;
}
if (node.type === 'brace' && node.invalid !== true && node.nodes.length === 2) {
q.push(append$1(q.pop(), ['{}']));
return;
}
if (node.nodes && node.ranges > 0) {
const args = utils$c.reduce(node.nodes);
if (utils$c.exceedsLimit(...args, options.step, rangeLimit)) {
throw new RangeError('expanded array length exceeds range limit. Use options.rangeLimit to increase or disable the limit.');
}
let range = fill(...args, options);
if (range.length === 0) {
range = stringify$5(node, options);
}
q.push(append$1(q.pop(), range));
node.nodes = [];
return;
}
const enclose = utils$c.encloseBrace(node);
let queue = node.queue;
let block = node;
while (block.type !== 'brace' && block.type !== 'root' && block.parent) {
block = block.parent;
queue = block.queue;
}
for (let i = 0; i < node.nodes.length; i++) {
const child = node.nodes[i];
if (child.type === 'comma' && node.type === 'brace') {
if (i === 1) queue.push('');
queue.push('');
continue;
}
if (child.type === 'close') {
q.push(append$1(q.pop(), queue, enclose));
continue;
}
if (child.value && child.type !== 'open') {
queue.push(append$1(queue.pop(), child.value));
continue;
}
if (child.nodes) {
walk(child, node);
}
}
return queue;
};
return utils$c.flatten(walk(ast));
};
var expand_1$1 = expand$2;
var constants$3 = {
MAX_LENGTH: 10000,
// Digits
CHAR_0: '0', /* 0 */
CHAR_9: '9', /* 9 */
// Alphabet chars.
CHAR_UPPERCASE_A: 'A', /* A */
CHAR_LOWERCASE_A: 'a', /* a */
CHAR_UPPERCASE_Z: 'Z', /* Z */
CHAR_LOWERCASE_Z: 'z', /* z */
CHAR_LEFT_PARENTHESES: '(', /* ( */
CHAR_RIGHT_PARENTHESES: ')', /* ) */
CHAR_ASTERISK: '*', /* * */
// Non-alphabetic chars.
CHAR_AMPERSAND: '&', /* & */
CHAR_AT: '@', /* @ */
CHAR_BACKSLASH: '\\', /* \ */
CHAR_BACKTICK: '`', /* ` */
CHAR_CARRIAGE_RETURN: '\r', /* \r */
CHAR_CIRCUMFLEX_ACCENT: '^', /* ^ */
CHAR_COLON: ':', /* : */
CHAR_COMMA: ',', /* , */
CHAR_DOLLAR: '$', /* $ */
CHAR_DOT: '.', /* . */
CHAR_DOUBLE_QUOTE: '"', /* " */
CHAR_EQUAL: '=', /* = */
CHAR_EXCLAMATION_MARK: '!', /* ! */
CHAR_FORM_FEED: '\f', /* \f */
CHAR_FORWARD_SLASH: '/', /* / */
CHAR_HASH: '#', /* # */
CHAR_HYPHEN_MINUS: '-', /* - */
CHAR_LEFT_ANGLE_BRACKET: '<', /* < */
CHAR_LEFT_CURLY_BRACE: '{', /* { */
CHAR_LEFT_SQUARE_BRACKET: '[', /* [ */
CHAR_LINE_FEED: '\n', /* \n */
CHAR_NO_BREAK_SPACE: '\u00A0', /* \u00A0 */
CHAR_PERCENT: '%', /* % */
CHAR_PLUS: '+', /* + */
CHAR_QUESTION_MARK: '?', /* ? */
CHAR_RIGHT_ANGLE_BRACKET: '>', /* > */
CHAR_RIGHT_CURLY_BRACE: '}', /* } */
CHAR_RIGHT_SQUARE_BRACKET: ']', /* ] */
CHAR_SEMICOLON: ';', /* ; */
CHAR_SINGLE_QUOTE: '\'', /* ' */
CHAR_SPACE: ' ', /* */
CHAR_TAB: '\t', /* \t */
CHAR_UNDERSCORE: '_', /* _ */
CHAR_VERTICAL_LINE: '|', /* | */
CHAR_ZERO_WIDTH_NOBREAK_SPACE: '\uFEFF' /* \uFEFF */
};
const stringify$4 = stringify$7;
/**
* Constants
*/
const {
MAX_LENGTH,
CHAR_BACKSLASH, /* \ */
CHAR_BACKTICK, /* ` */
CHAR_COMMA, /* , */
CHAR_DOT, /* . */
CHAR_LEFT_PARENTHESES, /* ( */
CHAR_RIGHT_PARENTHESES, /* ) */
CHAR_LEFT_CURLY_BRACE, /* { */
CHAR_RIGHT_CURLY_BRACE, /* } */
CHAR_LEFT_SQUARE_BRACKET, /* [ */
CHAR_RIGHT_SQUARE_BRACKET, /* ] */
CHAR_DOUBLE_QUOTE, /* " */
CHAR_SINGLE_QUOTE, /* ' */
CHAR_NO_BREAK_SPACE,
CHAR_ZERO_WIDTH_NOBREAK_SPACE
} = constants$3;
/**
* parse
*/
const parse$c = (input, options = {}) => {
if (typeof input !== 'string') {
throw new TypeError('Expected a string');
}
const opts = options || {};
const max = typeof opts.maxLength === 'number' ? Math.min(MAX_LENGTH, opts.maxLength) : MAX_LENGTH;
if (input.length > max) {
throw new SyntaxError(`Input length (${input.length}) exceeds max characters (${max})`);
}
const ast = { type: 'root', input, nodes: [] };
const stack = [ast];
let block = ast;
let prev = ast;
let brackets = 0;
const length = input.length;
let index = 0;
let depth = 0;
let value;
/**
* Helpers
*/
const advance = () => input[index++];
const push = node => {
if (node.type === 'text' && prev.type === 'dot') {
prev.type = 'text';
}
if (prev && prev.type === 'text' && node.type === 'text') {
prev.value += node.value;
return;
}
block.nodes.push(node);
node.parent = block;
node.prev = prev;
prev = node;
return node;
};
push({ type: 'bos' });
while (index < length) {
block = stack[stack.length - 1];
value = advance();
/**
* Invalid chars
*/
if (value === CHAR_ZERO_WIDTH_NOBREAK_SPACE || value === CHAR_NO_BREAK_SPACE) {
continue;
}
/**
* Escaped chars
*/
if (value === CHAR_BACKSLASH) {
push({ type: 'text', value: (options.keepEscaping ? value : '') + advance() });
continue;
}
/**
* Right square bracket (literal): ']'
*/
if (value === CHAR_RIGHT_SQUARE_BRACKET) {
push({ type: 'text', value: '\\' + value });
continue;
}
/**
* Left square bracket: '['
*/
if (value === CHAR_LEFT_SQUARE_BRACKET) {
brackets++;
let next;
while (index < length && (next = advance())) {
value += next;
if (next === CHAR_LEFT_SQUARE_BRACKET) {
brackets++;
continue;
}
if (next === CHAR_BACKSLASH) {
value += advance();
continue;
}
if (next === CHAR_RIGHT_SQUARE_BRACKET) {
brackets--;
if (brackets === 0) {
break;
}
}
}
push({ type: 'text', value });
continue;
}
/**
* Parentheses
*/
if (value === CHAR_LEFT_PARENTHESES) {
block = push({ type: 'paren', nodes: [] });
stack.push(block);
push({ type: 'text', value });
continue;
}
if (value === CHAR_RIGHT_PARENTHESES) {
if (block.type !== 'paren') {
push({ type: 'text', value });
continue;
}
block = stack.pop();
push({ type: 'text', value });
block = stack[stack.length - 1];
continue;
}
/**
* Quotes: '|"|`
*/
if (value === CHAR_DOUBLE_QUOTE || value === CHAR_SINGLE_QUOTE || value === CHAR_BACKTICK) {
const open = value;
let next;
if (options.keepQuotes !== true) {
value = '';
}
while (index < length && (next = advance())) {
if (next === CHAR_BACKSLASH) {
value += next + advance();
continue;
}
if (next === open) {
if (options.keepQuotes === true) value += next;
break;
}
value += next;
}
push({ type: 'text', value });
continue;
}
/**
* Left curly brace: '{'
*/
if (value === CHAR_LEFT_CURLY_BRACE) {
depth++;
const dollar = prev.value && prev.value.slice(-1) === '$' || block.dollar === true;
const brace = {
type: 'brace',
open: true,
close: false,
dollar,
depth,
commas: 0,
ranges: 0,
nodes: []
};
block = push(brace);
stack.push(block);
push({ type: 'open', value });
continue;
}
/**
* Right curly brace: '}'
*/
if (value === CHAR_RIGHT_CURLY_BRACE) {
if (block.type !== 'brace') {
push({ type: 'text', value });
continue;
}
const type = 'close';
block = stack.pop();
block.close = true;
push({ type, value });
depth--;
block = stack[stack.length - 1];
continue;
}
/**
* Comma: ','
*/
if (value === CHAR_COMMA && depth > 0) {
if (block.ranges > 0) {
block.ranges = 0;
const open = block.nodes.shift();
block.nodes = [open, { type: 'text', value: stringify$4(block) }];
}
push({ type: 'comma', value });
block.commas++;
continue;
}
/**
* Dot: '.'
*/
if (value === CHAR_DOT && depth > 0 && block.commas === 0) {
const siblings = block.nodes;
if (depth === 0 || siblings.length === 0) {
push({ type: 'text', value });
continue;
}
if (prev.type === 'dot') {
block.range = [];
prev.value += value;
prev.type = 'range';
if (block.nodes.length !== 3 && block.nodes.length !== 5) {
block.invalid = true;
block.ranges = 0;
prev.type = 'text';
continue;
}
block.ranges++;
block.args = [];
continue;
}
if (prev.type === 'range') {
siblings.pop();
const before = siblings[siblings.length - 1];
before.value += prev.value + value;
prev = before;
block.ranges--;
continue;
}
push({ type: 'dot', value });
continue;
}
/**
* Text
*/
push({ type: 'text', value });
}
// Mark imbalanced braces and brackets as invalid
do {
block = stack.pop();
if (block.type !== 'root') {
block.nodes.forEach(node => {
if (!node.nodes) {
if (node.type === 'open') node.isOpen = true;
if (node.type === 'close') node.isClose = true;
if (!node.nodes) node.type = 'text';
node.invalid = true;
}
});
// get the location of the block on parent.nodes (block's siblings)
const parent = stack[stack.length - 1];
const index = parent.nodes.indexOf(block);
// replace the (invalid) block with its nodes
parent.nodes.splice(index, 1, ...block.nodes);
}
} while (stack.length > 0);
push({ type: 'eos' });
return ast;
};
var parse_1$2 = parse$c;
const stringify$3 = stringify$7;
const compile = compile_1;
const expand$1 = expand_1$1;
const parse$b = parse_1$2;
/**
* Expand the given pattern or create a regex-compatible string.
*
* ```js
* const braces = require('braces');
* console.log(braces('{a,b,c}')); //=> ['(a|b|c)']
* console.log(braces('{a,b,c}', { expand: true })); //=> ['a', 'b', 'c']
* ```
* @param {String} `str`
* @param {Object} `options`
* @return {Array} Returns an array of expanded strings, or an array containing a regex-compatible string when `options.expand` is not true.
* @api public
*/
const braces$2 = (input, options = {}) => {
let output = [];
if (Array.isArray(input)) {
for (const pattern of input) {
const result = braces$2.create(pattern, options);
if (Array.isArray(result)) {
output.push(...result);
} else {
output.push(result);
}
}
} else {
output = [].concat(braces$2.create(input, options));
}
if (options && options.expand === true && options.nodupes === true) {
output = [...new Set(output)];
}
return output;
};
/**
* Parse the given `str` with the given `options`.
*
* ```js
* // braces.parse(pattern, [, options]);
* const ast = braces.parse('a/{b,c}/d');
* console.log(ast);
* ```
* @param {String} pattern Brace pattern to parse
* @param {Object} options
* @return {Object} Returns an AST
* @api public
*/
braces$2.parse = (input, options = {}) => parse$b(input, options);
/**
* Creates a braces string from an AST, or an AST node.
*
* ```js
* const braces = require('braces');
* let ast = braces.parse('foo/{a,b}/bar');
* console.log(braces.stringify(ast.nodes[2])); //=> '{a,b}'
* ```
* @param {String} `input` Brace pattern or AST.
* @param {Object} `options`
* @return {String} Returns a brace pattern string.
* @api public
*/
braces$2.stringify = (input, options = {}) => {
if (typeof input === 'string') {
return stringify$3(braces$2.parse(input, options), options);
}
return stringify$3(input, options);
};
/**
* Compiles a brace pattern into a regex-compatible, optimized string.
* This method is called by the main [braces](#braces) function by default.
*
* ```js
* const braces = require('braces');
* console.log(braces.compile('a/{b,c}/d'));
* //=> 'a/(b|c)/d'
* ```
* @param {String} `input` Brace pattern or AST.
* @param {Object} `options`
* @return {String} Returns a regex-compatible string.
* @api public
*/
braces$2.compile = (input, options = {}) => {
if (typeof input === 'string') {
input = braces$2.parse(input, options);
}
return compile(input, options);
};
/**
* Expands a brace pattern into an array. This method is called by the
* main [braces](#braces) function when `options.expand` is true. Before
* using this method it's recommended that you read the [performance notes](#performance)
* and advantages of using [.compile](#compile) instead.
*
* ```js
* const braces = require('braces');
* console.log(braces.expand('a/{b,c}/d'));
* //=> ['a/b/d', 'a/c/d'];
* ```
* @param {String} `pattern` Brace pattern
* @param {Object} `options`
* @return {Array} Returns an array of expanded values.
* @api public
*/
braces$2.expand = (input, options = {}) => {
if (typeof input === 'string') {
input = braces$2.parse(input, options);
}
let result = expand$1(input, options);
// filter out empty strings if specified
if (options.noempty === true) {
result = result.filter(Boolean);
}
// filter out duplicates if specified
if (options.nodupes === true) {
result = [...new Set(result)];
}
return result;
};
/**
* Processes a brace pattern and returns either an expanded array
* (if `options.expand` is true) or a highly optimized regex-compatible string.
* This method is called by the main [braces](#braces) function.
*
* ```js
* const braces = require('braces');
* console.log(braces.create('user-{200..300}/project-{a,b,c}-{1..10}'))
* //=> 'user-(20[0-9]|2[1-9][0-9]|300)/project-(a|b|c)-([1-9]|10)'
* ```
* @param {String} `pattern` Brace pattern
* @param {Object} `options`
* @return {Array} Returns an array of expanded values.
* @api public
*/
braces$2.create = (input, options = {}) => {
if (input === '' || input.length < 3) {
return [input];
}
return options.expand !== true
? braces$2.compile(input, options)
: braces$2.expand(input, options);
};
/**
* Expose "braces"
*/
var braces_1 = braces$2;
const util = require$$0$5;
const braces$1 = braces_1;
const picomatch$2 = picomatch$3;
const utils$b = utils$k;
const isEmptyString = val => val === '' || val === './';
/**
* Returns an array of strings that match one or more glob patterns.
*
* ```js
* const mm = require('micromatch');
* // mm(list, patterns[, options]);
*
* console.log(mm(['a.js', 'a.txt'], ['*.js']));
* //=> [ 'a.js' ]
* ```
* @param {String|Array<string>} `list` List of strings to match.
* @param {String|Array<string>} `patterns` One or more glob patterns to use for matching.
* @param {Object} `options` See available [options](#options)
* @return {Array} Returns an array of matches
* @summary false
* @api public
*/
const micromatch$1 = (list, patterns, options) => {
patterns = [].concat(patterns);
list = [].concat(list);
let omit = new Set();
let keep = new Set();
let items = new Set();
let negatives = 0;
let onResult = state => {
items.add(state.output);
if (options && options.onResult) {
options.onResult(state);
}
};
for (let i = 0; i < patterns.length; i++) {
let isMatch = picomatch$2(String(patterns[i]), { ...options, onResult }, true);
let negated = isMatch.state.negated || isMatch.state.negatedExtglob;
if (negated) negatives++;
for (let item of list) {
let matched = isMatch(item, true);
let match = negated ? !matched.isMatch : matched.isMatch;
if (!match) continue;
if (negated) {
omit.add(matched.output);
} else {
omit.delete(matched.output);
keep.add(matched.output);
}
}
}
let result = negatives === patterns.length ? [...items] : [...keep];
let matches = result.filter(item => !omit.has(item));
if (options && matches.length === 0) {
if (options.failglob === true) {
throw new Error(`No matches found for "${patterns.join(', ')}"`);
}
if (options.nonull === true || options.nullglob === true) {
return options.unescape ? patterns.map(p => p.replace(/\\/g, '')) : patterns;
}
}
return matches;
};
/**
* Backwards compatibility
*/
micromatch$1.match = micromatch$1;
/**
* Returns a matcher function from the given glob `pattern` and `options`.
* The returned function takes a string to match as its only argument and returns
* true if the string is a match.
*
* ```js
* const mm = require('micromatch');
* // mm.matcher(pattern[, options]);
*
* const isMatch = mm.matcher('*.!(*a)');
* console.log(isMatch('a.a')); //=> false
* console.log(isMatch('a.b')); //=> true
* ```
* @param {String} `pattern` Glob pattern
* @param {Object} `options`
* @return {Function} Returns a matcher function.
* @api public
*/
micromatch$1.matcher = (pattern, options) => picomatch$2(pattern, options);
/**
* Returns true if **any** of the given glob `patterns` match the specified `string`.
*
* ```js
* const mm = require('micromatch');
* // mm.isMatch(string, patterns[, options]);
*
* console.log(mm.isMatch('a.a', ['b.*', '*.a'])); //=> true
* console.log(mm.isMatch('a.a', 'b.*')); //=> false
* ```
* @param {String} `str` The string to test.
* @param {String|Array} `patterns` One or more glob patterns to use for matching.
* @param {Object} `[options]` See available [options](#options).
* @return {Boolean} Returns true if any patterns match `str`
* @api public
*/
micromatch$1.isMatch = (str, patterns, options) => picomatch$2(patterns, options)(str);
/**
* Backwards compatibility
*/
micromatch$1.any = micromatch$1.isMatch;
/**
* Returns a list of strings that _**do not match any**_ of the given `patterns`.
*
* ```js
* const mm = require('micromatch');
* // mm.not(list, patterns[, options]);
*
* console.log(mm.not(['a.a', 'b.b', 'c.c'], '*.a'));
* //=> ['b.b', 'c.c']
* ```
* @param {Array} `list` Array of strings to match.
* @param {String|Array} `patterns` One or more glob pattern to use for matching.
* @param {Object} `options` See available [options](#options) for changing how matches are performed
* @return {Array} Returns an array of strings that **do not match** the given patterns.
* @api public
*/
micromatch$1.not = (list, patterns, options = {}) => {
patterns = [].concat(patterns).map(String);
let result = new Set();
let items = [];
let onResult = state => {
if (options.onResult) options.onResult(state);
items.push(state.output);
};
let matches = new Set(micromatch$1(list, patterns, { ...options, onResult }));
for (let item of items) {
if (!matches.has(item)) {
result.add(item);
}
}
return [...result];
};
/**
* Returns true if the given `string` contains the given pattern. Similar
* to [.isMatch](#isMatch) but the pattern can match any part of the string.
*
* ```js
* var mm = require('micromatch');
* // mm.contains(string, pattern[, options]);
*
* console.log(mm.contains('aa/bb/cc', '*b'));
* //=> true
* console.log(mm.contains('aa/bb/cc', '*d'));
* //=> false
* ```
* @param {String} `str` The string to match.
* @param {String|Array} `patterns` Glob pattern to use for matching.
* @param {Object} `options` See available [options](#options) for changing how matches are performed
* @return {Boolean} Returns true if any of the patterns matches any part of `str`.
* @api public
*/
micromatch$1.contains = (str, pattern, options) => {
if (typeof str !== 'string') {
throw new TypeError(`Expected a string: "${util.inspect(str)}"`);
}
if (Array.isArray(pattern)) {
return pattern.some(p => micromatch$1.contains(str, p, options));
}
if (typeof pattern === 'string') {
if (isEmptyString(str) || isEmptyString(pattern)) {
return false;
}
if (str.includes(pattern) || (str.startsWith('./') && str.slice(2).includes(pattern))) {
return true;
}
}
return micromatch$1.isMatch(str, pattern, { ...options, contains: true });
};
/**
* Filter the keys of the given object with the given `glob` pattern
* and `options`. Does not attempt to match nested keys. If you need this feature,
* use [glob-object][] instead.
*
* ```js
* const mm = require('micromatch');
* // mm.matchKeys(object, patterns[, options]);
*
* const obj = { aa: 'a', ab: 'b', ac: 'c' };
* console.log(mm.matchKeys(obj, '*b'));
* //=> { ab: 'b' }
* ```
* @param {Object} `object` The object with keys to filter.
* @param {String|Array} `patterns` One or more glob patterns to use for matching.
* @param {Object} `options` See available [options](#options) for changing how matches are performed
* @return {Object} Returns an object with only keys that match the given patterns.
* @api public
*/
micromatch$1.matchKeys = (obj, patterns, options) => {
if (!utils$b.isObject(obj)) {
throw new TypeError('Expected the first argument to be an object');
}
let keys = micromatch$1(Object.keys(obj), patterns, options);
let res = {};
for (let key of keys) res[key] = obj[key];
return res;
};
/**
* Returns true if some of the strings in the given `list` match any of the given glob `patterns`.
*
* ```js
* const mm = require('micromatch');
* // mm.some(list, patterns[, options]);
*
* console.log(mm.some(['foo.js', 'bar.js'], ['*.js', '!foo.js']));
* // true
* console.log(mm.some(['foo.js'], ['*.js', '!foo.js']));
* // false
* ```
* @param {String|Array} `list` The string or array of strings to test. Returns as soon as the first match is found.
* @param {String|Array} `patterns` One or more glob patterns to use for matching.
* @param {Object} `options` See available [options](#options) for changing how matches are performed
* @return {Boolean} Returns true if any `patterns` matches any of the strings in `list`
* @api public
*/
micromatch$1.some = (list, patterns, options) => {
let items = [].concat(list);
for (let pattern of [].concat(patterns)) {
let isMatch = picomatch$2(String(pattern), options);
if (items.some(item => isMatch(item))) {
return true;
}
}
return false;
};
/**
* Returns true if every string in the given `list` matches
* any of the given glob `patterns`.
*
* ```js
* const mm = require('micromatch');
* // mm.every(list, patterns[, options]);
*
* console.log(mm.every('foo.js', ['foo.js']));
* // true
* console.log(mm.every(['foo.js', 'bar.js'], ['*.js']));
* // true
* console.log(mm.every(['foo.js', 'bar.js'], ['*.js', '!foo.js']));
* // false
* console.log(mm.every(['foo.js'], ['*.js', '!foo.js']));
* // false
* ```
* @param {String|Array} `list` The string or array of strings to test.
* @param {String|Array} `patterns` One or more glob patterns to use for matching.
* @param {Object} `options` See available [options](#options) for changing how matches are performed
* @return {Boolean} Returns true if every string in `list` matches any of the given `patterns`
* @api public
*/
micromatch$1.every = (list, patterns, options) => {
let items = [].concat(list);
for (let pattern of [].concat(patterns)) {
let isMatch = picomatch$2(String(pattern), options);
if (!items.every(item => isMatch(item))) {
return false;
}
}
return true;
};
/**
* Returns true if **all** of the given `patterns` match
* the specified string.
*
* ```js
* const mm = require('micromatch');
* // mm.all(string, patterns[, options]);
*
* console.log(mm.all('foo.js', ['foo.js']));
* // true
*
* console.log(mm.all('foo.js', ['*.js', '!foo.js']));
* // false
*
* console.log(mm.all('foo.js', ['*.js', 'foo.js']));
* // true
*
* console.log(mm.all('foo.js', ['*.js', 'f*', '*o*', '*o.js']));
* // true
* ```
* @param {String|Array} `str` The string to test.
* @param {String|Array} `patterns` One or more glob patterns to use for matching.
* @param {Object} `options` See available [options](#options) for changing how matches are performed
* @return {Boolean} Returns true if all of the `patterns` match `str`
* @api public
*/
micromatch$1.all = (str, patterns, options) => {
if (typeof str !== 'string') {
throw new TypeError(`Expected a string: "${util.inspect(str)}"`);
}
return [].concat(patterns).every(p => picomatch$2(p, options)(str));
};
/**
* Returns an array of matches captured by `pattern` in `string`, or `null` if the pattern did not match.
*
* ```js
* const mm = require('micromatch');
* // mm.capture(pattern, string[, options]);
*
* console.log(mm.capture('test/*.js', 'test/foo.js'));
* //=> ['foo']
* console.log(mm.capture('test/*.js', 'foo/bar.css'));
* //=> null
* ```
* @param {String} `glob` Glob pattern to use for matching.
* @param {String} `input` String to match
* @param {Object} `options` See available [options](#options) for changing how matches are performed
* @return {Array|null} Returns an array of captures if the input matches the glob pattern, otherwise `null`.
* @api public
*/
micromatch$1.capture = (glob, input, options) => {
let posix = utils$b.isWindows(options);
let regex = picomatch$2.makeRe(String(glob), { ...options, capture: true });
let match = regex.exec(posix ? utils$b.toPosixSlashes(input) : input);
if (match) {
return match.slice(1).map(v => v === void 0 ? '' : v);
}
};
/**
* Create a regular expression from the given glob `pattern`.
*
* ```js
* const mm = require('micromatch');
* // mm.makeRe(pattern[, options]);
*
* console.log(mm.makeRe('*.js'));
* //=> /^(?:(\.[\\\/])?(?!\.)(?=.)[^\/]*?\.js)$/
* ```
* @param {String} `pattern` A glob pattern to convert to regex.
* @param {Object} `options`
* @return {RegExp} Returns a regex created from the given pattern.
* @api public
*/
micromatch$1.makeRe = (...args) => picomatch$2.makeRe(...args);
/**
* Scan a glob pattern to separate the pattern into segments. Used
* by the [split](#split) method.
*
* ```js
* const mm = require('micromatch');
* const state = mm.scan(pattern[, options]);
* ```
* @param {String} `pattern`
* @param {Object} `options`
* @return {Object} Returns an object with the separated pattern segments and related state.
* @api public
*/
micromatch$1.scan = (...args) => picomatch$2.scan(...args);
/**
* Parse a glob pattern to create the source string for a regular
* expression.
*
* ```js
* const mm = require('micromatch');
* const state = mm.parse(pattern[, options]);
* ```
* @param {String} `glob`
* @param {Object} `options`
* @return {Object} Returns an object with useful properties and output to be used as regex source string.
* @api public
*/
micromatch$1.parse = (patterns, options) => {
let res = [];
for (let pattern of [].concat(patterns || [])) {
for (let str of braces$1(String(pattern), options)) {
res.push(picomatch$2.parse(str, options));
}
}
return res;
};
/**
* Process the given brace `pattern`.
*
* ```js
* const { braces } = require('micromatch');
* console.log(braces('foo/{a,b,c}/bar'));
* //=> [ 'foo/(a|b|c)/bar' ]
*
* console.log(braces('foo/{a,b,c}/bar', { expand: true }));
* //=> [ 'foo/a/bar', 'foo/b/bar', 'foo/c/bar' ]
* ```
* @param {String} `pattern` String with brace pattern to process.
* @param {Object} `options` Any [options](#options) to change how expansion is performed. See the [braces][] library for all available options.
* @return {Array}
* @api public
*/
micromatch$1.braces = (pattern, options) => {
if (typeof pattern !== 'string') throw new TypeError('Expected a string');
if ((options && options.nobrace === true) || !/\{.*\}/.test(pattern)) {
return [pattern];
}
return braces$1(pattern, options);
};
/**
* Expand braces
*/
micromatch$1.braceExpand = (pattern, options) => {
if (typeof pattern !== 'string') throw new TypeError('Expected a string');
return micromatch$1.braces(pattern, { ...options, expand: true });
};
/**
* Expose micromatch
*/
var micromatch_1 = micromatch$1;
var micromatch$2 = /*@__PURE__*/getDefaultExportFromCjs(micromatch_1);
Object.defineProperty(pattern$1, "__esModule", { value: true });
pattern$1.removeDuplicateSlashes = pattern$1.matchAny = pattern$1.convertPatternsToRe = pattern$1.makeRe = pattern$1.getPatternParts = pattern$1.expandBraceExpansion = pattern$1.expandPatternsWithBraceExpansion = pattern$1.isAffectDepthOfReadingPattern = pattern$1.endsWithSlashGlobStar = pattern$1.hasGlobStar = pattern$1.getBaseDirectory = pattern$1.isPatternRelatedToParentDirectory = pattern$1.getPatternsOutsideCurrentDirectory = pattern$1.getPatternsInsideCurrentDirectory = pattern$1.getPositivePatterns = pattern$1.getNegativePatterns = pattern$1.isPositivePattern = pattern$1.isNegativePattern = pattern$1.convertToNegativePattern = pattern$1.convertToPositivePattern = pattern$1.isDynamicPattern = pattern$1.isStaticPattern = void 0;
const path$g = require$$0$4;
const globParent$1 = globParent$2;
const micromatch = micromatch_1;
const GLOBSTAR$1 = '**';
const ESCAPE_SYMBOL = '\\';
const COMMON_GLOB_SYMBOLS_RE = /[*?]|^!/;
const REGEX_CHARACTER_CLASS_SYMBOLS_RE = /\[[^[]*]/;
const REGEX_GROUP_SYMBOLS_RE = /(?:^|[^!*+?@])\([^(]*\|[^|]*\)/;
const GLOB_EXTENSION_SYMBOLS_RE = /[!*+?@]\([^(]*\)/;
const BRACE_EXPANSION_SEPARATORS_RE = /,|\.\./;
/**
* Matches a sequence of two or more consecutive slashes, excluding the first two slashes at the beginning of the string.
* The latter is due to the presence of the device path at the beginning of the UNC path.
*/
const DOUBLE_SLASH_RE$1 = /(?!^)\/{2,}/g;
function isStaticPattern(pattern, options = {}) {
return !isDynamicPattern(pattern, options);
}
pattern$1.isStaticPattern = isStaticPattern;
function isDynamicPattern(pattern, options = {}) {
/**
* A special case with an empty string is necessary for matching patterns that start with a forward slash.
* An empty string cannot be a dynamic pattern.
* For example, the pattern `/lib/*` will be spread into parts: '', 'lib', '*'.
*/
if (pattern === '') {
return false;
}
/**
* When the `caseSensitiveMatch` option is disabled, all patterns must be marked as dynamic, because we cannot check
* filepath directly (without read directory).
*/
if (options.caseSensitiveMatch === false || pattern.includes(ESCAPE_SYMBOL)) {
return true;
}
if (COMMON_GLOB_SYMBOLS_RE.test(pattern) || REGEX_CHARACTER_CLASS_SYMBOLS_RE.test(pattern) || REGEX_GROUP_SYMBOLS_RE.test(pattern)) {
return true;
}
if (options.extglob !== false && GLOB_EXTENSION_SYMBOLS_RE.test(pattern)) {
return true;
}
if (options.braceExpansion !== false && hasBraceExpansion(pattern)) {
return true;
}
return false;
}
pattern$1.isDynamicPattern = isDynamicPattern;
function hasBraceExpansion(pattern) {
const openingBraceIndex = pattern.indexOf('{');
if (openingBraceIndex === -1) {
return false;
}
const closingBraceIndex = pattern.indexOf('}', openingBraceIndex + 1);
if (closingBraceIndex === -1) {
return false;
}
const braceContent = pattern.slice(openingBraceIndex, closingBraceIndex);
return BRACE_EXPANSION_SEPARATORS_RE.test(braceContent);
}
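// hasBraceExpansion('src/{a,b}/*') //=> true, hasBraceExpansion('src/{a}/*') //=> false
// (a brace pair only counts when it contains a comma or a `..` range separator)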
function convertToPositivePattern(pattern) {
return isNegativePattern(pattern) ? pattern.slice(1) : pattern;
}
pattern$1.convertToPositivePattern = convertToPositivePattern;
function convertToNegativePattern(pattern) {
return '!' + pattern;
}
pattern$1.convertToNegativePattern = convertToNegativePattern;
function isNegativePattern(pattern) {
return pattern.startsWith('!') && pattern[1] !== '(';
}
pattern$1.isNegativePattern = isNegativePattern;
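// isNegativePattern('!*.js') //=> true, isNegativePattern('!(a|b)') //=> false (extglob, not negation)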
function isPositivePattern(pattern) {
return !isNegativePattern(pattern);
}
pattern$1.isPositivePattern = isPositivePattern;
function getNegativePatterns(patterns) {
return patterns.filter(isNegativePattern);
}
pattern$1.getNegativePatterns = getNegativePatterns;
function getPositivePatterns$1(patterns) {
return patterns.filter(isPositivePattern);
}
pattern$1.getPositivePatterns = getPositivePatterns$1;
/**
* Returns patterns that can be applied inside the current directory.
*
* @example
* // ['./*', '*', 'a/*']
* getPatternsInsideCurrentDirectory(['./*', '*', 'a/*', '../*', './../*'])
*/
function getPatternsInsideCurrentDirectory(patterns) {
return patterns.filter((pattern) => !isPatternRelatedToParentDirectory(pattern));
}
pattern$1.getPatternsInsideCurrentDirectory = getPatternsInsideCurrentDirectory;
/**
* Returns patterns to be expanded relative to (outside) the current directory.
*
* @example
* // ['../*', './../*']
* getPatternsOutsideCurrentDirectory(['./*', '*', 'a/*', '../*', './../*'])
*/
function getPatternsOutsideCurrentDirectory(patterns) {
return patterns.filter(isPatternRelatedToParentDirectory);
}
pattern$1.getPatternsOutsideCurrentDirectory = getPatternsOutsideCurrentDirectory;
function isPatternRelatedToParentDirectory(pattern) {
return pattern.startsWith('..') || pattern.startsWith('./..');
}
pattern$1.isPatternRelatedToParentDirectory = isPatternRelatedToParentDirectory;
function getBaseDirectory(pattern) {
return globParent$1(pattern, { flipBackslashes: false });
}
pattern$1.getBaseDirectory = getBaseDirectory;
function hasGlobStar(pattern) {
return pattern.includes(GLOBSTAR$1);
}
pattern$1.hasGlobStar = hasGlobStar;
function endsWithSlashGlobStar(pattern) {
return pattern.endsWith('/' + GLOBSTAR$1);
}
pattern$1.endsWithSlashGlobStar = endsWithSlashGlobStar;
function isAffectDepthOfReadingPattern(pattern) {
const basename = path$g.basename(pattern);
return endsWithSlashGlobStar(pattern) || isStaticPattern(basename);
}
pattern$1.isAffectDepthOfReadingPattern = isAffectDepthOfReadingPattern;
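// isAffectDepthOfReadingPattern('a/b/**')    //=> true  (ends with '/**')
// isAffectDepthOfReadingPattern('a/file.md') //=> true  (static basename)
// isAffectDepthOfReadingPattern('a/*.js')    //=> false (dynamic basename)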
function expandPatternsWithBraceExpansion(patterns) {
return patterns.reduce((collection, pattern) => {
return collection.concat(expandBraceExpansion(pattern));
}, []);
}
pattern$1.expandPatternsWithBraceExpansion = expandPatternsWithBraceExpansion;
function expandBraceExpansion(pattern) {
const patterns = micromatch.braces(pattern, { expand: true, nodupes: true, keepEscaping: true });
/**
* Sort the patterns by length so that the same depth patterns are processed side by side.
* `a/{b,}/{c,}/*` – `['a///*', 'a/b//*', 'a//c/*', 'a/b/c/*']`
*/
patterns.sort((a, b) => a.length - b.length);
/**
* Micromatch can return an empty string in the case of patterns like `{a,}`.
*/
return patterns.filter((pattern) => pattern !== '');
}
pattern$1.expandBraceExpansion = expandBraceExpansion;
function getPatternParts(pattern, options) {
let { parts } = micromatch.scan(pattern, Object.assign(Object.assign({}, options), { parts: true }));
/**
* The scan method returns an empty array in some cases.
* See micromatch/picomatch#58 for more details.
*/
if (parts.length === 0) {
parts = [pattern];
}
/**
* The scan method does not return an empty part for the pattern with a forward slash.
* This is another part of micromatch/picomatch#58.
*/
if (parts[0].startsWith('/')) {
parts[0] = parts[0].slice(1);
parts.unshift('');
}
return parts;
}
pattern$1.getPatternParts = getPatternParts;
function makeRe(pattern, options) {
return micromatch.makeRe(pattern, options);
}
pattern$1.makeRe = makeRe;
function convertPatternsToRe(patterns, options) {
return patterns.map((pattern) => makeRe(pattern, options));
}
pattern$1.convertPatternsToRe = convertPatternsToRe;
function matchAny(entry, patternsRe) {
return patternsRe.some((patternRe) => patternRe.test(entry));
}
pattern$1.matchAny = matchAny;
/**
* This package only works with forward slashes as a path separator.
* Because of this, we cannot use the standard `path.normalize` method, because on the Windows platform it uses backslashes.
*/
function removeDuplicateSlashes(pattern) {
return pattern.replace(DOUBLE_SLASH_RE$1, '/');
}
pattern$1.removeDuplicateSlashes = removeDuplicateSlashes;
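// removeDuplicateSlashes('a//b///c')          //=> 'a/b/c'
// removeDuplicateSlashes('//server/share//x') //=> '//server/share/x' (leading UNC slashes are kept)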
var stream$4 = {};
/*
* merge2
* https://github.com/teambition/merge2
*
* Copyright (c) 2014-2020 Teambition
* Licensed under the MIT license.
*/
const Stream = require$$0$6;
const PassThrough = Stream.PassThrough;
const slice = Array.prototype.slice;
var merge2_1 = merge2$1;
function merge2$1 () {
const streamsQueue = [];
const args = slice.call(arguments);
let merging = false;
let options = args[args.length - 1];
if (options && !Array.isArray(options) && options.pipe == null) {
args.pop();
} else {
options = {};
}
const doEnd = options.end !== false;
const doPipeError = options.pipeError === true;
if (options.objectMode == null) {
options.objectMode = true;
}
if (options.highWaterMark == null) {
options.highWaterMark = 64 * 1024;
}
const mergedStream = PassThrough(options);
function addStream () {
for (let i = 0, len = arguments.length; i < len; i++) {
streamsQueue.push(pauseStreams(arguments[i], options));
}
mergeStream();
return this
}
function mergeStream () {
if (merging) {
return
}
merging = true;
let streams = streamsQueue.shift();
if (!streams) {
process.nextTick(endStream);
return
}
if (!Array.isArray(streams)) {
streams = [streams];
}
let pipesCount = streams.length + 1;
function next () {
if (--pipesCount > 0) {
return
}
merging = false;
mergeStream();
}
function pipe (stream) {
function onend () {
stream.removeListener('merge2UnpipeEnd', onend);
stream.removeListener('end', onend);
if (doPipeError) {
stream.removeListener('error', onerror);
}
next();
}
function onerror (err) {
mergedStream.emit('error', err);
}
// skip ended stream
if (stream._readableState.endEmitted) {
return next()
}
stream.on('merge2UnpipeEnd', onend);
stream.on('end', onend);
if (doPipeError) {
stream.on('error', onerror);
}
stream.pipe(mergedStream, { end: false });
// compatible for old stream
stream.resume();
}
for (let i = 0; i < streams.length; i++) {
pipe(streams[i]);
}
next();
}
function endStream () {
merging = false;
// emit 'queueDrain' when all streams merged.
mergedStream.emit('queueDrain');
if (doEnd) {
mergedStream.end();
}
}
mergedStream.setMaxListeners(0);
mergedStream.add = addStream;
mergedStream.on('unpipe', function (stream) {
stream.emit('merge2UnpipeEnd');
});
if (args.length) {
addStream.apply(null, args);
}
return mergedStream
}
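// Typical merge2 usage (illustrative): merge the given readable streams into one PassThrough,
// optionally keeping it open for later additions via `.add()`:
//   const merged = merge2$1(streamA, streamB, { end: false });
//   merged.add(streamC);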
// check and pause streams for pipe.
function pauseStreams (streams, options) {
if (!Array.isArray(streams)) {
// Backwards-compat with old-style streams
if (!streams._readableState && streams.pipe) {
streams = streams.pipe(PassThrough(options));
}
if (!streams._readableState || !streams.pause || !streams.pipe) {
throw new Error('Only readable stream can be merged.')
}
streams.pause();
} else {
for (let i = 0, len = streams.length; i < len; i++) {
streams[i] = pauseStreams(streams[i], options);
}
}
return streams
}
Object.defineProperty(stream$4, "__esModule", { value: true });
stream$4.merge = void 0;
const merge2 = merge2_1;
function merge$1(streams) {
const mergedStream = merge2(streams);
streams.forEach((stream) => {
stream.once('error', (error) => mergedStream.emit('error', error));
});
mergedStream.once('close', () => propagateCloseEventToSources(streams));
mergedStream.once('end', () => propagateCloseEventToSources(streams));
return mergedStream;
}
stream$4.merge = merge$1;
function propagateCloseEventToSources(streams) {
streams.forEach((stream) => stream.emit('close'));
}
var string$2 = {};
Object.defineProperty(string$2, "__esModule", { value: true });
string$2.isEmpty = string$2.isString = void 0;
function isString$1(input) {
return typeof input === 'string';
}
string$2.isString = isString$1;
function isEmpty$1(input) {
return input === '';
}
string$2.isEmpty = isEmpty$1;
Object.defineProperty(utils$g, "__esModule", { value: true });
utils$g.string = utils$g.stream = utils$g.pattern = utils$g.path = utils$g.fs = utils$g.errno = utils$g.array = void 0;
const array = array$1;
utils$g.array = array;
const errno = errno$1;
utils$g.errno = errno;
const fs$h = fs$i;
utils$g.fs = fs$h;
const path$f = path$i;
utils$g.path = path$f;
const pattern = pattern$1;
utils$g.pattern = pattern;
const stream$3 = stream$4;
utils$g.stream = stream$3;
const string$1 = string$2;
utils$g.string = string$1;
Object.defineProperty(tasks, "__esModule", { value: true });
tasks.convertPatternGroupToTask = tasks.convertPatternGroupsToTasks = tasks.groupPatternsByBaseDirectory = tasks.getNegativePatternsAsPositive = tasks.getPositivePatterns = tasks.convertPatternsToTasks = tasks.generate = void 0;
const utils$a = utils$g;
function generate(input, settings) {
const patterns = processPatterns(input, settings);
const ignore = processPatterns(settings.ignore, settings);
const positivePatterns = getPositivePatterns(patterns);
const negativePatterns = getNegativePatternsAsPositive(patterns, ignore);
const staticPatterns = positivePatterns.filter((pattern) => utils$a.pattern.isStaticPattern(pattern, settings));
const dynamicPatterns = positivePatterns.filter((pattern) => utils$a.pattern.isDynamicPattern(pattern, settings));
const staticTasks = convertPatternsToTasks(staticPatterns, negativePatterns, /* dynamic */ false);
const dynamicTasks = convertPatternsToTasks(dynamicPatterns, negativePatterns, /* dynamic */ true);
return staticTasks.concat(dynamicTasks);
}
tasks.generate = generate;
function processPatterns(input, settings) {
let patterns = input;
/**
* The original pattern like `{,*,**,a/*}` can lead to problems when checking the depth while matching an entry,
* and to some problems with the micromatch package (see fast-glob issues: #365, #394).
*
* To solve this problem, we expand all patterns containing brace expansion. This can lead to a slight slowdown
* in matching in the case of a large set of patterns after expansion.
*/
if (settings.braceExpansion) {
patterns = utils$a.pattern.expandPatternsWithBraceExpansion(patterns);
}
/**
* If the `baseNameMatch` option is enabled, we must add globstar to patterns, so that they can be used
* at any nesting level.
*
* We do this here, because otherwise we have to complicate the filtering logic. For example, we need to change
* the pattern in the filter before creating a regular expression. There is no need to change the patterns
* in the application. Only on the input.
*/
if (settings.baseNameMatch) {
patterns = patterns.map((pattern) => pattern.includes('/') ? pattern : `**/${pattern}`);
}
/**
* This method also removes duplicate slashes that may have been in the pattern or formed as a result of expansion.
*/
return patterns.map((pattern) => utils$a.pattern.removeDuplicateSlashes(pattern));
}
/**
* Returns tasks grouped by basic pattern directories.
*
* Patterns that can be found inside (`./`) and outside (`../`) the current directory are handled separately.
* This is necessary because directory traversal starts at the base directory and goes deeper.
*/
function convertPatternsToTasks(positive, negative, dynamic) {
const tasks = [];
const patternsOutsideCurrentDirectory = utils$a.pattern.getPatternsOutsideCurrentDirectory(positive);
const patternsInsideCurrentDirectory = utils$a.pattern.getPatternsInsideCurrentDirectory(positive);
const outsideCurrentDirectoryGroup = groupPatternsByBaseDirectory(patternsOutsideCurrentDirectory);
const insideCurrentDirectoryGroup = groupPatternsByBaseDirectory(patternsInsideCurrentDirectory);
tasks.push(...convertPatternGroupsToTasks(outsideCurrentDirectoryGroup, negative, dynamic));
/*
* For the sake of reducing future accesses to the file system, we merge all tasks within the current directory
* into a global task, if at least one pattern refers to the root (`.`). In this case, the global task covers the rest.
*/
if ('.' in insideCurrentDirectoryGroup) {
tasks.push(convertPatternGroupToTask('.', patternsInsideCurrentDirectory, negative, dynamic));
}
else {
tasks.push(...convertPatternGroupsToTasks(insideCurrentDirectoryGroup, negative, dynamic));
}
return tasks;
}
tasks.convertPatternsToTasks = convertPatternsToTasks;
function getPositivePatterns(patterns) {
return utils$a.pattern.getPositivePatterns(patterns);
}
tasks.getPositivePatterns = getPositivePatterns;
function getNegativePatternsAsPositive(patterns, ignore) {
const negative = utils$a.pattern.getNegativePatterns(patterns).concat(ignore);
const positive = negative.map(utils$a.pattern.convertToPositivePattern);
return positive;
}
tasks.getNegativePatternsAsPositive = getNegativePatternsAsPositive;
function groupPatternsByBaseDirectory(patterns) {
const group = {};
return patterns.reduce((collection, pattern) => {
const base = utils$a.pattern.getBaseDirectory(pattern);
if (base in collection) {
collection[base].push(pattern);
}
else {
collection[base] = [pattern];
}
return collection;
}, group);
}
tasks.groupPatternsByBaseDirectory = groupPatternsByBaseDirectory;
function convertPatternGroupsToTasks(positive, negative, dynamic) {
return Object.keys(positive).map((base) => {
return convertPatternGroupToTask(base, positive[base], negative, dynamic);
});
}
tasks.convertPatternGroupsToTasks = convertPatternGroupsToTasks;
function convertPatternGroupToTask(base, positive, negative, dynamic) {
return {
dynamic,
positive,
negative,
base,
patterns: [].concat(positive, negative.map(utils$a.pattern.convertToNegativePattern))
};
}
tasks.convertPatternGroupToTask = convertPatternGroupToTask;
var async$7 = {};
var async$6 = {};
var out$3 = {};
var async$5 = {};
var async$4 = {};
var out$2 = {};
var async$3 = {};
var out$1 = {};
var async$2 = {};
Object.defineProperty(async$2, "__esModule", { value: true });
async$2.read = void 0;
function read$3(path, settings, callback) {
settings.fs.lstat(path, (lstatError, lstat) => {
if (lstatError !== null) {
callFailureCallback$2(callback, lstatError);
return;
}
if (!lstat.isSymbolicLink() || !settings.followSymbolicLink) {
callSuccessCallback$2(callback, lstat);
return;
}
settings.fs.stat(path, (statError, stat) => {
if (statError !== null) {
if (settings.throwErrorOnBrokenSymbolicLink) {
callFailureCallback$2(callback, statError);
return;
}
callSuccessCallback$2(callback, lstat);
return;
}
if (settings.markSymbolicLink) {
stat.isSymbolicLink = () => true;
}
callSuccessCallback$2(callback, stat);
});
});
}
async$2.read = read$3;
function callFailureCallback$2(callback, error) {
callback(error);
}
function callSuccessCallback$2(callback, result) {
callback(null, result);
}
var sync$8 = {};
Object.defineProperty(sync$8, "__esModule", { value: true });
sync$8.read = void 0;
function read$2(path, settings) {
const lstat = settings.fs.lstatSync(path);
if (!lstat.isSymbolicLink() || !settings.followSymbolicLink) {
return lstat;
}
try {
const stat = settings.fs.statSync(path);
if (settings.markSymbolicLink) {
stat.isSymbolicLink = () => true;
}
return stat;
}
catch (error) {
if (!settings.throwErrorOnBrokenSymbolicLink) {
return lstat;
}
throw error;
}
}
sync$8.read = read$2;
var settings$3 = {};
var fs$g = {};
(function (exports) {
Object.defineProperty(exports, "__esModule", { value: true });
exports.createFileSystemAdapter = exports.FILE_SYSTEM_ADAPTER = void 0;
const fs = require$$0__default;
exports.FILE_SYSTEM_ADAPTER = {
lstat: fs.lstat,
stat: fs.stat,
lstatSync: fs.lstatSync,
statSync: fs.statSync
};
function createFileSystemAdapter(fsMethods) {
if (fsMethods === undefined) {
return exports.FILE_SYSTEM_ADAPTER;
}
return Object.assign(Object.assign({}, exports.FILE_SYSTEM_ADAPTER), fsMethods);
}
exports.createFileSystemAdapter = createFileSystemAdapter;
} (fs$g));
Object.defineProperty(settings$3, "__esModule", { value: true });
const fs$f = fs$g;
let Settings$2 = class Settings {
constructor(_options = {}) {
this._options = _options;
this.followSymbolicLink = this._getValue(this._options.followSymbolicLink, true);
this.fs = fs$f.createFileSystemAdapter(this._options.fs);
this.markSymbolicLink = this._getValue(this._options.markSymbolicLink, false);
this.throwErrorOnBrokenSymbolicLink = this._getValue(this._options.throwErrorOnBrokenSymbolicLink, true);
}
_getValue(option, value) {
return option !== null && option !== void 0 ? option : value;
}
};
settings$3.default = Settings$2;
Object.defineProperty(out$1, "__esModule", { value: true });
out$1.statSync = out$1.stat = out$1.Settings = void 0;
const async$1 = async$2;
const sync$7 = sync$8;
const settings_1$3 = settings$3;
out$1.Settings = settings_1$3.default;
function stat$4(path, optionsOrSettingsOrCallback, callback) {
if (typeof optionsOrSettingsOrCallback === 'function') {
async$1.read(path, getSettings$2(), optionsOrSettingsOrCallback);
return;
}
async$1.read(path, getSettings$2(optionsOrSettingsOrCallback), callback);
}
out$1.stat = stat$4;
function statSync(path, optionsOrSettings) {
const settings = getSettings$2(optionsOrSettings);
return sync$7.read(path, settings);
}
out$1.statSync = statSync;
function getSettings$2(settingsOrOptions = {}) {
if (settingsOrOptions instanceof settings_1$3.default) {
return settingsOrOptions;
}
return new settings_1$3.default(settingsOrOptions);
}
/*! queue-microtask. MIT License. Feross Aboukhadijeh <https://feross.org/opensource> */
let promise;
var queueMicrotask_1 = typeof queueMicrotask === 'function'
? queueMicrotask.bind(typeof window !== 'undefined' ? window : commonjsGlobal)
// reuse resolved promise, and allocate it lazily
: cb => (promise || (promise = Promise.resolve()))
.then(cb)
.catch(err => setTimeout(() => { throw err }, 0));
/*! run-parallel. MIT License. Feross Aboukhadijeh <https://feross.org/opensource> */
var runParallel_1 = runParallel;
const queueMicrotask$1 = queueMicrotask_1;
function runParallel (tasks, cb) {
let results, pending, keys;
let isSync = true;
if (Array.isArray(tasks)) {
results = [];
pending = tasks.length;
} else {
keys = Object.keys(tasks);
results = {};
pending = keys.length;
}
function done (err) {
function end () {
if (cb) cb(err, results);
cb = null;
}
if (isSync) queueMicrotask$1(end);
else end();
}
function each (i, err, result) {
results[i] = result;
if (--pending === 0 || err) {
done(err);
}
}
if (!pending) {
// empty
done(null);
} else if (keys) {
// object
keys.forEach(function (key) {
tasks[key](function (err, result) { each(key, err, result); });
});
} else {
// array
tasks.forEach(function (task, i) {
task(function (err, result) { each(i, err, result); });
});
}
isSync = false;
}
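// A minimal usage sketch of `runParallel` (illustrative only, never invoked by this bundle;
// identifiers are hypothetical): it accepts an array or keyed object of node-style tasks
// and collects their results in the same shape, reporting the first error if any task fails.
function exampleRunParallelUsage () {
    runParallel([
        (done) => done(null, 'first'),
        (done) => setTimeout(() => done(null, 'second'), 10)
    ], (err, results) => {
        // `results` is ['first', 'second'], indexed like the input array.
        if (err) throw err;
        console.log(results);
    });
}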
var constants$2 = {};
Object.defineProperty(constants$2, "__esModule", { value: true });
constants$2.IS_SUPPORT_READDIR_WITH_FILE_TYPES = void 0;
const NODE_PROCESS_VERSION_PARTS = process.versions.node.split('.');
if (NODE_PROCESS_VERSION_PARTS[0] === undefined || NODE_PROCESS_VERSION_PARTS[1] === undefined) {
throw new Error(`Unexpected behavior. The 'process.versions.node' variable has invalid value: ${process.versions.node}`);
}
const MAJOR_VERSION = Number.parseInt(NODE_PROCESS_VERSION_PARTS[0], 10);
const MINOR_VERSION = Number.parseInt(NODE_PROCESS_VERSION_PARTS[1], 10);
const SUPPORTED_MAJOR_VERSION = 10;
const SUPPORTED_MINOR_VERSION = 10;
const IS_MATCHED_BY_MAJOR = MAJOR_VERSION > SUPPORTED_MAJOR_VERSION;
const IS_MATCHED_BY_MAJOR_AND_MINOR = MAJOR_VERSION === SUPPORTED_MAJOR_VERSION && MINOR_VERSION >= SUPPORTED_MINOR_VERSION;
/**
* Is `true` for Node.js 10.10 and greater.
*/
constants$2.IS_SUPPORT_READDIR_WITH_FILE_TYPES = IS_MATCHED_BY_MAJOR || IS_MATCHED_BY_MAJOR_AND_MINOR;
var utils$9 = {};
var fs$e = {};
Object.defineProperty(fs$e, "__esModule", { value: true });
fs$e.createDirentFromStats = void 0;
class DirentFromStats {
constructor(name, stats) {
this.name = name;
this.isBlockDevice = stats.isBlockDevice.bind(stats);
this.isCharacterDevice = stats.isCharacterDevice.bind(stats);
this.isDirectory = stats.isDirectory.bind(stats);
this.isFIFO = stats.isFIFO.bind(stats);
this.isFile = stats.isFile.bind(stats);
this.isSocket = stats.isSocket.bind(stats);
this.isSymbolicLink = stats.isSymbolicLink.bind(stats);
}
}
function createDirentFromStats(name, stats) {
return new DirentFromStats(name, stats);
}
fs$e.createDirentFromStats = createDirentFromStats;
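// A minimal usage sketch (illustrative only, never invoked by this bundle; the file name is
// hypothetical): `createDirentFromStats` wraps an `fs.Stats` object in a Dirent-like shape
// so stat results and `readdir` dirents can be handled uniformly.
function exampleCreateDirentFromStats () {
    const stats = require$$0__default.statSync('package.json');
    const dirent = createDirentFromStats('package.json', stats);
    dirent.isFile();         // delegates to stats.isFile()
    dirent.isSymbolicLink(); // delegates to stats.isSymbolicLink()
    return dirent.name;      // 'package.json'
}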
Object.defineProperty(utils$9, "__esModule", { value: true });
utils$9.fs = void 0;
const fs$d = fs$e;
utils$9.fs = fs$d;
var common$a = {};
Object.defineProperty(common$a, "__esModule", { value: true });
common$a.joinPathSegments = void 0;
function joinPathSegments$1(a, b, separator) {
/**
* Correctly handle cases where the first segment is a root (`/`, `C:/`) or a UNC path (`//?/C:/`), so that no duplicate separator is inserted.
*/
if (a.endsWith(separator)) {
return a + b;
}
return a + separator + b;
}
common$a.joinPathSegments = joinPathSegments$1;
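// A minimal sketch of the behaviour described above (illustrative only, never invoked by
// this bundle): when the first segment already ends with the separator (a root such as `/`
// or `C:/`), no extra separator is inserted.
function exampleJoinPathSegments () {
    joinPathSegments$1('/', 'home', '/');         // => '/home'
    joinPathSegments$1('C:/', 'temp', '/');       // => 'C:/temp'
    joinPathSegments$1('fixtures', 'a.txt', '/'); // => 'fixtures/a.txt'
}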
Object.defineProperty(async$3, "__esModule", { value: true });
async$3.readdir = async$3.readdirWithFileTypes = async$3.read = void 0;
const fsStat$5 = out$1;
const rpl = runParallel_1;
const constants_1$1 = constants$2;
const utils$8 = utils$9;
const common$9 = common$a;
function read$1(directory, settings, callback) {
if (!settings.stats && constants_1$1.IS_SUPPORT_READDIR_WITH_FILE_TYPES) {
readdirWithFileTypes$1(directory, settings, callback);
return;
}
readdir$3(directory, settings, callback);
}
async$3.read = read$1;
function readdirWithFileTypes$1(directory, settings, callback) {
settings.fs.readdir(directory, { withFileTypes: true }, (readdirError, dirents) => {
if (readdirError !== null) {
callFailureCallback$1(callback, readdirError);
return;
}
const entries = dirents.map((dirent) => ({
dirent,
name: dirent.name,
path: common$9.joinPathSegments(directory, dirent.name, settings.pathSegmentSeparator)
}));
if (!settings.followSymbolicLinks) {
callSuccessCallback$1(callback, entries);
return;
}
const tasks = entries.map((entry) => makeRplTaskEntry(entry, settings));
rpl(tasks, (rplError, rplEntries) => {
if (rplError !== null) {
callFailureCallback$1(callback, rplError);
return;
}
callSuccessCallback$1(callback, rplEntries);
});
});
}
async$3.readdirWithFileTypes = readdirWithFileTypes$1;
function makeRplTaskEntry(entry, settings) {
return (done) => {
if (!entry.dirent.isSymbolicLink()) {
done(null, entry);
return;
}
settings.fs.stat(entry.path, (statError, stats) => {
if (statError !== null) {
if (settings.throwErrorOnBrokenSymbolicLink) {
done(statError);
return;
}
done(null, entry);
return;
}
entry.dirent = utils$8.fs.createDirentFromStats(entry.name, stats);
done(null, entry);
});
};
}
function readdir$3(directory, settings, callback) {
settings.fs.readdir(directory, (readdirError, names) => {
if (readdirError !== null) {
callFailureCallback$1(callback, readdirError);
return;
}
const tasks = names.map((name) => {
const path = common$9.joinPathSegments(directory, name, settings.pathSegmentSeparator);
return (done) => {
fsStat$5.stat(path, settings.fsStatSettings, (error, stats) => {
if (error !== null) {
done(error);
return;
}
const entry = {
name,
path,
dirent: utils$8.fs.createDirentFromStats(name, stats)
};
if (settings.stats) {
entry.stats = stats;
}
done(null, entry);
});
};
});
rpl(tasks, (rplError, entries) => {
if (rplError !== null) {
callFailureCallback$1(callback, rplError);
return;
}
callSuccessCallback$1(callback, entries);
});
});
}
async$3.readdir = readdir$3;
function callFailureCallback$1(callback, error) {
callback(error);
}
function callSuccessCallback$1(callback, result) {
callback(null, result);
}
var sync$6 = {};
Object.defineProperty(sync$6, "__esModule", { value: true });
sync$6.readdir = sync$6.readdirWithFileTypes = sync$6.read = void 0;
const fsStat$4 = out$1;
const constants_1 = constants$2;
const utils$7 = utils$9;
const common$8 = common$a;
function read(directory, settings) {
if (!settings.stats && constants_1.IS_SUPPORT_READDIR_WITH_FILE_TYPES) {
return readdirWithFileTypes(directory, settings);
}
return readdir$2(directory, settings);
}
sync$6.read = read;
function readdirWithFileTypes(directory, settings) {
const dirents = settings.fs.readdirSync(directory, { withFileTypes: true });
return dirents.map((dirent) => {
const entry = {
dirent,
name: dirent.name,
path: common$8.joinPathSegments(directory, dirent.name, settings.pathSegmentSeparator)
};
if (entry.dirent.isSymbolicLink() && settings.followSymbolicLinks) {
try {
const stats = settings.fs.statSync(entry.path);
entry.dirent = utils$7.fs.createDirentFromStats(entry.name, stats);
}
catch (error) {
if (settings.throwErrorOnBrokenSymbolicLink) {
throw error;
}
}
}
return entry;
});
}
sync$6.readdirWithFileTypes = readdirWithFileTypes;
function readdir$2(directory, settings) {
const names = settings.fs.readdirSync(directory);
return names.map((name) => {
const entryPath = common$8.joinPathSegments(directory, name, settings.pathSegmentSeparator);
const stats = fsStat$4.statSync(entryPath, settings.fsStatSettings);
const entry = {
name,
path: entryPath,
dirent: utils$7.fs.createDirentFromStats(name, stats)
};
if (settings.stats) {
entry.stats = stats;
}
return entry;
});
}
sync$6.readdir = readdir$2;
var settings$2 = {};
var fs$c = {};
(function (exports) {
Object.defineProperty(exports, "__esModule", { value: true });
exports.createFileSystemAdapter = exports.FILE_SYSTEM_ADAPTER = void 0;
const fs = require$$0__default;
exports.FILE_SYSTEM_ADAPTER = {
lstat: fs.lstat,
stat: fs.stat,
lstatSync: fs.lstatSync,
statSync: fs.statSync,
readdir: fs.readdir,
readdirSync: fs.readdirSync
};
function createFileSystemAdapter(fsMethods) {
if (fsMethods === undefined) {
return exports.FILE_SYSTEM_ADAPTER;
}
return Object.assign(Object.assign({}, exports.FILE_SYSTEM_ADAPTER), fsMethods);
}
exports.createFileSystemAdapter = createFileSystemAdapter;
} (fs$c));
Object.defineProperty(settings$2, "__esModule", { value: true });
const path$e = require$$0$4;
const fsStat$3 = out$1;
const fs$b = fs$c;
let Settings$1 = class Settings {
constructor(_options = {}) {
this._options = _options;
this.followSymbolicLinks = this._getValue(this._options.followSymbolicLinks, false);
this.fs = fs$b.createFileSystemAdapter(this._options.fs);
this.pathSegmentSeparator = this._getValue(this._options.pathSegmentSeparator, path$e.sep);
this.stats = this._getValue(this._options.stats, false);
this.throwErrorOnBrokenSymbolicLink = this._getValue(this._options.throwErrorOnBrokenSymbolicLink, true);
this.fsStatSettings = new fsStat$3.Settings({
followSymbolicLink: this.followSymbolicLinks,
fs: this.fs,
throwErrorOnBrokenSymbolicLink: this.throwErrorOnBrokenSymbolicLink
});
}
_getValue(option, value) {
return option !== null && option !== void 0 ? option : value;
}
};
settings$2.default = Settings$1;
Object.defineProperty(out$2, "__esModule", { value: true });
out$2.Settings = out$2.scandirSync = out$2.scandir = void 0;
const async = async$3;
const sync$5 = sync$6;
const settings_1$2 = settings$2;
out$2.Settings = settings_1$2.default;
function scandir(path, optionsOrSettingsOrCallback, callback) {
if (typeof optionsOrSettingsOrCallback === 'function') {
async.read(path, getSettings$1(), optionsOrSettingsOrCallback);
return;
}
async.read(path, getSettings$1(optionsOrSettingsOrCallback), callback);
}
out$2.scandir = scandir;
function scandirSync(path, optionsOrSettings) {
const settings = getSettings$1(optionsOrSettings);
return sync$5.read(path, settings);
}
out$2.scandirSync = scandirSync;
function getSettings$1(settingsOrOptions = {}) {
if (settingsOrOptions instanceof settings_1$2.default) {
return settingsOrOptions;
}
return new settings_1$2.default(settingsOrOptions);
}
var queue = {exports: {}};
function reusify$1 (Constructor) {
var head = new Constructor();
var tail = head;
function get () {
var current = head;
if (current.next) {
head = current.next;
} else {
head = new Constructor();
tail = head;
}
current.next = null;
return current
}
function release (obj) {
tail.next = obj;
tail = obj;
}
return {
get: get,
release: release
}
}
var reusify_1 = reusify$1;
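// A minimal usage sketch of `reusify$1` (illustrative only, never invoked by this bundle;
// the `Holder` constructor is hypothetical): it pools instances of a constructor whose
// objects carry a `next` pointer, exactly as the `Task` constructor below does for `fastqueue`.
function exampleReusifyUsage () {
    function Holder () {
        this.value = null;
        this.next = null; // required: the pool links objects through `next`
    }
    const pool = reusify$1(Holder);
    const obj = pool.get(); // a fresh or recycled Holder
    obj.value = 42;
    // ...use the object, then reset its state before returning it to the pool...
    obj.value = null;
    pool.release(obj);      // makes it available to a later `get()` call
}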
/* eslint-disable no-var */
var reusify = reusify_1;
function fastqueue (context, worker, _concurrency) {
if (typeof context === 'function') {
_concurrency = worker;
worker = context;
context = null;
}
if (!(_concurrency >= 1)) {
throw new Error('fastqueue concurrency must be equal to or greater than 1')
}
var cache = reusify(Task);
var queueHead = null;
var queueTail = null;
var _running = 0;
var errorHandler = null;
var self = {
push: push,
drain: noop$4,
saturated: noop$4,
pause: pause,
paused: false,
get concurrency () {
return _concurrency
},
set concurrency (value) {
if (!(value >= 1)) {
throw new Error('fastqueue concurrency must be equal to or greater than 1')
}
_concurrency = value;
if (self.paused) return
for (; queueHead && _running < _concurrency;) {
_running++;
release();
}
},
running: running,
resume: resume,
idle: idle,
length: length,
getQueue: getQueue,
unshift: unshift,
empty: noop$4,
kill: kill,
killAndDrain: killAndDrain,
error: error
};
return self
function running () {
return _running
}
function pause () {
self.paused = true;
}
function length () {
var current = queueHead;
var counter = 0;
while (current) {
current = current.next;
counter++;
}
return counter
}
function getQueue () {
var current = queueHead;
var tasks = [];
while (current) {
tasks.push(current.value);
current = current.next;
}
return tasks
}
function resume () {
if (!self.paused) return
self.paused = false;
if (queueHead === null) {
_running++;
release();
return
}
for (; queueHead && _running < _concurrency;) {
_running++;
release();
}
}
function idle () {
return _running === 0 && self.length() === 0
}
function push (value, done) {
var current = cache.get();
current.context = context;
current.release = release;
current.value = value;
current.callback = done || noop$4;
current.errorHandler = errorHandler;
if (_running >= _concurrency || self.paused) {
if (queueTail) {
queueTail.next = current;
queueTail = current;
} else {
queueHead = current;
queueTail = current;
self.saturated();
}
} else {
_running++;
worker.call(context, current.value, current.worked);
}
}
function unshift (value, done) {
var current = cache.get();
current.context = context;
current.release = release;
current.value = value;
current.callback = done || noop$4;
current.errorHandler = errorHandler;
if (_running >= _concurrency || self.paused) {
if (queueHead) {
current.next = queueHead;
queueHead = current;
} else {
queueHead = current;
queueTail = current;
self.saturated();
}
} else {
_running++;
worker.call(context, current.value, current.worked);
}
}
function release (holder) {
if (holder) {
cache.release(holder);
}
var next = queueHead;
if (next && _running <= _concurrency) {
if (!self.paused) {
if (queueTail === queueHead) {
queueTail = null;
}
queueHead = next.next;
next.next = null;
worker.call(context, next.value, next.worked);
if (queueTail === null) {
self.empty();
}
} else {
_running--;
}
} else if (--_running === 0) {
self.drain();
}
}
function kill () {
queueHead = null;
queueTail = null;
self.drain = noop$4;
}
function killAndDrain () {
queueHead = null;
queueTail = null;
self.drain();
self.drain = noop$4;
}
function error (handler) {
errorHandler = handler;
}
}
function noop$4 () {}
function Task () {
this.value = null;
this.callback = noop$4;
this.next = null;
this.release = noop$4;
this.context = null;
this.errorHandler = null;
var self = this;
this.worked = function worked (err, result) {
var callback = self.callback;
var errorHandler = self.errorHandler;
var val = self.value;
self.value = null;
self.callback = noop$4;
if (self.errorHandler) {
errorHandler(err, val);
}
callback.call(self.context, err, result);
self.release(self);
};
}
function queueAsPromised (context, worker, _concurrency) {
if (typeof context === 'function') {
_concurrency = worker;
worker = context;
context = null;
}
function asyncWrapper (arg, cb) {
worker.call(this, arg)
.then(function (res) {
cb(null, res);
}, cb);
}
var queue = fastqueue(context, asyncWrapper, _concurrency);
var pushCb = queue.push;
var unshiftCb = queue.unshift;
queue.push = push;
queue.unshift = unshift;
queue.drained = drained;
return queue
function push (value) {
var p = new Promise(function (resolve, reject) {
pushCb(value, function (err, result) {
if (err) {
reject(err);
return
}
resolve(result);
});
});
// Fork the promise chain so that the error bubbles up to the user
// but does not lead to an unhandledRejection.
p.catch(noop$4);
return p
}
function unshift (value) {
var p = new Promise(function (resolve, reject) {
unshiftCb(value, function (err, result) {
if (err) {
reject(err);
return
}
resolve(result);
});
});
// Fork the promise chain so that the error bubbles up to the user
// but does not lead to an unhandledRejection.
p.catch(noop$4);
return p
}
function drained () {
if (queue.idle()) {
return new Promise(function (resolve) {
resolve();
})
}
var previousDrain = queue.drain;
var p = new Promise(function (resolve) {
queue.drain = function () {
previousDrain();
resolve();
};
});
return p
}
}
queue.exports = fastqueue;
queue.exports.promise = queueAsPromised;
var queueExports = queue.exports;
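// A minimal usage sketch of the two queue flavours exported above (illustrative only, never
// invoked by this bundle; the workers and values are hypothetical).
function exampleFastqueueUsage () {
    // Callback-style queue: the worker receives a value and a node-style callback.
    const cbQueue = queueExports((value, done) => {
        setTimeout(() => done(null, value * 2), 0);
    }, 2); // concurrency of 2
    cbQueue.drain = () => console.log('all callback tasks finished');
    cbQueue.push(21, (err, result) => console.log(result)); // logs 42

    // Promise-style queue: the worker returns a promise and push() resolves with its result.
    const pQueue = queueExports.promise(async (value) => value + 1, 1);
    pQueue.push(41).then((result) => console.log(result)); // logs 42
    pQueue.drained().then(() => console.log('promise queue drained'));
}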
var common$7 = {};
Object.defineProperty(common$7, "__esModule", { value: true });
common$7.joinPathSegments = common$7.replacePathSegmentSeparator = common$7.isAppliedFilter = common$7.isFatalError = void 0;
function isFatalError(settings, error) {
if (settings.errorFilter === null) {
return true;
}
return !settings.errorFilter(error);
}
common$7.isFatalError = isFatalError;
function isAppliedFilter(filter, value) {
return filter === null || filter(value);
}
common$7.isAppliedFilter = isAppliedFilter;
function replacePathSegmentSeparator(filepath, separator) {
return filepath.split(/[/\\]/).join(separator);
}
common$7.replacePathSegmentSeparator = replacePathSegmentSeparator;
function joinPathSegments(a, b, separator) {
if (a === '') {
return b;
}
/**
* Correctly handle cases where the first segment is a root (`/`, `C:/`) or a UNC path (`//?/C:/`), so that no duplicate separator is inserted.
*/
if (a.endsWith(separator)) {
return a + b;
}
return a + separator + b;
}
common$7.joinPathSegments = joinPathSegments;
var reader$1 = {};
Object.defineProperty(reader$1, "__esModule", { value: true });
const common$6 = common$7;
let Reader$1 = class Reader {
constructor(_root, _settings) {
this._root = _root;
this._settings = _settings;
this._root = common$6.replacePathSegmentSeparator(_root, _settings.pathSegmentSeparator);
}
};
reader$1.default = Reader$1;
Object.defineProperty(async$4, "__esModule", { value: true });
const events_1 = require$$0$7;
const fsScandir$2 = out$2;
const fastq = queueExports;
const common$5 = common$7;
const reader_1$4 = reader$1;
class AsyncReader extends reader_1$4.default {
constructor(_root, _settings) {
super(_root, _settings);
this._settings = _settings;
this._scandir = fsScandir$2.scandir;
this._emitter = new events_1.EventEmitter();
this._queue = fastq(this._worker.bind(this), this._settings.concurrency);
this._isFatalError = false;
this._isDestroyed = false;
this._queue.drain = () => {
if (!this._isFatalError) {
this._emitter.emit('end');
}
};
}
read() {
this._isFatalError = false;
this._isDestroyed = false;
setImmediate(() => {
this._pushToQueue(this._root, this._settings.basePath);
});
return this._emitter;
}
get isDestroyed() {
return this._isDestroyed;
}
destroy() {
if (this._isDestroyed) {
throw new Error('The reader is already destroyed');
}
this._isDestroyed = true;
this._queue.killAndDrain();
}
onEntry(callback) {
this._emitter.on('entry', callback);
}
onError(callback) {
this._emitter.once('error', callback);
}
onEnd(callback) {
this._emitter.once('end', callback);
}
_pushToQueue(directory, base) {
const queueItem = { directory, base };
this._queue.push(queueItem, (error) => {
if (error !== null) {
this._handleError(error);
}
});
}
_worker(item, done) {
this._scandir(item.directory, this._settings.fsScandirSettings, (error, entries) => {
if (error !== null) {
done(error, undefined);
return;
}
for (const entry of entries) {
this._handleEntry(entry, item.base);
}
done(null, undefined);
});
}
_handleError(error) {
if (this._isDestroyed || !common$5.isFatalError(this._settings, error)) {
return;
}
this._isFatalError = true;
this._isDestroyed = true;
this._emitter.emit('error', error);
}
_handleEntry(entry, base) {
if (this._isDestroyed || this._isFatalError) {
return;
}
const fullpath = entry.path;
if (base !== undefined) {
entry.path = common$5.joinPathSegments(base, entry.name, this._settings.pathSegmentSeparator);
}
if (common$5.isAppliedFilter(this._settings.entryFilter, entry)) {
this._emitEntry(entry);
}
if (entry.dirent.isDirectory() && common$5.isAppliedFilter(this._settings.deepFilter, entry)) {
this._pushToQueue(fullpath, base === undefined ? undefined : entry.path);
}
}
_emitEntry(entry) {
this._emitter.emit('entry', entry);
}
}
async$4.default = AsyncReader;
Object.defineProperty(async$5, "__esModule", { value: true });
const async_1$4 = async$4;
class AsyncProvider {
constructor(_root, _settings) {
this._root = _root;
this._settings = _settings;
this._reader = new async_1$4.default(this._root, this._settings);
this._storage = [];
}
read(callback) {
this._reader.onError((error) => {
callFailureCallback(callback, error);
});
this._reader.onEntry((entry) => {
this._storage.push(entry);
});
this._reader.onEnd(() => {
callSuccessCallback(callback, this._storage);
});
this._reader.read();
}
}
async$5.default = AsyncProvider;
function callFailureCallback(callback, error) {
callback(error);
}
function callSuccessCallback(callback, entries) {
callback(null, entries);
}
var stream$2 = {};
Object.defineProperty(stream$2, "__esModule", { value: true });
const stream_1$5 = require$$0$6;
const async_1$3 = async$4;
class StreamProvider {
constructor(_root, _settings) {
this._root = _root;
this._settings = _settings;
this._reader = new async_1$3.default(this._root, this._settings);
this._stream = new stream_1$5.Readable({
objectMode: true,
read: () => { },
destroy: () => {
if (!this._reader.isDestroyed) {
this._reader.destroy();
}
}
});
}
read() {
this._reader.onError((error) => {
this._stream.emit('error', error);
});
this._reader.onEntry((entry) => {
this._stream.push(entry);
});
this._reader.onEnd(() => {
this._stream.push(null);
});
this._reader.read();
return this._stream;
}
}
stream$2.default = StreamProvider;
var sync$4 = {};
var sync$3 = {};
Object.defineProperty(sync$3, "__esModule", { value: true });
const fsScandir$1 = out$2;
const common$4 = common$7;
const reader_1$3 = reader$1;
class SyncReader extends reader_1$3.default {
constructor() {
super(...arguments);
this._scandir = fsScandir$1.scandirSync;
this._storage = [];
this._queue = new Set();
}
read() {
this._pushToQueue(this._root, this._settings.basePath);
this._handleQueue();
return this._storage;
}
_pushToQueue(directory, base) {
this._queue.add({ directory, base });
}
_handleQueue() {
for (const item of this._queue.values()) {
this._handleDirectory(item.directory, item.base);
}
}
_handleDirectory(directory, base) {
try {
const entries = this._scandir(directory, this._settings.fsScandirSettings);
for (const entry of entries) {
this._handleEntry(entry, base);
}
}
catch (error) {
this._handleError(error);
}
}
_handleError(error) {
if (!common$4.isFatalError(this._settings, error)) {
return;
}
throw error;
}
_handleEntry(entry, base) {
const fullpath = entry.path;
if (base !== undefined) {
entry.path = common$4.joinPathSegments(base, entry.name, this._settings.pathSegmentSeparator);
}
if (common$4.isAppliedFilter(this._settings.entryFilter, entry)) {
this._pushToStorage(entry);
}
if (entry.dirent.isDirectory() && common$4.isAppliedFilter(this._settings.deepFilter, entry)) {
this._pushToQueue(fullpath, base === undefined ? undefined : entry.path);
}
}
_pushToStorage(entry) {
this._storage.push(entry);
}
}
sync$3.default = SyncReader;
Object.defineProperty(sync$4, "__esModule", { value: true });
const sync_1$3 = sync$3;
class SyncProvider {
constructor(_root, _settings) {
this._root = _root;
this._settings = _settings;
this._reader = new sync_1$3.default(this._root, this._settings);
}
read() {
return this._reader.read();
}
}
sync$4.default = SyncProvider;
var settings$1 = {};
Object.defineProperty(settings$1, "__esModule", { value: true });
const path$d = require$$0$4;
const fsScandir = out$2;
class Settings {
constructor(_options = {}) {
this._options = _options;
this.basePath = this._getValue(this._options.basePath, undefined);
this.concurrency = this._getValue(this._options.concurrency, Number.POSITIVE_INFINITY);
this.deepFilter = this._getValue(this._options.deepFilter, null);
this.entryFilter = this._getValue(this._options.entryFilter, null);
this.errorFilter = this._getValue(this._options.errorFilter, null);
this.pathSegmentSeparator = this._getValue(this._options.pathSegmentSeparator, path$d.sep);
this.fsScandirSettings = new fsScandir.Settings({
followSymbolicLinks: this._options.followSymbolicLinks,
fs: this._options.fs,
pathSegmentSeparator: this._options.pathSegmentSeparator,
stats: this._options.stats,
throwErrorOnBrokenSymbolicLink: this._options.throwErrorOnBrokenSymbolicLink
});
}
_getValue(option, value) {
return option !== null && option !== void 0 ? option : value;
}
}
settings$1.default = Settings;
Object.defineProperty(out$3, "__esModule", { value: true });
out$3.Settings = out$3.walkStream = out$3.walkSync = out$3.walk = void 0;
const async_1$2 = async$5;
const stream_1$4 = stream$2;
const sync_1$2 = sync$4;
const settings_1$1 = settings$1;
out$3.Settings = settings_1$1.default;
function walk$2(directory, optionsOrSettingsOrCallback, callback) {
if (typeof optionsOrSettingsOrCallback === 'function') {
new async_1$2.default(directory, getSettings()).read(optionsOrSettingsOrCallback);
return;
}
new async_1$2.default(directory, getSettings(optionsOrSettingsOrCallback)).read(callback);
}
out$3.walk = walk$2;
function walkSync(directory, optionsOrSettings) {
const settings = getSettings(optionsOrSettings);
const provider = new sync_1$2.default(directory, settings);
return provider.read();
}
out$3.walkSync = walkSync;
function walkStream(directory, optionsOrSettings) {
const settings = getSettings(optionsOrSettings);
const provider = new stream_1$4.default(directory, settings);
return provider.read();
}
out$3.walkStream = walkStream;
function getSettings(settingsOrOptions = {}) {
if (settingsOrOptions instanceof settings_1$1.default) {
return settingsOrOptions;
}
return new settings_1$1.default(settingsOrOptions);
}
var reader = {};
Object.defineProperty(reader, "__esModule", { value: true });
const path$c = require$$0$4;
const fsStat$2 = out$1;
const utils$6 = utils$g;
class Reader {
constructor(_settings) {
this._settings = _settings;
this._fsStatSettings = new fsStat$2.Settings({
followSymbolicLink: this._settings.followSymbolicLinks,
fs: this._settings.fs,
throwErrorOnBrokenSymbolicLink: this._settings.followSymbolicLinks
});
}
_getFullEntryPath(filepath) {
return path$c.resolve(this._settings.cwd, filepath);
}
_makeEntry(stats, pattern) {
const entry = {
name: pattern,
path: pattern,
dirent: utils$6.fs.createDirentFromStats(pattern, stats)
};
if (this._settings.stats) {
entry.stats = stats;
}
return entry;
}
_isFatalError(error) {
return !utils$6.errno.isEnoentCodeError(error) && !this._settings.suppressErrors;
}
}
reader.default = Reader;
var stream$1 = {};
Object.defineProperty(stream$1, "__esModule", { value: true });
const stream_1$3 = require$$0$6;
const fsStat$1 = out$1;
const fsWalk$2 = out$3;
const reader_1$2 = reader;
class ReaderStream extends reader_1$2.default {
constructor() {
super(...arguments);
this._walkStream = fsWalk$2.walkStream;
this._stat = fsStat$1.stat;
}
dynamic(root, options) {
return this._walkStream(root, options);
}
static(patterns, options) {
const filepaths = patterns.map(this._getFullEntryPath, this);
const stream = new stream_1$3.PassThrough({ objectMode: true });
stream._write = (index, _enc, done) => {
return this._getEntry(filepaths[index], patterns[index], options)
.then((entry) => {
if (entry !== null && options.entryFilter(entry)) {
stream.push(entry);
}
if (index === filepaths.length - 1) {
stream.end();
}
done();
})
.catch(done);
};
for (let i = 0; i < filepaths.length; i++) {
stream.write(i);
}
return stream;
}
_getEntry(filepath, pattern, options) {
return this._getStat(filepath)
.then((stats) => this._makeEntry(stats, pattern))
.catch((error) => {
if (options.errorFilter(error)) {
return null;
}
throw error;
});
}
_getStat(filepath) {
return new Promise((resolve, reject) => {
this._stat(filepath, this._fsStatSettings, (error, stats) => {
return error === null ? resolve(stats) : reject(error);
});
});
}
}
stream$1.default = ReaderStream;
Object.defineProperty(async$6, "__esModule", { value: true });
const fsWalk$1 = out$3;
const reader_1$1 = reader;
const stream_1$2 = stream$1;
class ReaderAsync extends reader_1$1.default {
constructor() {
super(...arguments);
this._walkAsync = fsWalk$1.walk;
this._readerStream = new stream_1$2.default(this._settings);
}
dynamic(root, options) {
return new Promise((resolve, reject) => {
this._walkAsync(root, options, (error, entries) => {
if (error === null) {
resolve(entries);
}
else {
reject(error);
}
});
});
}
async static(patterns, options) {
const entries = [];
const stream = this._readerStream.static(patterns, options);
// After #235, replace it with an asynchronous iterator.
return new Promise((resolve, reject) => {
stream.once('error', reject);
stream.on('data', (entry) => entries.push(entry));
stream.once('end', () => resolve(entries));
});
}
}
async$6.default = ReaderAsync;
var provider = {};
var deep = {};
var partial = {};
var matcher = {};
Object.defineProperty(matcher, "__esModule", { value: true });
const utils$5 = utils$g;
class Matcher {
constructor(_patterns, _settings, _micromatchOptions) {
this._patterns = _patterns;
this._settings = _settings;
this._micromatchOptions = _micromatchOptions;
this._storage = [];
this._fillStorage();
}
_fillStorage() {
for (const pattern of this._patterns) {
const segments = this._getPatternSegments(pattern);
const sections = this._splitSegmentsIntoSections(segments);
this._storage.push({
complete: sections.length <= 1,
pattern,
segments,
sections
});
}
}
_getPatternSegments(pattern) {
const parts = utils$5.pattern.getPatternParts(pattern, this._micromatchOptions);
return parts.map((part) => {
const dynamic = utils$5.pattern.isDynamicPattern(part, this._settings);
if (!dynamic) {
return {
dynamic: false,
pattern: part
};
}
return {
dynamic: true,
pattern: part,
patternRe: utils$5.pattern.makeRe(part, this._micromatchOptions)
};
});
}
_splitSegmentsIntoSections(segments) {
return utils$5.array.splitWhen(segments, (segment) => segment.dynamic && utils$5.pattern.hasGlobStar(segment.pattern));
}
}
matcher.default = Matcher;
Object.defineProperty(partial, "__esModule", { value: true });
const matcher_1 = matcher;
class PartialMatcher extends matcher_1.default {
match(filepath) {
const parts = filepath.split('/');
const levels = parts.length;
const patterns = this._storage.filter((info) => !info.complete || info.segments.length > levels);
for (const pattern of patterns) {
const section = pattern.sections[0];
/**
* In this case, the pattern has a globstar and we must read all directories unconditionally,
* but only if the level has reached the end of the first group.
*
* fixtures/{a,b}/**
* ^ true/false ^ always true
*/
if (!pattern.complete && levels > section.length) {
return true;
}
const match = parts.every((part, index) => {
const segment = pattern.segments[index];
if (segment.dynamic && segment.patternRe.test(part)) {
return true;
}
if (!segment.dynamic && segment.pattern === part) {
return true;
}
return false;
});
if (match) {
return true;
}
}
return false;
}
}
partial.default = PartialMatcher;
Object.defineProperty(deep, "__esModule", { value: true });
const utils$4 = utils$g;
const partial_1 = partial;
class DeepFilter {
constructor(_settings, _micromatchOptions) {
this._settings = _settings;
this._micromatchOptions = _micromatchOptions;
}
getFilter(basePath, positive, negative) {
const matcher = this._getMatcher(positive);
const negativeRe = this._getNegativePatternsRe(negative);
return (entry) => this._filter(basePath, entry, matcher, negativeRe);
}
_getMatcher(patterns) {
return new partial_1.default(patterns, this._settings, this._micromatchOptions);
}
_getNegativePatternsRe(patterns) {
const affectDepthOfReadingPatterns = patterns.filter(utils$4.pattern.isAffectDepthOfReadingPattern);
return utils$4.pattern.convertPatternsToRe(affectDepthOfReadingPatterns, this._micromatchOptions);
}
_filter(basePath, entry, matcher, negativeRe) {
if (this._isSkippedByDeep(basePath, entry.path)) {
return false;
}
if (this._isSkippedSymbolicLink(entry)) {
return false;
}
const filepath = utils$4.path.removeLeadingDotSegment(entry.path);
if (this._isSkippedByPositivePatterns(filepath, matcher)) {
return false;
}
return this._isSkippedByNegativePatterns(filepath, negativeRe);
}
_isSkippedByDeep(basePath, entryPath) {
/**
* Avoid unnecessary depth calculations when it doesn't matter.
*/
if (this._settings.deep === Infinity) {
return false;
}
return this._getEntryLevel(basePath, entryPath) >= this._settings.deep;
}
_getEntryLevel(basePath, entryPath) {
const entryPathDepth = entryPath.split('/').length;
if (basePath === '') {
return entryPathDepth;
}
const basePathDepth = basePath.split('/').length;
return entryPathDepth - basePathDepth;
}
_isSkippedSymbolicLink(entry) {
return !this._settings.followSymbolicLinks && entry.dirent.isSymbolicLink();
}
_isSkippedByPositivePatterns(entryPath, matcher) {
return !this._settings.baseNameMatch && !matcher.match(entryPath);
}
_isSkippedByNegativePatterns(entryPath, patternsRe) {
return !utils$4.pattern.matchAny(entryPath, patternsRe);
}
}
deep.default = DeepFilter;
var entry$1 = {};
Object.defineProperty(entry$1, "__esModule", { value: true });
const utils$3 = utils$g;
class EntryFilter {
constructor(_settings, _micromatchOptions) {
this._settings = _settings;
this._micromatchOptions = _micromatchOptions;
this.index = new Map();
}
getFilter(positive, negative) {
const positiveRe = utils$3.pattern.convertPatternsToRe(positive, this._micromatchOptions);
const negativeRe = utils$3.pattern.convertPatternsToRe(negative, Object.assign(Object.assign({}, this._micromatchOptions), { dot: true }));
return (entry) => this._filter(entry, positiveRe, negativeRe);
}
_filter(entry, positiveRe, negativeRe) {
const filepath = utils$3.path.removeLeadingDotSegment(entry.path);
if (this._settings.unique && this._isDuplicateEntry(filepath)) {
return false;
}
if (this._onlyFileFilter(entry) || this._onlyDirectoryFilter(entry)) {
return false;
}
if (this._isSkippedByAbsoluteNegativePatterns(filepath, negativeRe)) {
return false;
}
const isDirectory = entry.dirent.isDirectory();
const isMatched = this._isMatchToPatterns(filepath, positiveRe, isDirectory) && !this._isMatchToPatterns(filepath, negativeRe, isDirectory);
if (this._settings.unique && isMatched) {
this._createIndexRecord(filepath);
}
return isMatched;
}
_isDuplicateEntry(filepath) {
return this.index.has(filepath);
}
_createIndexRecord(filepath) {
this.index.set(filepath, undefined);
}
_onlyFileFilter(entry) {
return this._settings.onlyFiles && !entry.dirent.isFile();
}
_onlyDirectoryFilter(entry) {
return this._settings.onlyDirectories && !entry.dirent.isDirectory();
}
_isSkippedByAbsoluteNegativePatterns(entryPath, patternsRe) {
if (!this._settings.absolute) {
return false;
}
const fullpath = utils$3.path.makeAbsolute(this._settings.cwd, entryPath);
return utils$3.pattern.matchAny(fullpath, patternsRe);
}
_isMatchToPatterns(filepath, patternsRe, isDirectory) {
// Trying to match files and directories by patterns.
const isMatched = utils$3.pattern.matchAny(filepath, patternsRe);
// A pattern with a trailing slash can be used for directory matching.
// To apply such a pattern, we need to add a trailing slash to the path.
if (!isMatched && isDirectory) {
return utils$3.pattern.matchAny(filepath + '/', patternsRe);
}
return isMatched;
}
}
entry$1.default = EntryFilter;
var error$1 = {};
Object.defineProperty(error$1, "__esModule", { value: true });
const utils$2 = utils$g;
class ErrorFilter {
constructor(_settings) {
this._settings = _settings;
}
getFilter() {
return (error) => this._isNonFatalError(error);
}
_isNonFatalError(error) {
return utils$2.errno.isEnoentCodeError(error) || this._settings.suppressErrors;
}
}
error$1.default = ErrorFilter;
var entry = {};
Object.defineProperty(entry, "__esModule", { value: true });
const utils$1 = utils$g;
class EntryTransformer {
constructor(_settings) {
this._settings = _settings;
}
getTransformer() {
return (entry) => this._transform(entry);
}
_transform(entry) {
let filepath = entry.path;
if (this._settings.absolute) {
filepath = utils$1.path.makeAbsolute(this._settings.cwd, filepath);
filepath = utils$1.path.unixify(filepath);
}
if (this._settings.markDirectories && entry.dirent.isDirectory()) {
filepath += '/';
}
if (!this._settings.objectMode) {
return filepath;
}
return Object.assign(Object.assign({}, entry), { path: filepath });
}
}
entry.default = EntryTransformer;
Object.defineProperty(provider, "__esModule", { value: true });
const path$b = require$$0$4;
const deep_1 = deep;
const entry_1 = entry$1;
const error_1 = error$1;
const entry_2 = entry;
class Provider {
constructor(_settings) {
this._settings = _settings;
this.errorFilter = new error_1.default(this._settings);
this.entryFilter = new entry_1.default(this._settings, this._getMicromatchOptions());
this.deepFilter = new deep_1.default(this._settings, this._getMicromatchOptions());
this.entryTransformer = new entry_2.default(this._settings);
}
_getRootDirectory(task) {
return path$b.resolve(this._settings.cwd, task.base);
}
_getReaderOptions(task) {
const basePath = task.base === '.' ? '' : task.base;
return {
basePath,
pathSegmentSeparator: '/',
concurrency: this._settings.concurrency,
deepFilter: this.deepFilter.getFilter(basePath, task.positive, task.negative),
entryFilter: this.entryFilter.getFilter(task.positive, task.negative),
errorFilter: this.errorFilter.getFilter(),
followSymbolicLinks: this._settings.followSymbolicLinks,
fs: this._settings.fs,
stats: this._settings.stats,
throwErrorOnBrokenSymbolicLink: this._settings.throwErrorOnBrokenSymbolicLink,
transform: this.entryTransformer.getTransformer()
};
}
_getMicromatchOptions() {
return {
dot: this._settings.dot,
matchBase: this._settings.baseNameMatch,
nobrace: !this._settings.braceExpansion,
nocase: !this._settings.caseSensitiveMatch,
noext: !this._settings.extglob,
noglobstar: !this._settings.globstar,
posix: true,
strictSlashes: false
};
}
}
provider.default = Provider;
Object.defineProperty(async$7, "__esModule", { value: true });
const async_1$1 = async$6;
const provider_1$2 = provider;
class ProviderAsync extends provider_1$2.default {
constructor() {
super(...arguments);
this._reader = new async_1$1.default(this._settings);
}
async read(task) {
const root = this._getRootDirectory(task);
const options = this._getReaderOptions(task);
const entries = await this.api(root, task, options);
return entries.map((entry) => options.transform(entry));
}
api(root, task, options) {
if (task.dynamic) {
return this._reader.dynamic(root, options);
}
return this._reader.static(task.patterns, options);
}
}
async$7.default = ProviderAsync;
var stream = {};
Object.defineProperty(stream, "__esModule", { value: true });
const stream_1$1 = require$$0$6;
const stream_2 = stream$1;
const provider_1$1 = provider;
class ProviderStream extends provider_1$1.default {
constructor() {
super(...arguments);
this._reader = new stream_2.default(this._settings);
}
read(task) {
const root = this._getRootDirectory(task);
const options = this._getReaderOptions(task);
const source = this.api(root, task, options);
const destination = new stream_1$1.Readable({ objectMode: true, read: () => { } });
source
.once('error', (error) => destination.emit('error', error))
.on('data', (entry) => destination.emit('data', options.transform(entry)))
.once('end', () => destination.emit('end'));
destination
.once('close', () => source.destroy());
return destination;
}
api(root, task, options) {
if (task.dynamic) {
return this._reader.dynamic(root, options);
}
return this._reader.static(task.patterns, options);
}
}
stream.default = ProviderStream;
var sync$2 = {};
var sync$1 = {};
Object.defineProperty(sync$1, "__esModule", { value: true });
const fsStat = out$1;
const fsWalk = out$3;
const reader_1 = reader;
class ReaderSync extends reader_1.default {
constructor() {
super(...arguments);
this._walkSync = fsWalk.walkSync;
this._statSync = fsStat.statSync;
}
dynamic(root, options) {
return this._walkSync(root, options);
}
static(patterns, options) {
const entries = [];
for (const pattern of patterns) {
const filepath = this._getFullEntryPath(pattern);
const entry = this._getEntry(filepath, pattern, options);
if (entry === null || !options.entryFilter(entry)) {
continue;
}
entries.push(entry);
}
return entries;
}
_getEntry(filepath, pattern, options) {
try {
const stats = this._getStat(filepath);
return this._makeEntry(stats, pattern);
}
catch (error) {
if (options.errorFilter(error)) {
return null;
}
throw error;
}
}
_getStat(filepath) {
return this._statSync(filepath, this._fsStatSettings);
}
}
sync$1.default = ReaderSync;
Object.defineProperty(sync$2, "__esModule", { value: true });
const sync_1$1 = sync$1;
const provider_1 = provider;
class ProviderSync extends provider_1.default {
constructor() {
super(...arguments);
this._reader = new sync_1$1.default(this._settings);
}
read(task) {
const root = this._getRootDirectory(task);
const options = this._getReaderOptions(task);
const entries = this.api(root, task, options);
return entries.map(options.transform);
}
api(root, task, options) {
if (task.dynamic) {
return this._reader.dynamic(root, options);
}
return this._reader.static(task.patterns, options);
}
}
sync$2.default = ProviderSync;
var settings = {};
(function (exports) {
Object.defineProperty(exports, "__esModule", { value: true });
exports.DEFAULT_FILE_SYSTEM_ADAPTER = void 0;
const fs = require$$0__default;
const os = require$$2;
/**
* The `os.cpus` method can return an empty array. We expect the number of cores to be greater than zero.
* https://github.com/nodejs/node/blob/7faeddf23a98c53896f8b574a6e66589e8fb1eb8/lib/os.js#L106-L107
*/
const CPU_COUNT = Math.max(os.cpus().length, 1);
exports.DEFAULT_FILE_SYSTEM_ADAPTER = {
lstat: fs.lstat,
lstatSync: fs.lstatSync,
stat: fs.stat,
statSync: fs.statSync,
readdir: fs.readdir,
readdirSync: fs.readdirSync
};
class Settings {
constructor(_options = {}) {
this._options = _options;
this.absolute = this._getValue(this._options.absolute, false);
this.baseNameMatch = this._getValue(this._options.baseNameMatch, false);
this.braceExpansion = this._getValue(this._options.braceExpansion, true);
this.caseSensitiveMatch = this._getValue(this._options.caseSensitiveMatch, true);
this.concurrency = this._getValue(this._options.concurrency, CPU_COUNT);
this.cwd = this._getValue(this._options.cwd, process.cwd());
this.deep = this._getValue(this._options.deep, Infinity);
this.dot = this._getValue(this._options.dot, false);
this.extglob = this._getValue(this._options.extglob, true);
this.followSymbolicLinks = this._getValue(this._options.followSymbolicLinks, true);
this.fs = this._getFileSystemMethods(this._options.fs);
this.globstar = this._getValue(this._options.globstar, true);
this.ignore = this._getValue(this._options.ignore, []);
this.markDirectories = this._getValue(this._options.markDirectories, false);
this.objectMode = this._getValue(this._options.objectMode, false);
this.onlyDirectories = this._getValue(this._options.onlyDirectories, false);
this.onlyFiles = this._getValue(this._options.onlyFiles, true);
this.stats = this._getValue(this._options.stats, false);
this.suppressErrors = this._getValue(this._options.suppressErrors, false);
this.throwErrorOnBrokenSymbolicLink = this._getValue(this._options.throwErrorOnBrokenSymbolicLink, false);
this.unique = this._getValue(this._options.unique, true);
if (this.onlyDirectories) {
this.onlyFiles = false;
}
if (this.stats) {
this.objectMode = true;
}
// Remove the cast to the array in the next major (#404).
this.ignore = [].concat(this.ignore);
}
_getValue(option, value) {
return option === undefined ? value : option;
}
_getFileSystemMethods(methods = {}) {
return Object.assign(Object.assign({}, exports.DEFAULT_FILE_SYSTEM_ADAPTER), methods);
}
}
exports.default = Settings;
} (settings));
const taskManager = tasks;
const async_1 = async$7;
const stream_1 = stream;
const sync_1 = sync$2;
const settings_1 = settings;
const utils = utils$g;
async function FastGlob(source, options) {
assertPatternsInput(source);
const works = getWorks(source, async_1.default, options);
const result = await Promise.all(works);
return utils.array.flatten(result);
}
// https://github.com/typescript-eslint/typescript-eslint/issues/60
// eslint-disable-next-line no-redeclare
(function (FastGlob) {
FastGlob.glob = FastGlob;
FastGlob.globSync = sync;
FastGlob.globStream = stream;
FastGlob.async = FastGlob;
function sync(source, options) {
assertPatternsInput(source);
const works = getWorks(source, sync_1.default, options);
return utils.array.flatten(works);
}
FastGlob.sync = sync;
function stream(source, options) {
assertPatternsInput(source);
const works = getWorks(source, stream_1.default, options);
/**
* The stream returned by the provider cannot work with an asynchronous iterator.
* To support asynchronous iterators, regardless of the number of tasks, we always multiplex streams.
* This affects performance (+25%). I don't see a better solution right now.
*/
return utils.stream.merge(works);
}
FastGlob.stream = stream;
function generateTasks(source, options) {
assertPatternsInput(source);
const patterns = [].concat(source);
const settings = new settings_1.default(options);
return taskManager.generate(patterns, settings);
}
FastGlob.generateTasks = generateTasks;
function isDynamicPattern(source, options) {
assertPatternsInput(source);
const settings = new settings_1.default(options);
return utils.pattern.isDynamicPattern(source, settings);
}
FastGlob.isDynamicPattern = isDynamicPattern;
function escapePath(source) {
assertPatternsInput(source);
return utils.path.escape(source);
}
FastGlob.escapePath = escapePath;
function convertPathToPattern(source) {
assertPatternsInput(source);
return utils.path.convertPathToPattern(source);
}
FastGlob.convertPathToPattern = convertPathToPattern;
(function (posix) {
function escapePath(source) {
assertPatternsInput(source);
return utils.path.escapePosixPath(source);
}
posix.escapePath = escapePath;
function convertPathToPattern(source) {
assertPatternsInput(source);
return utils.path.convertPosixPathToPattern(source);
}
posix.convertPathToPattern = convertPathToPattern;
})(FastGlob.posix || (FastGlob.posix = {}));
(function (win32) {
function escapePath(source) {
assertPatternsInput(source);
return utils.path.escapeWindowsPath(source);
}
win32.escapePath = escapePath;
function convertPathToPattern(source) {
assertPatternsInput(source);
return utils.path.convertWindowsPathToPattern(source);
}
win32.convertPathToPattern = convertPathToPattern;
})(FastGlob.win32 || (FastGlob.win32 = {}));
})(FastGlob || (FastGlob = {}));
function getWorks(source, _Provider, options) {
const patterns = [].concat(source);
const settings = new settings_1.default(options);
const tasks = taskManager.generate(patterns, settings);
const provider = new _Provider(settings);
return tasks.map(provider.read, provider);
}
function assertPatternsInput(input) {
const source = [].concat(input);
const isValidSource = source.every((item) => utils.string.isString(item) && !utils.string.isEmpty(item));
if (!isValidSource) {
throw new TypeError('Patterns must be a non-empty string or an array of strings');
}
}
var out = FastGlob;
var glob = /*@__PURE__*/getDefaultExportFromCjs(out);
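// A minimal usage sketch of the fast-glob facade built above (illustrative only, never
// invoked by this bundle; patterns, options and paths are hypothetical).
async function exampleFastGlobUsage () {
    // Asynchronous API: resolves to a flat array of matching paths.
    const jsFiles = await glob(['src/**/*.js', '!**/node_modules/**'], { dot: false });
    // Synchronous API.
    const configs = glob.sync('*.config.{js,cjs,mjs}');
    // Stream API: emits one entry per match and, as noted above, supports async iteration.
    for await (const entry of glob.stream('**/*.md')) {
        console.log(entry);
    }
    // Helpers: detect dynamic patterns and escape literal paths.
    glob.isDynamicPattern('src/**');          // true
    glob.escapePath('directory (copy)/file'); // escapes special glob characters
    return { jsFiles, configs };
}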
var src$2 = {};
// @ts-check
const path$a = require$$0$4;
const fs$a = require$$0__default;
const os$3 = require$$2;
const fsReadFileAsync = fs$a.promises.readFile;
/** @type {(name: string, sync: boolean) => string[]} */
function getDefaultSearchPlaces(name, sync) {
return [
'package.json',
`.${name}rc.json`,
`.${name}rc.js`,
`.${name}rc.cjs`,
...(sync ? [] : [`.${name}rc.mjs`]),
`.config/${name}rc`,
`.config/${name}rc.json`,
`.config/${name}rc.js`,
`.config/${name}rc.cjs`,
...(sync ? [] : [`.config/${name}rc.mjs`]),
`${name}.config.js`,
`${name}.config.cjs`,
...(sync ? [] : [`${name}.config.mjs`]),
];
}
/**
* @type {(p: string) => string}
*
* See #17.
* On *nix, if cwd is not under homedir,
* the last path would be '' ('/build' -> ''),
* but it should actually be '/'.
* On Windows this never happens ('C:\build' -> 'C:').
*/
function parentDir(p) {
return path$a.dirname(p) || path$a.sep;
}
/** @type {import('./index').LoaderSync} */
const jsonLoader = (_, content) => JSON.parse(content);
// Use plain require in webpack context for dynamic import
const requireFunc = typeof __webpack_require__ === "function" ? __non_webpack_require__ : __require;
/** @type {import('./index').LoadersSync} */
const defaultLoadersSync = Object.freeze({
'.js': requireFunc,
'.json': requireFunc,
'.cjs': requireFunc,
noExt: jsonLoader,
});
src$2.defaultLoadersSync = defaultLoadersSync;
/** @type {import('./index').Loader} */
const dynamicImport = async id => {
try {
const mod = await import(/* webpackIgnore: true */ id);
return mod.default;
} catch (e) {
try {
return requireFunc(id);
} catch (/** @type {any} */ requireE) {
if (
requireE.code === 'ERR_REQUIRE_ESM' ||
(requireE instanceof SyntaxError &&
requireE
.toString()
.includes('Cannot use import statement outside a module'))
) {
throw e;
}
throw requireE;
}
}
};
/** @type {import('./index').Loaders} */
const defaultLoaders = Object.freeze({
'.js': dynamicImport,
'.mjs': dynamicImport,
'.cjs': dynamicImport,
'.json': jsonLoader,
noExt: jsonLoader,
});
src$2.defaultLoaders = defaultLoaders;
/**
* @param {string} name
* @param {import('./index').Options | import('./index').OptionsSync} options
* @param {boolean} sync
* @returns {Required<import('./index').Options | import('./index').OptionsSync>}
*/
function getOptions(name, options, sync) {
/** @type {Required<import('./index').Options>} */
const conf = {
stopDir: os$3.homedir(),
searchPlaces: getDefaultSearchPlaces(name, sync),
ignoreEmptySearchPlaces: true,
cache: true,
transform: x => x,
packageProp: [name],
...options,
loaders: {
...(sync ? defaultLoadersSync : defaultLoaders),
...options.loaders,
},
};
conf.searchPlaces.forEach(place => {
const key = path$a.extname(place) || 'noExt';
const loader = conf.loaders[key];
if (!loader) {
throw new Error(`Missing loader for extension "${place}"`);
}
if (typeof loader !== 'function') {
throw new Error(
`Loader for extension "${place}" is not a function: Received ${typeof loader}.`,
);
}
});
return conf;
}
/** @type {(props: string | string[], obj: Record<string, any>) => unknown} */
function getPackageProp(props, obj) {
if (typeof props === 'string' && props in obj) return obj[props];
return (
(Array.isArray(props) ? props : props.split('.')).reduce(
(acc, prop) => (acc === undefined ? acc : acc[prop]),
obj,
) || null
);
}
/** @param {string} filepath */
function validateFilePath(filepath) {
if (!filepath) throw new Error('load must pass a non-empty string');
}
/** @type {(loader: import('./index').Loader, ext: string) => void} */
function validateLoader(loader, ext) {
if (!loader) throw new Error(`No loader specified for extension "${ext}"`);
if (typeof loader !== 'function') throw new Error('loader is not a function');
}
/** @type {(enableCache: boolean) => <T>(c: Map<string, T>, filepath: string, res: T) => T} */
const makeEmplace = enableCache => (c, filepath, res) => {
if (enableCache) c.set(filepath, res);
return res;
};
/** @type {import('./index').lilconfig} */
src$2.lilconfig = function lilconfig(name, options) {
const {
ignoreEmptySearchPlaces,
loaders,
packageProp,
searchPlaces,
stopDir,
transform,
cache,
} = getOptions(name, options ?? {}, false);
const searchCache = new Map();
const loadCache = new Map();
const emplace = makeEmplace(cache);
return {
async search(searchFrom = process.cwd()) {
/** @type {import('./index').LilconfigResult} */
const result = {
config: null,
filepath: '',
};
/** @type {Set<string>} */
const visited = new Set();
let dir = searchFrom;
dirLoop: while (true) {
if (cache) {
const r = searchCache.get(dir);
if (r !== undefined) {
for (const p of visited) searchCache.set(p, r);
return r;
}
visited.add(dir);
}
for (const searchPlace of searchPlaces) {
const filepath = path$a.join(dir, searchPlace);
try {
await fs$a.promises.access(filepath);
} catch {
continue;
}
const content = String(await fsReadFileAsync(filepath));
const loaderKey = path$a.extname(searchPlace) || 'noExt';
const loader = loaders[loaderKey];
// handle package.json
if (searchPlace === 'package.json') {
const pkg = await loader(filepath, content);
const maybeConfig = getPackageProp(packageProp, pkg);
if (maybeConfig != null) {
result.config = maybeConfig;
result.filepath = filepath;
break dirLoop;
}
continue;
}
// handle other types of configs
const isEmpty = content.trim() === '';
if (isEmpty && ignoreEmptySearchPlaces) continue;
if (isEmpty) {
result.isEmpty = true;
result.config = undefined;
} else {
validateLoader(loader, loaderKey);
result.config = await loader(filepath, content);
}
result.filepath = filepath;
break dirLoop;
}
if (dir === stopDir || dir === parentDir(dir)) break dirLoop;
dir = parentDir(dir);
}
const transformed =
// not found
result.filepath === '' && result.config === null
? transform(null)
: transform(result);
if (cache) {
for (const p of visited) searchCache.set(p, transformed);
}
return transformed;
},
async load(filepath) {
validateFilePath(filepath);
const absPath = path$a.resolve(process.cwd(), filepath);
if (cache && loadCache.has(absPath)) {
return loadCache.get(absPath);
}
const {base, ext} = path$a.parse(absPath);
const loaderKey = ext || 'noExt';
const loader = loaders[loaderKey];
validateLoader(loader, loaderKey);
const content = String(await fsReadFileAsync(absPath));
if (base === 'package.json') {
const pkg = await loader(absPath, content);
return emplace(
loadCache,
absPath,
transform({
config: getPackageProp(packageProp, pkg),
filepath: absPath,
}),
);
}
/** @type {import('./index').LilconfigResult} */
const result = {
config: null,
filepath: absPath,
};
// handle other types of configs
const isEmpty = content.trim() === '';
if (isEmpty && ignoreEmptySearchPlaces)
return emplace(
loadCache,
absPath,
transform({
config: undefined,
filepath: absPath,
isEmpty: true,
}),
);
// cosmiconfig returns undefined for empty files
result.config = isEmpty ? undefined : await loader(absPath, content);
return emplace(
loadCache,
absPath,
transform(isEmpty ? {...result, isEmpty, config: undefined} : result),
);
},
clearLoadCache() {
if (cache) loadCache.clear();
},
clearSearchCache() {
if (cache) searchCache.clear();
},
clearCaches() {
if (cache) {
loadCache.clear();
searchCache.clear();
}
},
};
};
/** @type {import('./index').lilconfigSync} */
src$2.lilconfigSync = function lilconfigSync(name, options) {
const {
ignoreEmptySearchPlaces,
loaders,
packageProp,
searchPlaces,
stopDir,
transform,
cache,
} = getOptions(name, options ?? {}, true);
const searchCache = new Map();
const loadCache = new Map();
const emplace = makeEmplace(cache);
return {
search(searchFrom = process.cwd()) {
/** @type {import('./index').LilconfigResult} */
const result = {
config: null,
filepath: '',
};
/** @type {Set<string>} */
const visited = new Set();
let dir = searchFrom;
dirLoop: while (true) {
if (cache) {
const r = searchCache.get(dir);
if (r !== undefined) {
for (const p of visited) searchCache.set(p, r);
return r;
}
visited.add(dir);
}
for (const searchPlace of searchPlaces) {
const filepath = path$a.join(dir, searchPlace);
try {
fs$a.accessSync(filepath);
} catch {
continue;
}
const loaderKey = path$a.extname(searchPlace) || 'noExt';
const loader = loaders[loaderKey];
const content = String(fs$a.readFileSync(filepath));
// handle package.json
if (searchPlace === 'package.json') {
const pkg = loader(filepath, content);
const maybeConfig = getPackageProp(packageProp, pkg);
if (maybeConfig != null) {
result.config = maybeConfig;
result.filepath = filepath;
break dirLoop;
}
continue;
}
// handle other types of configs
const isEmpty = content.trim() === '';
if (isEmpty && ignoreEmptySearchPlaces) continue;
if (isEmpty) {
result.isEmpty = true;
result.config = undefined;
} else {
validateLoader(loader, loaderKey);
result.config = loader(filepath, content);
}
result.filepath = filepath;
break dirLoop;
}
if (dir === stopDir || dir === parentDir(dir)) break dirLoop;
dir = parentDir(dir);
}
const transformed =
// not found
result.filepath === '' && result.config === null
? transform(null)
: transform(result);
if (cache) {
for (const p of visited) searchCache.set(p, transformed);
}
return transformed;
},
load(filepath) {
validateFilePath(filepath);
const absPath = path$a.resolve(process.cwd(), filepath);
if (cache && loadCache.has(absPath)) {
return loadCache.get(absPath);
}
const {base, ext} = path$a.parse(absPath);
const loaderKey = ext || 'noExt';
const loader = loaders[loaderKey];
validateLoader(loader, loaderKey);
const content = String(fs$a.readFileSync(absPath));
if (base === 'package.json') {
const pkg = loader(absPath, content);
return transform({
config: getPackageProp(packageProp, pkg),
filepath: absPath,
});
}
const result = {
config: null,
filepath: absPath,
};
// handle other types of configs
const isEmpty = content.trim() === '';
if (isEmpty && ignoreEmptySearchPlaces)
return emplace(
loadCache,
absPath,
transform({
filepath: absPath,
config: undefined,
isEmpty: true,
}),
);
// cosmiconfig returns undefined for empty files
result.config = isEmpty ? undefined : loader(absPath, content);
return emplace(
loadCache,
absPath,
transform(isEmpty ? {...result, isEmpty, config: undefined} : result),
);
},
clearLoadCache() {
if (cache) loadCache.clear();
},
clearSearchCache() {
if (cache) searchCache.clear();
},
clearCaches() {
if (cache) {
loadCache.clear();
searchCache.clear();
}
},
};
};
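// A minimal usage sketch of the async and sync config searchers defined above (illustrative
// only, never invoked by this bundle; the tool name 'mytool' and the file name are hypothetical).
async function exampleLilconfigUsage () {
    // Walk upwards from cwd, checking package.json, .mytoolrc.json, mytool.config.js, etc.
    const searcher = src$2.lilconfig('mytool', { stopDir: require$$2.homedir() });
    const found = await searcher.search(); // null when nothing was found
    if (found !== null) {
        console.log(found.filepath, found.config);
    }
    // Load a specific file instead of searching.
    const loaded = await searcher.load('mytool.config.js');
    // Synchronous variant with the same result shape.
    const syncResult = src$2.lilconfigSync('mytool').search();
    return { found, loaded, syncResult };
}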
const ALIAS = Symbol.for('yaml.alias');
const DOC = Symbol.for('yaml.document');
const MAP = Symbol.for('yaml.map');
const PAIR = Symbol.for('yaml.pair');
const SCALAR$1 = Symbol.for('yaml.scalar');
const SEQ = Symbol.for('yaml.seq');
const NODE_TYPE = Symbol.for('yaml.node.type');
const isAlias = (node) => !!node && typeof node === 'object' && node[NODE_TYPE] === ALIAS;
const isDocument = (node) => !!node && typeof node === 'object' && node[NODE_TYPE] === DOC;
const isMap = (node) => !!node && typeof node === 'object' && node[NODE_TYPE] === MAP;
const isPair = (node) => !!node && typeof node === 'object' && node[NODE_TYPE] === PAIR;
const isScalar$1 = (node) => !!node && typeof node === 'object' && node[NODE_TYPE] === SCALAR$1;
const isSeq = (node) => !!node && typeof node === 'object' && node[NODE_TYPE] === SEQ;
function isCollection$1(node) {
if (node && typeof node === 'object')
switch (node[NODE_TYPE]) {
case MAP:
case SEQ:
return true;
}
return false;
}
function isNode$1(node) {
if (node && typeof node === 'object')
switch (node[NODE_TYPE]) {
case ALIAS:
case MAP:
case SCALAR$1:
case SEQ:
return true;
}
return false;
}
const hasAnchor = (node) => (isScalar$1(node) || isCollection$1(node)) && !!node.anchor;
const BREAK$1 = Symbol('break visit');
const SKIP$1 = Symbol('skip children');
const REMOVE$1 = Symbol('remove node');
/**
* Apply a visitor to an AST node or document.
*
* Walks through the tree (depth-first) starting from `node`, calling a
* `visitor` function with three arguments:
* - `key`: For sequence values and map `Pair`, the node's index in the
* collection. Within a `Pair`, `'key'` or `'value'`, correspondingly.
* `null` for the root node.
* - `node`: The current node.
* - `path`: The ancestry of the current node.
*
* The return value of the visitor may be used to control the traversal:
* - `undefined` (default): Do nothing and continue
* - `visit.SKIP`: Do not visit the children of this node, continue with next
* sibling
* - `visit.BREAK`: Terminate traversal completely
* - `visit.REMOVE`: Remove the current node, then continue with the next one
* - `Node`: Replace the current node, then continue by visiting it
* - `number`: While iterating the items of a sequence or map, set the index
* of the next step. This is useful especially if the index of the current
* node has changed.
*
* If `visitor` is a single function, it will be called with all values
* encountered in the tree, including e.g. `null` values. Alternatively,
* separate visitor functions may be defined for each `Map`, `Pair`, `Seq`,
* `Alias` and `Scalar` node. To define the same visitor function for more than
* one node type, use the `Collection` (map and seq), `Value` (map, seq & scalar)
* and `Node` (alias, map, seq & scalar) targets. Of all these, only the most
* specific defined one will be used for each node.
*/
function visit$1(node, visitor) {
const visitor_ = initVisitor(visitor);
if (isDocument(node)) {
const cd = visit_(null, node.contents, visitor_, Object.freeze([node]));
if (cd === REMOVE$1)
node.contents = null;
}
else
visit_(null, node, visitor_, Object.freeze([]));
}
// Without the `as symbol` casts, TS declares these in the `visit`
// namespace using `var`, but then complains about that because
// `unique symbol` must be `const`.
/** Terminate visit traversal completely */
visit$1.BREAK = BREAK$1;
/** Do not visit the children of the current node */
visit$1.SKIP = SKIP$1;
/** Remove the current node */
visit$1.REMOVE = REMOVE$1;
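// Illustrative sketch of the visitor API documented above (assumes a parsed document
// `doc`; in application code `visit$1`/`isScalar$1` are the library's public `visit`/`isScalar`):
//   visit$1(doc, {
//     Scalar(_key, node) {
//       // mutate scalars in place: upper-case every string value
//       if (typeof node.value === 'string') node.value = node.value.toUpperCase();
//     },
//     Pair(_key, pair) {
//       // drop any pair whose key is the scalar 'secret'
//       if (isScalar$1(pair.key) && pair.key.value === 'secret') return visit$1.REMOVE;
//     }
//   });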
function visit_(key, node, visitor, path) {
const ctrl = callVisitor(key, node, visitor, path);
if (isNode$1(ctrl) || isPair(ctrl)) {
replaceNode(key, path, ctrl);
return visit_(key, ctrl, visitor, path);
}
if (typeof ctrl !== 'symbol') {
if (isCollection$1(node)) {
path = Object.freeze(path.concat(node));
for (let i = 0; i < node.items.length; ++i) {
const ci = visit_(i, node.items[i], visitor, path);
if (typeof ci === 'number')
i = ci - 1;
else if (ci === BREAK$1)
return BREAK$1;
else if (ci === REMOVE$1) {
node.items.splice(i, 1);
i -= 1;
}
}
}
else if (isPair(node)) {
path = Object.freeze(path.concat(node));
const ck = visit_('key', node.key, visitor, path);
if (ck === BREAK$1)
return BREAK$1;
else if (ck === REMOVE$1)
node.key = null;
const cv = visit_('value', node.value, visitor, path);
if (cv === BREAK$1)
return BREAK$1;
else if (cv === REMOVE$1)
node.value = null;
}
}
return ctrl;
}
/**
* Apply an async visitor to an AST node or document.
*
* Walks through the tree (depth-first) starting from `node`, calling a
* `visitor` function with three arguments:
* - `key`: For sequence values and map `Pair`, the node's index in the
* collection. Within a `Pair`, `'key'` or `'value'`, correspondingly.
* `null` for the root node.
* - `node`: The current node.
* - `path`: The ancestry of the current node.
*
* The return value of the visitor may be used to control the traversal:
* - `Promise`: Must resolve to one of the following values
* - `undefined` (default): Do nothing and continue
* - `visit.SKIP`: Do not visit the children of this node, continue with next
* sibling
* - `visit.BREAK`: Terminate traversal completely
* - `visit.REMOVE`: Remove the current node, then continue with the next one
* - `Node`: Replace the current node, then continue by visiting it
* - `number`: While iterating the items of a sequence or map, set the index
* of the next step. This is useful especially if the index of the current
* node has changed.
*
* If `visitor` is a single function, it will be called with all values
* encountered in the tree, including e.g. `null` values. Alternatively,
* separate visitor functions may be defined for each `Map`, `Pair`, `Seq`,
* `Alias` and `Scalar` node. To define the same visitor function for more than
* one node type, use the `Collection` (map and seq), `Value` (map, seq & scalar)
* and `Node` (alias, map, seq & scalar) targets. Of all these, only the most
* specific defined one will be used for each node.
*/
async function visitAsync(node, visitor) {
const visitor_ = initVisitor(visitor);
if (isDocument(node)) {
const cd = await visitAsync_(null, node.contents, visitor_, Object.freeze([node]));
if (cd === REMOVE$1)
node.contents = null;
}
else
await visitAsync_(null, node, visitor_, Object.freeze([]));
}
// Without the `as symbol` casts, TS declares these in the `visit`
// namespace using `var`, but then complains about that because
// `unique symbol` must be `const`.
/** Terminate visit traversal completely */
visitAsync.BREAK = BREAK$1;
/** Do not visit the children of the current node */
visitAsync.SKIP = SKIP$1;
/** Remove the current node */
visitAsync.REMOVE = REMOVE$1;
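// Async variant sketch (hypothetical): the visitor may return a Promise, which is awaited
// before traversal continues, e.g.
//   await visitAsync(doc, {
//     async Scalar(_key, node) {
//       node.value = await lookupReplacement(node.value); // `lookupReplacement` is a made-up helper
//     }
//   });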
async function visitAsync_(key, node, visitor, path) {
const ctrl = await callVisitor(key, node, visitor, path);
if (isNode$1(ctrl) || isPair(ctrl)) {
replaceNode(key, path, ctrl);
return visitAsync_(key, ctrl, visitor, path);
}
if (typeof ctrl !== 'symbol') {
if (isCollection$1(node)) {
path = Object.freeze(path.concat(node));
for (let i = 0; i < node.items.length; ++i) {
const ci = await visitAsync_(i, node.items[i], visitor, path);
if (typeof ci === 'number')
i = ci - 1;
else if (ci === BREAK$1)
return BREAK$1;
else if (ci === REMOVE$1) {
node.items.splice(i, 1);
i -= 1;
}
}
}
else if (isPair(node)) {
path = Object.freeze(path.concat(node));
const ck = await visitAsync_('key', node.key, visitor, path);
if (ck === BREAK$1)
return BREAK$1;
else if (ck === REMOVE$1)
node.key = null;
const cv = await visitAsync_('value', node.value, visitor, path);
if (cv === BREAK$1)
return BREAK$1;
else if (cv === REMOVE$1)
node.value = null;
}
}
return ctrl;
}
function initVisitor(visitor) {
if (typeof visitor === 'object' &&
(visitor.Collection || visitor.Node || visitor.Value)) {
return Object.assign({
Alias: visitor.Node,
Map: visitor.Node,
Scalar: visitor.Node,
Seq: visitor.Node
}, visitor.Value && {
Map: visitor.Value,
Scalar: visitor.Value,
Seq: visitor.Value
}, visitor.Collection && {
Map: visitor.Collection,
Seq: visitor.Collection
}, visitor);
}
return visitor;
}
function callVisitor(key, node, visitor, path) {
if (typeof visitor === 'function')
return visitor(key, node, path);
if (isMap(node))
return visitor.Map?.(key, node, path);
if (isSeq(node))
return visitor.Seq?.(key, node, path);
if (isPair(node))
return visitor.Pair?.(key, node, path);
if (isScalar$1(node))
return visitor.Scalar?.(key, node, path);
if (isAlias(node))
return visitor.Alias?.(key, node, path);
return undefined;
}
function replaceNode(key, path, node) {
const parent = path[path.length - 1];
if (isCollection$1(parent)) {
parent.items[key] = node;
}
else if (isPair(parent)) {
if (key === 'key')
parent.key = node;
else
parent.value = node;
}
else if (isDocument(parent)) {
parent.contents = node;
}
else {
const pt = isAlias(parent) ? 'alias' : 'scalar';
throw new Error(`Cannot replace node with ${pt} parent`);
}
}
const escapeChars = {
'!': '%21',
',': '%2C',
'[': '%5B',
']': '%5D',
'{': '%7B',
'}': '%7D'
};
const escapeTagName = (tn) => tn.replace(/[!,[\]{}]/g, ch => escapeChars[ch]);
class Directives {
constructor(yaml, tags) {
/**
* The directives-end/doc-start marker `---`. If `null`, a marker may still be
* included in the document's stringified representation.
*/
this.docStart = null;
/** The doc-end marker `...`. */
this.docEnd = false;
this.yaml = Object.assign({}, Directives.defaultYaml, yaml);
this.tags = Object.assign({}, Directives.defaultTags, tags);
}
clone() {
const copy = new Directives(this.yaml, this.tags);
copy.docStart = this.docStart;
return copy;
}
/**
* During parsing, get a Directives instance for the current document and
* update the stream state according to the current version's spec.
*/
atDocument() {
const res = new Directives(this.yaml, this.tags);
switch (this.yaml.version) {
case '1.1':
this.atNextDocument = true;
break;
case '1.2':
this.atNextDocument = false;
this.yaml = {
explicit: Directives.defaultYaml.explicit,
version: '1.2'
};
this.tags = Object.assign({}, Directives.defaultTags);
break;
}
return res;
}
/**
* @param onError - May be called even if the action was successful
* @returns `true` on success
*/
add(line, onError) {
if (this.atNextDocument) {
this.yaml = { explicit: Directives.defaultYaml.explicit, version: '1.1' };
this.tags = Object.assign({}, Directives.defaultTags);
this.atNextDocument = false;
}
const parts = line.trim().split(/[ \t]+/);
const name = parts.shift();
switch (name) {
case '%TAG': {
if (parts.length !== 2) {
onError(0, '%TAG directive should contain exactly two parts');
if (parts.length < 2)
return false;
}
const [handle, prefix] = parts;
this.tags[handle] = prefix;
return true;
}
case '%YAML': {
this.yaml.explicit = true;
if (parts.length !== 1) {
onError(0, '%YAML directive should contain exactly one part');
return false;
}
const [version] = parts;
if (version === '1.1' || version === '1.2') {
this.yaml.version = version;
return true;
}
else {
const isValid = /^\d+\.\d+$/.test(version);
onError(6, `Unsupported YAML version ${version}`, isValid);
return false;
}
}
default:
onError(0, `Unknown directive ${name}`, true);
return false;
}
}
/**
* Resolves a tag, matching handles to those defined in %TAG directives.
*
* @returns Resolved tag, which may also be the non-specific tag `'!'` or a
* `'!local'` tag, or `null` if unresolvable.
*/
tagName(source, onError) {
if (source === '!')
return '!'; // non-specific tag
if (source[0] !== '!') {
onError(`Not a valid tag: ${source}`);
return null;
}
if (source[1] === '<') {
const verbatim = source.slice(2, -1);
if (verbatim === '!' || verbatim === '!!') {
onError(`Verbatim tags aren't resolved, so ${source} is invalid.`);
return null;
}
if (source[source.length - 1] !== '>')
onError('Verbatim tags must end with a >');
return verbatim;
}
const [, handle, suffix] = source.match(/^(.*!)([^!]*)$/s);
if (!suffix)
onError(`The ${source} tag has no suffix`);
const prefix = this.tags[handle];
if (prefix) {
try {
return prefix + decodeURIComponent(suffix);
}
catch (error) {
onError(String(error));
return null;
}
}
if (handle === '!')
return source; // local tag
onError(`Could not resolve tag: ${source}`);
return null;
}
/**
* Given a fully resolved tag, returns its printable string form,
* taking into account current tag prefixes and defaults.
*/
tagString(tag) {
for (const [handle, prefix] of Object.entries(this.tags)) {
if (tag.startsWith(prefix))
return handle + escapeTagName(tag.substring(prefix.length));
}
return tag[0] === '!' ? tag : `!<${tag}>`;
}
toString(doc) {
const lines = this.yaml.explicit
? [`%YAML ${this.yaml.version || '1.2'}`]
: [];
const tagEntries = Object.entries(this.tags);
let tagNames;
if (doc && tagEntries.length > 0 && isNode$1(doc.contents)) {
const tags = {};
visit$1(doc.contents, (_key, node) => {
if (isNode$1(node) && node.tag)
tags[node.tag] = true;
});
tagNames = Object.keys(tags);
}
else
tagNames = [];
for (const [handle, prefix] of tagEntries) {
if (handle === '!!' && prefix === 'tag:yaml.org,2002:')
continue;
if (!doc || tagNames.some(tn => tn.startsWith(prefix)))
lines.push(`%TAG ${handle} ${prefix}`);
}
return lines.join('\n');
}
}
Directives.defaultYaml = { explicit: false, version: '1.2' };
Directives.defaultTags = { '!!': 'tag:yaml.org,2002:' };
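// Example of how the %TAG machinery above resolves and prints tags (values are illustrative):
// after `%TAG !e! tag:example.com,2000:` has been added, this.tags maps
// '!e!' -> 'tag:example.com,2000:', so
//   tagName('!e!widget')                        -> 'tag:example.com,2000:widget'
//   tagString('tag:example.com,2000:widget')    -> '!e!widget'
// while a tag with no matching prefix, e.g. 'tag:other', is printed verbatim as '!<tag:other>'.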
/**
* Verify that the input string is a valid anchor.
*
* Will throw on errors.
*/
function anchorIsValid(anchor) {
if (/[\x00-\x19\s,[\]{}]/.test(anchor)) {
const sa = JSON.stringify(anchor);
const msg = `Anchor must not contain whitespace or control characters: ${sa}`;
throw new Error(msg);
}
return true;
}
function anchorNames(root) {
const anchors = new Set();
visit$1(root, {
Value(_key, node) {
if (node.anchor)
anchors.add(node.anchor);
}
});
return anchors;
}
/** Find a new anchor name with the given `prefix` and a one-indexed suffix. */
function findNewAnchor(prefix, exclude) {
for (let i = 1; true; ++i) {
const name = `${prefix}${i}`;
if (!exclude.has(name))
return name;
}
}
function createNodeAnchors(doc, prefix) {
const aliasObjects = [];
const sourceObjects = new Map();
let prevAnchors = null;
return {
onAnchor: (source) => {
aliasObjects.push(source);
if (!prevAnchors)
prevAnchors = anchorNames(doc);
const anchor = findNewAnchor(prefix, prevAnchors);
prevAnchors.add(anchor);
return anchor;
},
/**
* With circular references, the source node is only resolved after all
* of its child nodes are. This is why anchors are set only after all of
* the nodes have been created.
*/
setAnchors: () => {
for (const source of aliasObjects) {
const ref = sourceObjects.get(source);
if (typeof ref === 'object' &&
ref.anchor &&
(isScalar$1(ref.node) || isCollection$1(ref.node))) {
ref.node.anchor = ref.anchor;
}
else {
const error = new Error('Failed to resolve repeated object (this should not happen)');
error.source = source;
throw error;
}
}
},
sourceObjects
};
}
/**
* Applies the JSON.parse reviver algorithm as defined in the ECMA-262 spec,
* in section 24.5.1.1 "Runtime Semantics: InternalizeJSONProperty" of the
* 2021 edition: https://tc39.es/ecma262/#sec-json.parse
*
* Includes extensions for handling Map and Set objects.
*/
function applyReviver(reviver, obj, key, val) {
if (val && typeof val === 'object') {
if (Array.isArray(val)) {
for (let i = 0, len = val.length; i < len; ++i) {
const v0 = val[i];
const v1 = applyReviver(reviver, val, String(i), v0);
if (v1 === undefined)
delete val[i];
else if (v1 !== v0)
val[i] = v1;
}
}
else if (val instanceof Map) {
for (const k of Array.from(val.keys())) {
const v0 = val.get(k);
const v1 = applyReviver(reviver, val, k, v0);
if (v1 === undefined)
val.delete(k);
else if (v1 !== v0)
val.set(k, v1);
}
}
else if (val instanceof Set) {
for (const v0 of Array.from(val)) {
const v1 = applyReviver(reviver, val, v0, v0);
if (v1 === undefined)
val.delete(v0);
else if (v1 !== v0) {
val.delete(v0);
val.add(v1);
}
}
}
else {
for (const [k, v0] of Object.entries(val)) {
const v1 = applyReviver(reviver, val, k, v0);
if (v1 === undefined)
delete val[k];
else if (v1 !== v0)
val[k] = v1;
}
}
}
return reviver.call(obj, key, val);
}
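// Minimal sketch of the reviver semantics implemented above (assumed input data):
//   const data = { user: 'ada', password: 'hunter2' };
//   applyReviver((key, value) => (key === 'password' ? undefined : value), { '': data }, '', data);
//   // -> { user: 'ada' }  (returning undefined from the reviver deletes the property,
//   //    mirroring JSON.parse's reviver; Map and Set contents are walked the same way)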
/**
* Recursively convert any node or its contents to native JavaScript
*
* @param value - The input value
* @param arg - If `value` defines a `toJSON()` method, use this
* as its first argument
* @param ctx - Conversion context, originally set in Document#toJS(). If
* `{ keep: true }` is not set, output should be suitable for JSON
* stringification.
*/
function toJS(value, arg, ctx) {
// eslint-disable-next-line @typescript-eslint/no-unsafe-return
if (Array.isArray(value))
return value.map((v, i) => toJS(v, String(i), ctx));
if (value && typeof value.toJSON === 'function') {
// eslint-disable-next-line @typescript-eslint/no-unsafe-call
if (!ctx || !hasAnchor(value))
return value.toJSON(arg, ctx);
const data = { aliasCount: 0, count: 1, res: undefined };
ctx.anchors.set(value, data);
ctx.onCreate = res => {
data.res = res;
delete ctx.onCreate;
};
const res = value.toJSON(arg, ctx);
if (ctx.onCreate)
ctx.onCreate(res);
return res;
}
if (typeof value === 'bigint' && !ctx?.keep)
return Number(value);
return value;
}
class NodeBase {
constructor(type) {
Object.defineProperty(this, NODE_TYPE, { value: type });
}
/** Create a copy of this node. */
clone() {
const copy = Object.create(Object.getPrototypeOf(this), Object.getOwnPropertyDescriptors(this));
if (this.range)
copy.range = this.range.slice();
return copy;
}
/** A plain JavaScript representation of this node. */
toJS(doc, { mapAsMap, maxAliasCount, onAnchor, reviver } = {}) {
if (!isDocument(doc))
throw new TypeError('A document argument is required');
const ctx = {
anchors: new Map(),
doc,
keep: true,
mapAsMap: mapAsMap === true,
mapKeyWarned: false,
maxAliasCount: typeof maxAliasCount === 'number' ? maxAliasCount : 100
};
const res = toJS(this, '', ctx);
if (typeof onAnchor === 'function')
for (const { count, res } of ctx.anchors.values())
onAnchor(res, count);
return typeof reviver === 'function'
? applyReviver(reviver, { '': res }, '', res)
: res;
}
}
class Alias extends NodeBase {
constructor(source) {
super(ALIAS);
this.source = source;
Object.defineProperty(this, 'tag', {
set() {
throw new Error('Alias nodes cannot have tags');
}
});
}
/**
* Resolve the value of this alias within `doc`, finding the last
* instance of the `source` anchor before this node.
*/
resolve(doc) {
let found = undefined;
visit$1(doc, {
Node: (_key, node) => {
if (node === this)
return visit$1.BREAK;
if (node.anchor === this.source)
found = node;
}
});
return found;
}
toJSON(_arg, ctx) {
if (!ctx)
return { source: this.source };
const { anchors, doc, maxAliasCount } = ctx;
const source = this.resolve(doc);
if (!source) {
const msg = `Unresolved alias (the anchor must be set before the alias): ${this.source}`;
throw new ReferenceError(msg);
}
let data = anchors.get(source);
if (!data) {
// Resolve anchors for Node.prototype.toJS()
toJS(source, null, ctx);
data = anchors.get(source);
}
/* istanbul ignore if */
if (!data || data.res === undefined) {
const msg = 'This should not happen: Alias anchor was not resolved?';
throw new ReferenceError(msg);
}
if (maxAliasCount >= 0) {
data.count += 1;
if (data.aliasCount === 0)
data.aliasCount = getAliasCount(doc, source, anchors);
if (data.count * data.aliasCount > maxAliasCount) {
const msg = 'Excessive alias count indicates a resource exhaustion attack';
throw new ReferenceError(msg);
}
}
return data.res;
}
toString(ctx, _onComment, _onChompKeep) {
const src = `*${this.source}`;
if (ctx) {
anchorIsValid(this.source);
if (ctx.options.verifyAliasOrder && !ctx.anchors.has(this.source)) {
const msg = `Unresolved alias (the anchor must be set before the alias): ${this.source}`;
throw new Error(msg);
}
if (ctx.implicitKey)
return `${src} `;
}
return src;
}
}
function getAliasCount(doc, node, anchors) {
if (isAlias(node)) {
const source = node.resolve(doc);
const anchor = anchors && source && anchors.get(source);
return anchor ? anchor.count * anchor.aliasCount : 0;
}
else if (isCollection$1(node)) {
let count = 0;
for (const item of node.items) {
const c = getAliasCount(doc, item, anchors);
if (c > count)
count = c;
}
return count;
}
else if (isPair(node)) {
const kc = getAliasCount(doc, node.key, anchors);
const vc = getAliasCount(doc, node.value, anchors);
return Math.max(kc, vc);
}
return 1;
}
const isScalarValue = (value) => !value || (typeof value !== 'function' && typeof value !== 'object');
class Scalar extends NodeBase {
constructor(value) {
super(SCALAR$1);
this.value = value;
}
toJSON(arg, ctx) {
return ctx?.keep ? this.value : toJS(this.value, arg, ctx);
}
toString() {
return String(this.value);
}
}
Scalar.BLOCK_FOLDED = 'BLOCK_FOLDED';
Scalar.BLOCK_LITERAL = 'BLOCK_LITERAL';
Scalar.PLAIN = 'PLAIN';
Scalar.QUOTE_DOUBLE = 'QUOTE_DOUBLE';
Scalar.QUOTE_SINGLE = 'QUOTE_SINGLE';
const defaultTagPrefix = 'tag:yaml.org,2002:';
function findTagObject(value, tagName, tags) {
if (tagName) {
const match = tags.filter(t => t.tag === tagName);
const tagObj = match.find(t => !t.format) ?? match[0];
if (!tagObj)
throw new Error(`Tag ${tagName} not found`);
return tagObj;
}
return tags.find(t => t.identify?.(value) && !t.format);
}
function createNode(value, tagName, ctx) {
if (isDocument(value))
value = value.contents;
if (isNode$1(value))
return value;
if (isPair(value)) {
const map = ctx.schema[MAP].createNode?.(ctx.schema, null, ctx);
map.items.push(value);
return map;
}
if (value instanceof String ||
value instanceof Number ||
value instanceof Boolean ||
(typeof BigInt !== 'undefined' && value instanceof BigInt) // not supported everywhere
) {
// https://tc39.es/ecma262/#sec-serializejsonproperty
value = value.valueOf();
}
const { aliasDuplicateObjects, onAnchor, onTagObj, schema, sourceObjects } = ctx;
// Detect duplicate references to the same object & use Alias nodes for all
// after first. The `ref` wrapper allows for circular references to resolve.
let ref = undefined;
if (aliasDuplicateObjects && value && typeof value === 'object') {
ref = sourceObjects.get(value);
if (ref) {
if (!ref.anchor)
ref.anchor = onAnchor(value);
return new Alias(ref.anchor);
}
else {
ref = { anchor: null, node: null };
sourceObjects.set(value, ref);
}
}
if (tagName?.startsWith('!!'))
tagName = defaultTagPrefix + tagName.slice(2);
let tagObj = findTagObject(value, tagName, schema.tags);
if (!tagObj) {
if (value && typeof value.toJSON === 'function') {
// eslint-disable-next-line @typescript-eslint/no-unsafe-call
value = value.toJSON();
}
if (!value || typeof value !== 'object') {
const node = new Scalar(value);
if (ref)
ref.node = node;
return node;
}
tagObj =
value instanceof Map
? schema[MAP]
: Symbol.iterator in Object(value)
? schema[SEQ]
: schema[MAP];
}
if (onTagObj) {
onTagObj(tagObj);
delete ctx.onTagObj;
}
const node = tagObj?.createNode
? tagObj.createNode(ctx.schema, value, ctx)
: typeof tagObj?.nodeClass?.from === 'function'
? tagObj.nodeClass.from(ctx.schema, value, ctx)
: new Scalar(value);
if (tagName)
node.tag = tagName;
else if (!tagObj.default)
node.tag = tagObj.tag;
if (ref)
ref.node = node;
return node;
}
function collectionFromPath(schema, path, value) {
let v = value;
for (let i = path.length - 1; i >= 0; --i) {
const k = path[i];
if (typeof k === 'number' && Number.isInteger(k) && k >= 0) {
const a = [];
a[k] = v;
v = a;
}
else {
v = new Map([[k, v]]);
}
}
return createNode(v, undefined, {
aliasDuplicateObjects: false,
keepUndefined: false,
onAnchor: () => {
throw new Error('This should not happen, please report a bug.');
},
schema,
sourceObjects: new Map()
});
}
// Type guard is intentionally a little wrong so as to be more useful,
// as it does not cover untypable empty non-string iterables (e.g. []).
const isEmptyPath = (path) => path == null ||
(typeof path === 'object' && !!path[Symbol.iterator]().next().done);
class Collection extends NodeBase {
constructor(type, schema) {
super(type);
Object.defineProperty(this, 'schema', {
value: schema,
configurable: true,
enumerable: false,
writable: true
});
}
/**
* Create a copy of this collection.
*
* @param schema - If defined, overwrites the original's schema
*/
clone(schema) {
const copy = Object.create(Object.getPrototypeOf(this), Object.getOwnPropertyDescriptors(this));
if (schema)
copy.schema = schema;
copy.items = copy.items.map(it => isNode$1(it) || isPair(it) ? it.clone(schema) : it);
if (this.range)
copy.range = this.range.slice();
return copy;
}
/**
* Adds a value to the collection. For `!!map` and `!!omap` the value must
* be a Pair instance or a `{ key, value }` object, which may not have a key
* that already exists in the map.
*/
addIn(path, value) {
if (isEmptyPath(path))
this.add(value);
else {
const [key, ...rest] = path;
const node = this.get(key, true);
if (isCollection$1(node))
node.addIn(rest, value);
else if (node === undefined && this.schema)
this.set(key, collectionFromPath(this.schema, rest, value));
else
throw new Error(`Expected YAML collection at ${key}. Remaining path: ${rest}`);
}
}
/**
* Removes a value from the collection.
* @returns `true` if the item was found and removed.
*/
deleteIn(path) {
const [key, ...rest] = path;
if (rest.length === 0)
return this.delete(key);
const node = this.get(key, true);
if (isCollection$1(node))
return node.deleteIn(rest);
else
throw new Error(`Expected YAML collection at ${key}. Remaining path: ${rest}`);
}
/**
* Returns item at `key`, or `undefined` if not found. By default unwraps
* scalar values from their surrounding node; to disable set `keepScalar` to
* `true` (collections are always returned intact).
*/
getIn(path, keepScalar) {
const [key, ...rest] = path;
const node = this.get(key, true);
if (rest.length === 0)
return !keepScalar && isScalar$1(node) ? node.value : node;
else
return isCollection$1(node) ? node.getIn(rest, keepScalar) : undefined;
}
hasAllNullValues(allowScalar) {
return this.items.every(node => {
if (!isPair(node))
return false;
const n = node.value;
return (n == null ||
(allowScalar &&
isScalar$1(n) &&
n.value == null &&
!n.commentBefore &&
!n.comment &&
!n.tag));
});
}
/**
* Checks if the collection includes a value with the key `key`.
*/
hasIn(path) {
const [key, ...rest] = path;
if (rest.length === 0)
return this.has(key);
const node = this.get(key, true);
return isCollection$1(node) ? node.hasIn(rest) : false;
}
/**
* Sets a value in this collection. For `!!set`, `value` needs to be a
* boolean to add/remove the item from the set.
*/
setIn(path, value) {
const [key, ...rest] = path;
if (rest.length === 0) {
this.set(key, value);
}
else {
const node = this.get(key, true);
if (isCollection$1(node))
node.setIn(rest, value);
else if (node === undefined && this.schema)
this.set(key, collectionFromPath(this.schema, rest, value));
else
throw new Error(`Expected YAML collection at ${key}. Remaining path: ${rest}`);
}
}
}
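// Sketch of the deep-path helpers above on a concrete subclass (illustrative; YAMLMap,
// YAMLSeq and the schema come from elsewhere in this file / the library):
//   map.setIn(['a', 0], 'x');   // missing nested collections are created via collectionFromPath
//   map.getIn(['a', 0]);        // -> 'x' (unwrapped from its Scalar unless keepScalar is true)
//   map.hasIn(['a', 0]);        // -> true
//   map.deleteIn(['a', 0]);     // -> true, and the item is removed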
/**
* Stringifies a comment.
*
* Empty comment lines are left empty,
* lines consisting of a single space are replaced by `#`,
* and all other lines are prefixed with a `#`.
*/
const stringifyComment = (str) => str.replace(/^(?!$)(?: $)?/gm, '#');
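// e.g. stringifyComment(' \nnote\n\nmore') === '#\n#note\n\n#more'
// (a lone-space line becomes a bare '#', empty lines stay empty, other lines get a '#' prefix)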
function indentComment(comment, indent) {
if (/^\n+$/.test(comment))
return comment.substring(1);
return indent ? comment.replace(/^(?! *$)/gm, indent) : comment;
}
const lineComment = (str, indent, comment) => str.endsWith('\n')
? indentComment(comment, indent)
: comment.includes('\n')
? '\n' + indentComment(comment, indent)
: (str.endsWith(' ') ? '' : ' ') + comment;
const FOLD_FLOW = 'flow';
const FOLD_BLOCK = 'block';
const FOLD_QUOTED = 'quoted';
/**
* Tries to keep input at up to `lineWidth` characters, splitting only on spaces
* not followed by newlines or spaces unless `mode` is `'quoted'`. Lines are
* terminated with `\n` and started with `indent`.
*/
function foldFlowLines(text, indent, mode = 'flow', { indentAtStart, lineWidth = 80, minContentWidth = 20, onFold, onOverflow } = {}) {
if (!lineWidth || lineWidth < 0)
return text;
if (lineWidth < minContentWidth)
minContentWidth = 0;
const endStep = Math.max(1 + minContentWidth, 1 + lineWidth - indent.length);
if (text.length <= endStep)
return text;
const folds = [];
const escapedFolds = {};
let end = lineWidth - indent.length;
if (typeof indentAtStart === 'number') {
if (indentAtStart > lineWidth - Math.max(2, minContentWidth))
folds.push(0);
else
end = lineWidth - indentAtStart;
}
let split = undefined;
let prev = undefined;
let overflow = false;
let i = -1;
let escStart = -1;
let escEnd = -1;
if (mode === FOLD_BLOCK) {
i = consumeMoreIndentedLines(text, i, indent.length);
if (i !== -1)
end = i + endStep;
}
for (let ch; (ch = text[(i += 1)]);) {
if (mode === FOLD_QUOTED && ch === '\\') {
escStart = i;
switch (text[i + 1]) {
case 'x':
i += 3;
break;
case 'u':
i += 5;
break;
case 'U':
i += 9;
break;
default:
i += 1;
}
escEnd = i;
}
if (ch === '\n') {
if (mode === FOLD_BLOCK)
i = consumeMoreIndentedLines(text, i, indent.length);
end = i + indent.length + endStep;
split = undefined;
}
else {
if (ch === ' ' &&
prev &&
prev !== ' ' &&
prev !== '\n' &&
prev !== '\t') {
// space surrounded by non-space can be replaced with newline + indent
const next = text[i + 1];
if (next && next !== ' ' && next !== '\n' && next !== '\t')
split = i;
}
if (i >= end) {
if (split) {
folds.push(split);
end = split + endStep;
split = undefined;
}
else if (mode === FOLD_QUOTED) {
// white-space collected at end may stretch past lineWidth
while (prev === ' ' || prev === '\t') {
prev = ch;
ch = text[(i += 1)];
overflow = true;
}
// Account for newline escape, but don't break preceding escape
const j = i > escEnd + 1 ? i - 2 : escStart - 1;
// Bail out if lineWidth & minContentWidth are shorter than an escape string
if (escapedFolds[j])
return text;
folds.push(j);
escapedFolds[j] = true;
end = j + endStep;
split = undefined;
}
else {
overflow = true;
}
}
}
prev = ch;
}
if (overflow && onOverflow)
onOverflow();
if (folds.length === 0)
return text;
if (onFold)
onFold();
let res = text.slice(0, folds[0]);
for (let i = 0; i < folds.length; ++i) {
const fold = folds[i];
const end = folds[i + 1] || text.length;
if (fold === 0)
res = `\n${indent}${text.slice(0, end)}`;
else {
if (mode === FOLD_QUOTED && escapedFolds[fold])
res += `${text[fold]}\\`;
res += `\n${indent}${text.slice(fold + 1, end)}`;
}
}
return res;
}
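// Illustrative call (result worked out by hand, so treat it as a sketch):
//   foldFlowLines('aaa bbb ccc', '  ', 'flow', { lineWidth: 10 })
//   // -> 'aaa bbb\n  ccc'
// With a two-space indent and lineWidth 10, the per-line content budget is 8 characters,
// so the text folds at the last space before that budget is exceeded and continues indented.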
/**
* Presumes `i + 1` is at the start of a line
* @returns index of last newline in more-indented block
*/
function consumeMoreIndentedLines(text, i, indent) {
let end = i;
let start = i + 1;
let ch = text[start];
while (ch === ' ' || ch === '\t') {
if (i < start + indent) {
ch = text[++i];
}
else {
do {
ch = text[++i];
} while (ch && ch !== '\n');
end = i;
start = i + 1;
ch = text[start];
}
}
return end;
}
const getFoldOptions = (ctx, isBlock) => ({
indentAtStart: isBlock ? ctx.indent.length : ctx.indentAtStart,
lineWidth: ctx.options.lineWidth,
minContentWidth: ctx.options.minContentWidth
});
// Also checks for lines starting with %, as parsing the output as YAML 1.1 will
// presume that's starting a new document.
const containsDocumentMarker = (str) => /^(%|---|\.\.\.)/m.test(str);
function lineLengthOverLimit(str, lineWidth, indentLength) {
if (!lineWidth || lineWidth < 0)
return false;
const limit = lineWidth - indentLength;
const strLen = str.length;
if (strLen <= limit)
return false;
for (let i = 0, start = 0; i < strLen; ++i) {
if (str[i] === '\n') {
if (i - start > limit)
return true;
start = i + 1;
if (strLen - start <= limit)
return false;
}
}
return true;
}
function doubleQuotedString(value, ctx) {
const json = JSON.stringify(value);
if (ctx.options.doubleQuotedAsJSON)
return json;
const { implicitKey } = ctx;
const minMultiLineLength = ctx.options.doubleQuotedMinMultiLineLength;
const indent = ctx.indent || (containsDocumentMarker(value) ? ' ' : '');
let str = '';
let start = 0;
for (let i = 0, ch = json[i]; ch; ch = json[++i]) {
if (ch === ' ' && json[i + 1] === '\\' && json[i + 2] === 'n') {
// space before newline needs to be escaped to not be folded
str += json.slice(start, i) + '\\ ';
i += 1;
start = i;
ch = '\\';
}
if (ch === '\\')
switch (json[i + 1]) {
case 'u':
{
str += json.slice(start, i);
const code = json.substr(i + 2, 4);
switch (code) {
case '0000':
str += '\\0';
break;
case '0007':
str += '\\a';
break;
case '000b':
str += '\\v';
break;
case '001b':
str += '\\e';
break;
case '0085':
str += '\\N';
break;
case '00a0':
str += '\\_';
break;
case '2028':
str += '\\L';
break;
case '2029':
str += '\\P';
break;
default:
if (code.substr(0, 2) === '00')
str += '\\x' + code.substr(2);
else
str += json.substr(i, 6);
}
i += 5;
start = i + 1;
}
break;
case 'n':
if (implicitKey ||
json[i + 2] === '"' ||
json.length < minMultiLineLength) {
i += 1;
}
else {
// folding will eat first newline
str += json.slice(start, i) + '\n\n';
while (json[i + 2] === '\\' &&
json[i + 3] === 'n' &&
json[i + 4] !== '"') {
str += '\n';
i += 2;
}
str += indent;
// space after newline needs to be escaped to not be folded
if (json[i + 2] === ' ')
str += '\\';
i += 1;
start = i + 1;
}
break;
default:
i += 1;
}
}
str = start ? str + json.slice(start) : json;
return implicitKey
? str
: foldFlowLines(str, indent, FOLD_QUOTED, getFoldOptions(ctx, false));
}
function singleQuotedString(value, ctx) {
if (ctx.options.singleQuote === false ||
(ctx.implicitKey && value.includes('\n')) ||
/[ \t]\n|\n[ \t]/.test(value) // single quoted string can't have leading or trailing whitespace around newline
)
return doubleQuotedString(value, ctx);
const indent = ctx.indent || (containsDocumentMarker(value) ? ' ' : '');
const res = "'" + value.replace(/'/g, "''").replace(/\n+/g, `$&\n${indent}`) + "'";
return ctx.implicitKey
? res
: foldFlowLines(res, indent, FOLD_FLOW, getFoldOptions(ctx, false));
}
function quotedString(value, ctx) {
const { singleQuote } = ctx.options;
let qs;
if (singleQuote === false)
qs = doubleQuotedString;
else {
const hasDouble = value.includes('"');
const hasSingle = value.includes("'");
if (hasDouble && !hasSingle)
qs = singleQuotedString;
else if (hasSingle && !hasDouble)
qs = doubleQuotedString;
else
qs = singleQuote ? singleQuotedString : doubleQuotedString;
}
return qs(value, ctx);
}
// The negative lookbehind avoids a polynomial search,
// but isn't supported yet on Safari: https://caniuse.com/js-regexp-lookbehind
let blockEndNewlines;
try {
blockEndNewlines = new RegExp('(^|(?<!\n))\n+(?!\n|$)', 'g');
}
catch {
blockEndNewlines = /\n+(?!\n|$)/g;
}
function blockString({ comment, type, value }, ctx, onComment, onChompKeep) {
const { blockQuote, commentString, lineWidth } = ctx.options;
// 1. Block can't end in whitespace unless the last line is non-empty.
// 2. Strings consisting of only whitespace are best rendered explicitly.
if (!blockQuote || /\n[\t ]+$/.test(value) || /^\s*$/.test(value)) {
return quotedString(value, ctx);
}
const indent = ctx.indent ||
(ctx.forceBlockIndent || containsDocumentMarker(value) ? ' ' : '');
const literal = blockQuote === 'literal'
? true
: blockQuote === 'folded' || type === Scalar.BLOCK_FOLDED
? false
: type === Scalar.BLOCK_LITERAL
? true
: !lineLengthOverLimit(value, lineWidth, indent.length);
if (!value)
return literal ? '|\n' : '>\n';
// determine chomping from whitespace at value end
let chomp;
let endStart;
for (endStart = value.length; endStart > 0; --endStart) {
const ch = value[endStart - 1];
if (ch !== '\n' && ch !== '\t' && ch !== ' ')
break;
}
let end = value.substring(endStart);
const endNlPos = end.indexOf('\n');
if (endNlPos === -1) {
chomp = '-'; // strip
}
else if (value === end || endNlPos !== end.length - 1) {
chomp = '+'; // keep
if (onChompKeep)
onChompKeep();
}
else {
chomp = ''; // clip
}
if (end) {
value = value.slice(0, -end.length);
if (end[end.length - 1] === '\n')
end = end.slice(0, -1);
end = end.replace(blockEndNewlines, `$&${indent}`);
}
// determine indent indicator from whitespace at value start
let startWithSpace = false;
let startEnd;
let startNlPos = -1;
for (startEnd = 0; startEnd < value.length; ++startEnd) {
const ch = value[startEnd];
if (ch === ' ')
startWithSpace = true;
else if (ch === '\n')
startNlPos = startEnd;
else
break;
}
let start = value.substring(0, startNlPos < startEnd ? startNlPos + 1 : startEnd);
if (start) {
value = value.substring(start.length);
start = start.replace(/\n+/g, `$&${indent}`);
}
const indentSize = indent ? '2' : '1'; // root is at -1
let header = (literal ? '|' : '>') + (startWithSpace ? indentSize : '') + chomp;
if (comment) {
header += ' ' + commentString(comment.replace(/ ?[\r\n]+/g, ' '));
if (onComment)
onComment();
}
if (literal) {
value = value.replace(/\n+/g, `$&${indent}`);
return `${header}\n${indent}${start}${value}${end}`;
}
value = value
.replace(/\n+/g, '\n$&')
.replace(/(?:^|\n)([\t ].*)(?:([\n\t ]*)\n(?![\n\t ]))?/g, '$1$2') // more-indented lines aren't folded
// ^ more-ind. ^ empty ^ capture next empty lines only at end of indent
.replace(/\n+/g, `$&${indent}`);
const body = foldFlowLines(`${start}${value}${end}`, indent, FOLD_BLOCK, getFoldOptions(ctx, true));
return `${header}\n${indent}${body}`;
}
function plainString(item, ctx, onComment, onChompKeep) {
const { type, value } = item;
const { actualString, implicitKey, indent, indentStep, inFlow } = ctx;
if ((implicitKey && value.includes('\n')) ||
(inFlow && /[[\]{},]/.test(value))) {
return quotedString(value, ctx);
}
if (!value ||
/^[\n\t ,[\]{}#&*!|>'"%@`]|^[?-]$|^[?-][ \t]|[\n:][ \t]|[ \t]\n|[\n\t ]#|[\n\t :]$/.test(value)) {
// not allowed:
// - empty string, '-' or '?'
// - start with an indicator character (except [?:-]) or /[?-] /
// - '\n ', ': ' or ' \n' anywhere
// - '#' not preceded by a non-space char
// - end with ' ' or ':'
return implicitKey || inFlow || !value.includes('\n')
? quotedString(value, ctx)
: blockString(item, ctx, onComment, onChompKeep);
}
if (!implicitKey &&
!inFlow &&
type !== Scalar.PLAIN &&
value.includes('\n')) {
// Where allowed & type not set explicitly, prefer block style for multiline strings
return blockString(item, ctx, onComment, onChompKeep);
}
if (containsDocumentMarker(value)) {
if (indent === '') {
ctx.forceBlockIndent = true;
return blockString(item, ctx, onComment, onChompKeep);
}
else if (implicitKey && indent === indentStep) {
return quotedString(value, ctx);
}
}
const str = value.replace(/\n+/g, `$&\n${indent}`);
// Verify that output will be parsed as a string, as e.g. plain numbers and
// booleans get parsed with those types in v1.2 (e.g. '42', 'true' & '0.9e-3'),
// and others in v1.1.
if (actualString) {
const test = (tag) => tag.default && tag.tag !== 'tag:yaml.org,2002:str' && tag.test?.test(str);
const { compat, tags } = ctx.doc.schema;
if (tags.some(test) || compat?.some(test))
return quotedString(value, ctx);
}
return implicitKey
? str
: foldFlowLines(str, indent, FOLD_FLOW, getFoldOptions(ctx, false));
}
function stringifyString(item, ctx, onComment, onChompKeep) {
const { implicitKey, inFlow } = ctx;
const ss = typeof item.value === 'string'
? item
: Object.assign({}, item, { value: String(item.value) });
let { type } = item;
if (type !== Scalar.QUOTE_DOUBLE) {
// force double quotes on control characters & unpaired surrogates
if (/[\x00-\x08\x0b-\x1f\x7f-\x9f\u{D800}-\u{DFFF}]/u.test(ss.value))
type = Scalar.QUOTE_DOUBLE;
}
const _stringify = (_type) => {
switch (_type) {
case Scalar.BLOCK_FOLDED:
case Scalar.BLOCK_LITERAL:
return implicitKey || inFlow
? quotedString(ss.value, ctx) // blocks are not valid inside flow containers
: blockString(ss, ctx, onComment, onChompKeep);
case Scalar.QUOTE_DOUBLE:
return doubleQuotedString(ss.value, ctx);
case Scalar.QUOTE_SINGLE:
return singleQuotedString(ss.value, ctx);
case Scalar.PLAIN:
return plainString(ss, ctx, onComment, onChompKeep);
default:
return null;
}
};
let res = _stringify(type);
if (res === null) {
const { defaultKeyType, defaultStringType } = ctx.options;
const t = (implicitKey && defaultKeyType) || defaultStringType;
res = _stringify(t);
if (res === null)
throw new Error(`Unsupported default string type ${t}`);
}
return res;
}
function createStringifyContext(doc, options) {
const opt = Object.assign({
blockQuote: true,
commentString: stringifyComment,
defaultKeyType: null,
defaultStringType: 'PLAIN',
directives: null,
doubleQuotedAsJSON: false,
doubleQuotedMinMultiLineLength: 40,
falseStr: 'false',
flowCollectionPadding: true,
indentSeq: true,
lineWidth: 80,
minContentWidth: 20,
nullStr: 'null',
simpleKeys: false,
singleQuote: null,
trueStr: 'true',
verifyAliasOrder: true
}, doc.schema.toStringOptions, options);
let inFlow;
switch (opt.collectionStyle) {
case 'block':
inFlow = false;
break;
case 'flow':
inFlow = true;
break;
default:
inFlow = null;
}
return {
anchors: new Set(),
doc,
flowCollectionPadding: opt.flowCollectionPadding ? ' ' : '',
indent: '',
indentStep: typeof opt.indent === 'number' ? ' '.repeat(opt.indent) : ' ',
inFlow,
options: opt
};
}
function getTagObject(tags, item) {
if (item.tag) {
const match = tags.filter(t => t.tag === item.tag);
if (match.length > 0)
return match.find(t => t.format === item.format) ?? match[0];
}
let tagObj = undefined;
let obj;
if (isScalar$1(item)) {
obj = item.value;
const match = tags.filter(t => t.identify?.(obj));
tagObj =
match.find(t => t.format === item.format) ?? match.find(t => !t.format);
}
else {
obj = item;
tagObj = tags.find(t => t.nodeClass && obj instanceof t.nodeClass);
}
if (!tagObj) {
const name = obj?.constructor?.name ?? typeof obj;
throw new Error(`Tag not resolved for ${name} value`);
}
return tagObj;
}
// needs to be called before value stringifier to allow for circular anchor refs
function stringifyProps(node, tagObj, { anchors, doc }) {
if (!doc.directives)
return '';
const props = [];
const anchor = (isScalar$1(node) || isCollection$1(node)) && node.anchor;
if (anchor && anchorIsValid(anchor)) {
anchors.add(anchor);
props.push(`&${anchor}`);
}
const tag = node.tag ? node.tag : tagObj.default ? null : tagObj.tag;
if (tag)
props.push(doc.directives.tagString(tag));
return props.join(' ');
}
function stringify$2(item, ctx, onComment, onChompKeep) {
if (isPair(item))
return item.toString(ctx, onComment, onChompKeep);
if (isAlias(item)) {
if (ctx.doc.directives)
return item.toString(ctx);
if (ctx.resolvedAliases?.has(item)) {
throw new TypeError(`Cannot stringify circular structure without alias nodes`);
}
else {
if (ctx.resolvedAliases)
ctx.resolvedAliases.add(item);
else
ctx.resolvedAliases = new Set([item]);
item = item.resolve(ctx.doc);
}
}
let tagObj = undefined;
const node = isNode$1(item)
? item
: ctx.doc.createNode(item, { onTagObj: o => (tagObj = o) });
if (!tagObj)
tagObj = getTagObject(ctx.doc.schema.tags, node);
const props = stringifyProps(node, tagObj, ctx);
if (props.length > 0)
ctx.indentAtStart = (ctx.indentAtStart ?? 0) + props.length + 1;
const str = typeof tagObj.stringify === 'function'
? tagObj.stringify(node, ctx, onComment, onChompKeep)
: isScalar$1(node)
? stringifyString(node, ctx, onComment, onChompKeep)
: node.toString(ctx, onComment, onChompKeep);
if (!props)
return str;
return isScalar$1(node) || str[0] === '{' || str[0] === '['
? `${props} ${str}`
: `${props}\n${ctx.indent}${str}`;
}
function stringifyPair({ key, value }, ctx, onComment, onChompKeep) {
const { allNullValues, doc, indent, indentStep, options: { commentString, indentSeq, simpleKeys } } = ctx;
let keyComment = (isNode$1(key) && key.comment) || null;
if (simpleKeys) {
if (keyComment) {
throw new Error('With simple keys, key nodes cannot have comments');
}
if (isCollection$1(key) || (!isNode$1(key) && typeof key === 'object')) {
const msg = 'With simple keys, collection cannot be used as a key value';
throw new Error(msg);
}
}
let explicitKey = !simpleKeys &&
(!key ||
(keyComment && value == null && !ctx.inFlow) ||
isCollection$1(key) ||
(isScalar$1(key)
? key.type === Scalar.BLOCK_FOLDED || key.type === Scalar.BLOCK_LITERAL
: typeof key === 'object'));
ctx = Object.assign({}, ctx, {
allNullValues: false,
implicitKey: !explicitKey && (simpleKeys || !allNullValues),
indent: indent + indentStep
});
let keyCommentDone = false;
let chompKeep = false;
let str = stringify$2(key, ctx, () => (keyCommentDone = true), () => (chompKeep = true));
if (!explicitKey && !ctx.inFlow && str.length > 1024) {
if (simpleKeys)
throw new Error('With simple keys, single line scalar must not span more than 1024 characters');
explicitKey = true;
}
if (ctx.inFlow) {
if (allNullValues || value == null) {
if (keyCommentDone && onComment)
onComment();
return str === '' ? '?' : explicitKey ? `? ${str}` : str;
}
}
else if ((allNullValues && !simpleKeys) || (value == null && explicitKey)) {
str = `? ${str}`;
if (keyComment && !keyCommentDone) {
str += lineComment(str, ctx.indent, commentString(keyComment));
}
else if (chompKeep && onChompKeep)
onChompKeep();
return str;
}
if (keyCommentDone)
keyComment = null;
if (explicitKey) {
if (keyComment)
str += lineComment(str, ctx.indent, commentString(keyComment));
str = `? ${str}\n${indent}:`;
}
else {
str = `${str}:`;
if (keyComment)
str += lineComment(str, ctx.indent, commentString(keyComment));
}
let vsb, vcb, valueComment;
if (isNode$1(value)) {
vsb = !!value.spaceBefore;
vcb = value.commentBefore;
valueComment = value.comment;
}
else {
vsb = false;
vcb = null;
valueComment = null;
if (value && typeof value === 'object')
value = doc.createNode(value);
}
ctx.implicitKey = false;
if (!explicitKey && !keyComment && isScalar$1(value))
ctx.indentAtStart = str.length + 1;
chompKeep = false;
if (!indentSeq &&
indentStep.length >= 2 &&
!ctx.inFlow &&
!explicitKey &&
isSeq(value) &&
!value.flow &&
!value.tag &&
!value.anchor) {
// If indentSeq === false, consider '- ' as part of indentation where possible
ctx.indent = ctx.indent.substring(2);
}
let valueCommentDone = false;
const valueStr = stringify$2(value, ctx, () => (valueCommentDone = true), () => (chompKeep = true));
let ws = ' ';
if (keyComment || vsb || vcb) {
ws = vsb ? '\n' : '';
if (vcb) {
const cs = commentString(vcb);
ws += `\n${indentComment(cs, ctx.indent)}`;
}
if (valueStr === '' && !ctx.inFlow) {
if (ws === '\n')
ws = '\n\n';
}
else {
ws += `\n${ctx.indent}`;
}
}
else if (!explicitKey && isCollection$1(value)) {
const vs0 = valueStr[0];
const nl0 = valueStr.indexOf('\n');
const hasNewline = nl0 !== -1;
const flow = ctx.inFlow ?? value.flow ?? value.items.length === 0;
if (hasNewline || !flow) {
let hasPropsLine = false;
if (hasNewline && (vs0 === '&' || vs0 === '!')) {
let sp0 = valueStr.indexOf(' ');
if (vs0 === '&' &&
sp0 !== -1 &&
sp0 < nl0 &&
valueStr[sp0 + 1] === '!') {
sp0 = valueStr.indexOf(' ', sp0 + 1);
}
if (sp0 === -1 || nl0 < sp0)
hasPropsLine = true;
}
if (!hasPropsLine)
ws = `\n${ctx.indent}`;
}
}
else if (valueStr === '' || valueStr[0] === '\n') {
ws = '';
}
str += ws + valueStr;
if (ctx.inFlow) {
if (valueCommentDone && onComment)
onComment();
}
else if (valueComment && !valueCommentDone) {
str += lineComment(str, ctx.indent, commentString(valueComment));
}
else if (chompKeep && onChompKeep) {
onChompKeep();
}
return str;
}
function warn(logLevel, warning) {
if (logLevel === 'debug' || logLevel === 'warn') {
// https://github.com/typescript-eslint/typescript-eslint/issues/7478
// eslint-disable-next-line @typescript-eslint/prefer-optional-chain
if (typeof process !== 'undefined' && process.emitWarning)
process.emitWarning(warning);
else
console.warn(warning);
}
}
const MERGE_KEY = '<<';
function addPairToJSMap(ctx, map, { key, value }) {
if (ctx?.doc.schema.merge && isMergeKey(key)) {
value = isAlias(value) ? value.resolve(ctx.doc) : value;
if (isSeq(value))
for (const it of value.items)
mergeToJSMap(ctx, map, it);
else if (Array.isArray(value))
for (const it of value)
mergeToJSMap(ctx, map, it);
else
mergeToJSMap(ctx, map, value);
}
else {
const jsKey = toJS(key, '', ctx);
if (map instanceof Map) {
map.set(jsKey, toJS(value, jsKey, ctx));
}
else if (map instanceof Set) {
map.add(jsKey);
}
else {
const stringKey = stringifyKey(key, jsKey, ctx);
const jsValue = toJS(value, stringKey, ctx);
if (stringKey in map)
Object.defineProperty(map, stringKey, {
value: jsValue,
writable: true,
enumerable: true,
configurable: true
});
else
map[stringKey] = jsValue;
}
}
return map;
}
const isMergeKey = (key) => key === MERGE_KEY ||
(isScalar$1(key) &&
key.value === MERGE_KEY &&
(!key.type || key.type === Scalar.PLAIN));
// If the value associated with a merge key is a single mapping node, each of
// its key/value pairs is inserted into the current mapping, unless the key
// already exists in it. If the value associated with the merge key is a
// sequence, then this sequence is expected to contain mapping nodes and each
// of these nodes is merged in turn according to its order in the sequence.
// Keys in mapping nodes earlier in the sequence override keys specified in
// later mapping nodes. -- http://yaml.org/type/merge.html
function mergeToJSMap(ctx, map, value) {
const source = ctx && isAlias(value) ? value.resolve(ctx.doc) : value;
if (!isMap(source))
throw new Error('Merge sources must be maps or map aliases');
const srcMap = source.toJSON(null, ctx, Map);
for (const [key, value] of srcMap) {
if (map instanceof Map) {
if (!map.has(key))
map.set(key, value);
}
else if (map instanceof Set) {
map.add(key);
}
else if (!Object.prototype.hasOwnProperty.call(map, key)) {
Object.defineProperty(map, key, {
value,
writable: true,
enumerable: true,
configurable: true
});
}
}
return map;
}
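// Merge-key sketch: given the YAML below (values are illustrative), addPairToJSMap and
// mergeToJSMap produce { a: 1, b: 2, c: 3 } for `target` — the merge only fills in keys
// that are missing, while keys written explicitly in the target map keep their own values:
//   defaults: &d { a: 1, b: 0 }
//   target:
//     <<: *d
//     b: 2
//     c: 3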
function stringifyKey(key, jsKey, ctx) {
if (jsKey === null)
return '';
if (typeof jsKey !== 'object')
return String(jsKey);
if (isNode$1(key) && ctx?.doc) {
const strCtx = createStringifyContext(ctx.doc, {});
strCtx.anchors = new Set();
for (const node of ctx.anchors.keys())
strCtx.anchors.add(node.anchor);
strCtx.inFlow = true;
strCtx.inStringifyKey = true;
const strKey = key.toString(strCtx);
if (!ctx.mapKeyWarned) {
let jsonStr = JSON.stringify(strKey);
if (jsonStr.length > 40)
jsonStr = jsonStr.substring(0, 36) + '..."';
warn(ctx.doc.options.logLevel, `Keys with collection values will be stringified due to JS Object restrictions: ${jsonStr}. Set mapAsMap: true to use object keys.`);
ctx.mapKeyWarned = true;
}
return strKey;
}
return JSON.stringify(jsKey);
}
function createPair(key, value, ctx) {
const k = createNode(key, undefined, ctx);
const v = createNode(value, undefined, ctx);
return new Pair(k, v);
}
class Pair {
constructor(key, value = null) {
Object.defineProperty(this, NODE_TYPE, { value: PAIR });
this.key = key;
this.value = value;
}
clone(schema) {
let { key, value } = this;
if (isNode$1(key))
key = key.clone(schema);
if (isNode$1(value))
value = value.clone(schema);
return new Pair(key, value);
}
toJSON(_, ctx) {
const pair = ctx?.mapAsMap ? new Map() : {};
return addPairToJSMap(ctx, pair, this);
}
toString(ctx, onComment, onChompKeep) {
return ctx?.doc
? stringifyPair(this, ctx, onComment, onChompKeep)
: JSON.stringify(this);
}
}
function stringifyCollection(collection, ctx, options) {
const flow = ctx.inFlow ?? collection.flow;
const stringify = flow ? stringifyFlowCollection : stringifyBlockCollection;
return stringify(collection, ctx, options);
}
function stringifyBlockCollection({ comment, items }, ctx, { blockItemPrefix, flowChars, itemIndent, onChompKeep, onComment }) {
const { indent, options: { commentString } } = ctx;
const itemCtx = Object.assign({}, ctx, { indent: itemIndent, type: null });
let chompKeep = false; // flag for the preceding node's status
const lines = [];
for (let i = 0; i < items.length; ++i) {
const item = items[i];
let comment = null;
if (isNode$1(item)) {
if (!chompKeep && item.spaceBefore)
lines.push('');
addCommentBefore(ctx, lines, item.commentBefore, chompKeep);
if (item.comment)
comment = item.comment;
}
else if (isPair(item)) {
const ik = isNode$1(item.key) ? item.key : null;
if (ik) {
if (!chompKeep && ik.spaceBefore)
lines.push('');
addCommentBefore(ctx, lines, ik.commentBefore, chompKeep);
}
}
chompKeep = false;
let str = stringify$2(item, itemCtx, () => (comment = null), () => (chompKeep = true));
if (comment)
str += lineComment(str, itemIndent, commentString(comment));
if (chompKeep && comment)
chompKeep = false;
lines.push(blockItemPrefix + str);
}
let str;
if (lines.length === 0) {
str = flowChars.start + flowChars.end;
}
else {
str = lines[0];
for (let i = 1; i < lines.length; ++i) {
const line = lines[i];
str += line ? `\n${indent}${line}` : '\n';
}
}
if (comment) {
str += '\n' + indentComment(commentString(comment), indent);
if (onComment)
onComment();
}
else if (chompKeep && onChompKeep)
onChompKeep();
return str;
}
function stringifyFlowCollection({ items }, ctx, { flowChars, itemIndent }) {
const { indent, indentStep, flowCollectionPadding: fcPadding, options: { commentString } } = ctx;
itemIndent += indentStep;
const itemCtx = Object.assign({}, ctx, {
indent: itemIndent,
inFlow: true,
type: null
});
let reqNewline = false;
let linesAtValue = 0;
const lines = [];
for (let i = 0; i < items.length; ++i) {
const item = items[i];
let comment = null;
if (isNode$1(item)) {
if (item.spaceBefore)
lines.push('');
addCommentBefore(ctx, lines, item.commentBefore, false);
if (item.comment)
comment = item.comment;
}
else if (isPair(item)) {
const ik = isNode$1(item.key) ? item.key : null;
if (ik) {
if (ik.spaceBefore)
lines.push('');
addCommentBefore(ctx, lines, ik.commentBefore, false);
if (ik.comment)
reqNewline = true;
}
const iv = isNode$1(item.value) ? item.value : null;
if (iv) {
if (iv.comment)
comment = iv.comment;
if (iv.commentBefore)
reqNewline = true;
}
else if (item.value == null && ik?.comment) {
comment = ik.comment;
}
}
if (comment)
reqNewline = true;
let str = stringify$2(item, itemCtx, () => (comment = null));
if (i < items.length - 1)
str += ',';
if (comment)
str += lineComment(str, itemIndent, commentString(comment));
if (!reqNewline && (lines.length > linesAtValue || str.includes('\n')))
reqNewline = true;
lines.push(str);
linesAtValue = lines.length;
}
const { start, end } = flowChars;
if (lines.length === 0) {
return start + end;
}
else {
if (!reqNewline) {
const len = lines.reduce((sum, line) => sum + line.length + 2, 2);
reqNewline = ctx.options.lineWidth > 0 && len > ctx.options.lineWidth;
}
if (reqNewline) {
let str = start;
for (const line of lines)
str += line ? `\n${indentStep}${indent}${line}` : '\n';
return `${str}\n${indent}${end}`;
}
else {
return `${start}${fcPadding}${lines.join(' ')}${fcPadding}${end}`;
}
}
}
function addCommentBefore({ indent, options: { commentString } }, lines, comment, chompKeep) {
if (comment && chompKeep)
comment = comment.replace(/^\n+/, '');
if (comment) {
const ic = indentComment(commentString(comment), indent);
lines.push(ic.trimStart()); // Avoid double indent on first line
}
}
function findPair(items, key) {
const k = isScalar$1(key) ? key.value : key;
for (const it of items) {
if (isPair(it)) {
if (it.key === key || it.key === k)
return it;
if (isScalar$1(it.key) && it.key.value === k)
return it;
}
}
return undefined;
}
class YAMLMap extends Collection {
static get tagName() {
return 'tag:yaml.org,2002:map';
}
constructor(schema) {
super(MAP, schema);
this.items = [];
}
/**
* A generic collection parsing method that can be extended
* to other node classes that inherit from YAMLMap
*/
static from(schema, obj, ctx) {
const { keepUndefined, replacer } = ctx;
const map = new this(schema);
const add = (key, value) => {
if (typeof replacer === 'function')
value = replacer.call(obj, key, value);
else if (Array.isArray(replacer) && !replacer.includes(key))
return;
if (value !== undefined || keepUndefined)
map.items.push(createPair(key, value, ctx));
};
if (obj instanceof Map) {
for (const [key, value] of obj)
add(key, value);
}
else if (obj && typeof obj === 'object') {
for (const key of Object.keys(obj))
add(key, obj[key]);
}
if (typeof schema.sortMapEntries === 'function') {
map.items.sort(schema.sortMapEntries);
}
return map;
}
/**
* Adds a value to the collection.
*
* @param overwrite - If not set `true`, using a key that is already in the
* collection will throw. Otherwise, overwrites the previous value.
*/
add(pair, overwrite) {
let _pair;
if (isPair(pair))
_pair = pair;
else if (!pair || typeof pair !== 'object' || !('key' in pair)) {
// In TypeScript, this never happens.
_pair = new Pair(pair, pair?.value);
}
else
_pair = new Pair(pair.key, pair.value);
const prev = findPair(this.items, _pair.key);
const sortEntries = this.schema?.sortMapEntries;
if (prev) {
if (!overwrite)
throw new Error(`Key ${_pair.key} already set`);
// For scalars, keep the old node & its comments and anchors
if (isScalar$1(prev.value) && isScalarValue(_pair.value))
prev.value.value = _pair.value;
else
prev.value = _pair.value;
}
else if (sortEntries) {
const i = this.items.findIndex(item => sortEntries(_pair, item) < 0);
if (i === -1)
this.items.push(_pair);
else
this.items.splice(i, 0, _pair);
}
else {
this.items.push(_pair);
}
}
delete(key) {
const it = findPair(this.items, key);
if (!it)
return false;
const del = this.items.splice(this.items.indexOf(it), 1);
return del.length > 0;
}
get(key, keepScalar) {
const it = findPair(this.items, key);
const node = it?.value;
return (!keepScalar && isScalar$1(node) ? node.value : node) ?? undefined;
}
has(key) {
return !!findPair(this.items, key);
}
set(key, value) {
this.add(new Pair(key, value), true);
}
/**
* @param ctx - Conversion context, originally set in Document#toJS()
* @param {Class} Type - If set, forces the returned collection type
* @returns Instance of Type, Map, or Object
*/
toJSON(_, ctx, Type) {
const map = Type ? new Type() : ctx?.mapAsMap ? new Map() : {};
if (ctx?.onCreate)
ctx.onCreate(map);
for (const item of this.items)
addPairToJSMap(ctx, map, item);
return map;
}
toString(ctx, onComment, onChompKeep) {
if (!ctx)
return JSON.stringify(this);
for (const item of this.items) {
if (!isPair(item))
throw new Error(`Map items must all be pairs; found ${JSON.stringify(item)} instead`);
}
if (!ctx.allNullValues && this.hasAllNullValues(false))
ctx = Object.assign({}, ctx, { allNullValues: true });
return stringifyCollection(this, ctx, {
blockItemPrefix: '',
flowChars: { start: '{', end: '}' },
itemIndent: ctx.indent || '',
onChompKeep,
onComment
});
}
}
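// Usage sketch (not part of the library source; a minimal illustration of the
// YAMLMap API defined above — identifiers such as Scalar and Pair are the ones
// already in scope in this bundle):
//
//   const m = new YAMLMap();
//   m.set('a', new Scalar(1));                        // set() is add(new Pair(key, value), true)
//   m.add({ key: 'a', value: new Scalar(2) });        // throws: "Key a already set"
//   m.add({ key: 'a', value: new Scalar(2) }, true);  // overwrite: replaces the previous value
//   m.get('a');        // -> 2 (Scalar value unwrapped)
//   m.get('a', true);  // -> the Scalar node itself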
const map$1 = {
collection: 'map',
default: true,
nodeClass: YAMLMap,
tag: 'tag:yaml.org,2002:map',
resolve(map, onError) {
if (!isMap(map))
onError('Expected a mapping for this tag');
return map;
},
createNode: (schema, obj, ctx) => YAMLMap.from(schema, obj, ctx)
};
class YAMLSeq extends Collection {
static get tagName() {
return 'tag:yaml.org,2002:seq';
}
constructor(schema) {
super(SEQ, schema);
this.items = [];
}
add(value) {
this.items.push(value);
}
/**
* Removes a value from the collection.
*
* `key` must contain a representation of an integer for this to succeed.
* It may be wrapped in a `Scalar`.
*
* @returns `true` if the item was found and removed.
*/
delete(key) {
const idx = asItemIndex(key);
if (typeof idx !== 'number')
return false;
const del = this.items.splice(idx, 1);
return del.length > 0;
}
get(key, keepScalar) {
const idx = asItemIndex(key);
if (typeof idx !== 'number')
return undefined;
const it = this.items[idx];
return !keepScalar && isScalar$1(it) ? it.value : it;
}
/**
* Checks if the collection includes a value with the key `key`.
*
* `key` must contain a representation of an integer for this to succeed.
* It may be wrapped in a `Scalar`.
*/
has(key) {
const idx = asItemIndex(key);
return typeof idx === 'number' && idx < this.items.length;
}
/**
* Sets a value in this collection. For `!!set`, `value` needs to be a
* boolean to add/remove the item from the set.
*
* If `key` does not contain a representation of an integer, this will throw.
* It may be wrapped in a `Scalar`.
*/
set(key, value) {
const idx = asItemIndex(key);
if (typeof idx !== 'number')
throw new Error(`Expected a valid index, not ${key}.`);
const prev = this.items[idx];
if (isScalar$1(prev) && isScalarValue(value))
prev.value = value;
else
this.items[idx] = value;
}
toJSON(_, ctx) {
const seq = [];
if (ctx?.onCreate)
ctx.onCreate(seq);
let i = 0;
for (const item of this.items)
seq.push(toJS(item, String(i++), ctx));
return seq;
}
toString(ctx, onComment, onChompKeep) {
if (!ctx)
return JSON.stringify(this);
return stringifyCollection(this, ctx, {
blockItemPrefix: '- ',
flowChars: { start: '[', end: ']' },
itemIndent: (ctx.indent || '') + ' ',
onChompKeep,
onComment
});
}
static from(schema, obj, ctx) {
const { replacer } = ctx;
const seq = new this(schema);
if (obj && Symbol.iterator in Object(obj)) {
let i = 0;
for (let it of obj) {
if (typeof replacer === 'function') {
const key = obj instanceof Set ? it : String(i++);
it = replacer.call(obj, key, it);
}
seq.items.push(createNode(it, undefined, ctx));
}
}
return seq;
}
}
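// Usage sketch (not part of the library source; illustrates the integer-keyed
// YAMLSeq API above — keys may also be Scalar-wrapped or numeric strings):
//
//   const s = new YAMLSeq();
//   s.add(new Scalar('x'));
//   s.add(new Scalar('y'));
//   s.get(0);                 // -> 'x' (unwrapped)
//   s.get(new Scalar('1'));   // -> 'y' (Scalar key holding a numeric string)
//   s.set(1, 'z');            // updates the value of the existing Scalar node
//   s.delete(0);              // -> true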
function asItemIndex(key) {
let idx = isScalar$1(key) ? key.value : key;
if (idx && typeof idx === 'string')
idx = Number(idx);
return typeof idx === 'number' && Number.isInteger(idx) && idx >= 0
? idx
: null;
}
const seq = {
collection: 'seq',
default: true,
nodeClass: YAMLSeq,
tag: 'tag:yaml.org,2002:seq',
resolve(seq, onError) {
if (!isSeq(seq))
onError('Expected a sequence for this tag');
return seq;
},
createNode: (schema, obj, ctx) => YAMLSeq.from(schema, obj, ctx)
};
const string = {
identify: value => typeof value === 'string',
default: true,
tag: 'tag:yaml.org,2002:str',
resolve: str => str,
stringify(item, ctx, onComment, onChompKeep) {
ctx = Object.assign({ actualString: true }, ctx);
return stringifyString(item, ctx, onComment, onChompKeep);
}
};
const nullTag = {
identify: value => value == null,
createNode: () => new Scalar(null),
default: true,
tag: 'tag:yaml.org,2002:null',
test: /^(?:~|[Nn]ull|NULL)?$/,
resolve: () => new Scalar(null),
stringify: ({ source }, ctx) => typeof source === 'string' && nullTag.test.test(source)
? source
: ctx.options.nullStr
};
const boolTag = {
identify: value => typeof value === 'boolean',
default: true,
tag: 'tag:yaml.org,2002:bool',
test: /^(?:[Tt]rue|TRUE|[Ff]alse|FALSE)$/,
resolve: str => new Scalar(str[0] === 't' || str[0] === 'T'),
stringify({ source, value }, ctx) {
if (source && boolTag.test.test(source)) {
const sv = source[0] === 't' || source[0] === 'T';
if (value === sv)
return source;
}
return value ? ctx.options.trueStr : ctx.options.falseStr;
}
};
function stringifyNumber({ format, minFractionDigits, tag, value }) {
if (typeof value === 'bigint')
return String(value);
const num = typeof value === 'number' ? value : Number(value);
if (!isFinite(num))
return isNaN(num) ? '.nan' : num < 0 ? '-.inf' : '.inf';
let n = JSON.stringify(value);
if (!format &&
minFractionDigits &&
(!tag || tag === 'tag:yaml.org,2002:float') &&
/^\d/.test(n)) {
let i = n.indexOf('.');
if (i < 0) {
i = n.length;
n += '.';
}
let d = minFractionDigits - (n.length - i - 1);
while (d-- > 0)
n += '0';
}
return n;
}
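// Worked examples (not part of the library source) for stringifyNumber() above:
//   stringifyNumber({ value: 1.1, minFractionDigits: 3 })  // -> '1.100' (zero-padded)
//   stringifyNumber({ value: 3 })                          // -> '3'
//   stringifyNumber({ value: -Infinity })                  // -> '-.inf'
//   stringifyNumber({ value: NaN })                        // -> '.nan'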
const floatNaN$1 = {
identify: value => typeof value === 'number',
default: true,
tag: 'tag:yaml.org,2002:float',
test: /^(?:[-+]?\.(?:inf|Inf|INF)|\.nan|\.NaN|\.NAN)$/,
resolve: str => str.slice(-3).toLowerCase() === 'nan'
? NaN
: str[0] === '-'
? Number.NEGATIVE_INFINITY
: Number.POSITIVE_INFINITY,
stringify: stringifyNumber
};
const floatExp$1 = {
identify: value => typeof value === 'number',
default: true,
tag: 'tag:yaml.org,2002:float',
format: 'EXP',
test: /^[-+]?(?:\.[0-9]+|[0-9]+(?:\.[0-9]*)?)[eE][-+]?[0-9]+$/,
resolve: str => parseFloat(str),
stringify(node) {
const num = Number(node.value);
return isFinite(num) ? num.toExponential() : stringifyNumber(node);
}
};
const float$1 = {
identify: value => typeof value === 'number',
default: true,
tag: 'tag:yaml.org,2002:float',
test: /^[-+]?(?:\.[0-9]+|[0-9]+\.[0-9]*)$/,
resolve(str) {
const node = new Scalar(parseFloat(str));
const dot = str.indexOf('.');
if (dot !== -1 && str[str.length - 1] === '0')
node.minFractionDigits = str.length - dot - 1;
return node;
},
stringify: stringifyNumber
};
const intIdentify$2 = (value) => typeof value === 'bigint' || Number.isInteger(value);
const intResolve$1 = (str, offset, radix, { intAsBigInt }) => (intAsBigInt ? BigInt(str) : parseInt(str.substring(offset), radix));
function intStringify$1(node, radix, prefix) {
const { value } = node;
if (intIdentify$2(value) && value >= 0)
return prefix + value.toString(radix);
return stringifyNumber(node);
}
const intOct$1 = {
identify: value => intIdentify$2(value) && value >= 0,
default: true,
tag: 'tag:yaml.org,2002:int',
format: 'OCT',
test: /^0o[0-7]+$/,
resolve: (str, _onError, opt) => intResolve$1(str, 2, 8, opt),
stringify: node => intStringify$1(node, 8, '0o')
};
const int$1 = {
identify: intIdentify$2,
default: true,
tag: 'tag:yaml.org,2002:int',
test: /^[-+]?[0-9]+$/,
resolve: (str, _onError, opt) => intResolve$1(str, 0, 10, opt),
stringify: stringifyNumber
};
const intHex$1 = {
identify: value => intIdentify$2(value) && value >= 0,
default: true,
tag: 'tag:yaml.org,2002:int',
format: 'HEX',
test: /^0x[0-9a-fA-F]+$/,
resolve: (str, _onError, opt) => intResolve$1(str, 2, 16, opt),
stringify: node => intStringify$1(node, 16, '0x')
};
const schema$2 = [
map$1,
seq,
string,
nullTag,
boolTag,
intOct$1,
int$1,
intHex$1,
floatNaN$1,
floatExp$1,
float$1
];
function intIdentify$1(value) {
return typeof value === 'bigint' || Number.isInteger(value);
}
const stringifyJSON = ({ value }) => JSON.stringify(value);
const jsonScalars = [
{
identify: value => typeof value === 'string',
default: true,
tag: 'tag:yaml.org,2002:str',
resolve: str => str,
stringify: stringifyJSON
},
{
identify: value => value == null,
createNode: () => new Scalar(null),
default: true,
tag: 'tag:yaml.org,2002:null',
test: /^null$/,
resolve: () => null,
stringify: stringifyJSON
},
{
identify: value => typeof value === 'boolean',
default: true,
tag: 'tag:yaml.org,2002:bool',
test: /^true|false$/,
resolve: str => str === 'true',
stringify: stringifyJSON
},
{
identify: intIdentify$1,
default: true,
tag: 'tag:yaml.org,2002:int',
test: /^-?(?:0|[1-9][0-9]*)$/,
resolve: (str, _onError, { intAsBigInt }) => intAsBigInt ? BigInt(str) : parseInt(str, 10),
stringify: ({ value }) => intIdentify$1(value) ? value.toString() : JSON.stringify(value)
},
{
identify: value => typeof value === 'number',
default: true,
tag: 'tag:yaml.org,2002:float',
test: /^-?(?:0|[1-9][0-9]*)(?:\.[0-9]*)?(?:[eE][-+]?[0-9]+)?$/,
resolve: str => parseFloat(str),
stringify: stringifyJSON
}
];
const jsonError = {
default: true,
tag: '',
test: /^/,
resolve(str, onError) {
onError(`Unresolved plain scalar ${JSON.stringify(str)}`);
return str;
}
};
const schema$1 = [map$1, seq].concat(jsonScalars, jsonError);
const binary = {
identify: value => value instanceof Uint8Array, // Buffer inherits from Uint8Array
default: false,
tag: 'tag:yaml.org,2002:binary',
/**
 * Returns a Buffer in Node.js and a Uint8Array in browsers
*
* To use the resulting buffer as an image, you'll want to do something like:
*
* const blob = new Blob([buffer], { type: 'image/jpeg' })
* document.querySelector('#photo').src = URL.createObjectURL(blob)
*/
resolve(src, onError) {
if (typeof Buffer === 'function') {
return Buffer.from(src, 'base64');
}
else if (typeof atob === 'function') {
// On IE 11, atob() can't handle newlines
const str = atob(src.replace(/[\n\r]/g, ''));
const buffer = new Uint8Array(str.length);
for (let i = 0; i < str.length; ++i)
buffer[i] = str.charCodeAt(i);
return buffer;
}
else {
onError('This environment does not support reading binary tags; either Buffer or atob is required');
return src;
}
},
stringify({ comment, type, value }, ctx, onComment, onChompKeep) {
const buf = value; // checked earlier by binary.identify()
let str;
if (typeof Buffer === 'function') {
str =
buf instanceof Buffer
? buf.toString('base64')
: Buffer.from(buf.buffer).toString('base64');
}
else if (typeof btoa === 'function') {
let s = '';
for (let i = 0; i < buf.length; ++i)
s += String.fromCharCode(buf[i]);
str = btoa(s);
}
else {
throw new Error('This environment does not support writing binary tags; either Buffer or btoa is required');
}
if (!type)
type = Scalar.BLOCK_LITERAL;
if (type !== Scalar.QUOTE_DOUBLE) {
const lineWidth = Math.max(ctx.options.lineWidth - ctx.indent.length, ctx.options.minContentWidth);
const n = Math.ceil(str.length / lineWidth);
const lines = new Array(n);
for (let i = 0, o = 0; i < n; ++i, o += lineWidth) {
lines[i] = str.substr(o, lineWidth);
}
str = lines.join(type === Scalar.BLOCK_LITERAL ? '\n' : ' ');
}
return stringifyString({ comment, type, value: str }, ctx, onComment, onChompKeep);
}
};
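// Round-trip sketch (not part of the library source) for the !!binary tag above.
// In Node.js, resolve() yields a Buffer; in browsers, a Uint8Array built via atob():
//
//   const buf = binary.resolve('aGVsbG8=', console.error);  // bytes of 'hello'
//   // stringify() re-encodes the bytes as base64 and, unless the node type is
//   // QUOTE_DOUBLE, wraps the output into lines no wider than ctx.options.lineWidth.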
function resolvePairs(seq, onError) {
if (isSeq(seq)) {
for (let i = 0; i < seq.items.length; ++i) {
let item = seq.items[i];
if (isPair(item))
continue;
else if (isMap(item)) {
if (item.items.length > 1)
onError('Each pair must have its own sequence indicator');
const pair = item.items[0] || new Pair(new Scalar(null));
if (item.commentBefore)
pair.key.commentBefore = pair.key.commentBefore
? `${item.commentBefore}\n${pair.key.commentBefore}`
: item.commentBefore;
if (item.comment) {
const cn = pair.value ?? pair.key;
cn.comment = cn.comment
? `${item.comment}\n${cn.comment}`
: item.comment;
}
item = pair;
}
seq.items[i] = isPair(item) ? item : new Pair(item);
}
}
else
onError('Expected a sequence for this tag');
return seq;
}
function createPairs(schema, iterable, ctx) {
const { replacer } = ctx;
const pairs = new YAMLSeq(schema);
pairs.tag = 'tag:yaml.org,2002:pairs';
let i = 0;
if (iterable && Symbol.iterator in Object(iterable))
for (let it of iterable) {
if (typeof replacer === 'function')
it = replacer.call(iterable, String(i++), it);
let key, value;
if (Array.isArray(it)) {
if (it.length === 2) {
key = it[0];
value = it[1];
}
else
throw new TypeError(`Expected [key, value] tuple: ${it}`);
}
else if (it && it instanceof Object) {
const keys = Object.keys(it);
if (keys.length === 1) {
key = keys[0];
value = it[key];
}
else {
throw new TypeError(`Expected tuple with one key, not ${keys.length} keys`);
}
}
else {
key = it;
}
pairs.items.push(createPair(key, value, ctx));
}
return pairs;
}
const pairs = {
collection: 'seq',
default: false,
tag: 'tag:yaml.org,2002:pairs',
resolve: resolvePairs,
createNode: createPairs
};
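// Sketch (not part of the library source): the !!pairs tag resolves a sequence of
// single-entry maps into key/value pairs, and duplicate keys are allowed. For example
// the YAML node
//
//   !!pairs [ a: 1, b: 2, a: 3 ]
//
// becomes a YAMLSeq whose items are three Pair nodes; resolvePairs() reports an
// error for any entry that contains more than one key.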
class YAMLOMap extends YAMLSeq {
constructor() {
super();
this.add = YAMLMap.prototype.add.bind(this);
this.delete = YAMLMap.prototype.delete.bind(this);
this.get = YAMLMap.prototype.get.bind(this);
this.has = YAMLMap.prototype.has.bind(this);
this.set = YAMLMap.prototype.set.bind(this);
this.tag = YAMLOMap.tag;
}
/**
* If `ctx` is given, the return type is actually `Map<unknown, unknown>`,
* but TypeScript won't allow widening the signature of a child method.
*/
toJSON(_, ctx) {
if (!ctx)
return super.toJSON(_);
const map = new Map();
if (ctx?.onCreate)
ctx.onCreate(map);
for (const pair of this.items) {
let key, value;
if (isPair(pair)) {
key = toJS(pair.key, '', ctx);
value = toJS(pair.value, key, ctx);
}
else {
key = toJS(pair, '', ctx);
}
if (map.has(key))
throw new Error('Ordered maps must not include duplicate keys');
map.set(key, value);
}
return map;
}
static from(schema, iterable, ctx) {
const pairs = createPairs(schema, iterable, ctx);
const omap = new this();
omap.items = pairs.items;
return omap;
}
}
YAMLOMap.tag = 'tag:yaml.org,2002:omap';
const omap = {
collection: 'seq',
identify: value => value instanceof Map,
nodeClass: YAMLOMap,
default: false,
tag: 'tag:yaml.org,2002:omap',
resolve(seq, onError) {
const pairs = resolvePairs(seq, onError);
const seenKeys = [];
for (const { key } of pairs.items) {
if (isScalar$1(key)) {
if (seenKeys.includes(key.value)) {
onError(`Ordered maps must not include duplicate keys: ${key.value}`);
}
else {
seenKeys.push(key.value);
}
}
}
return Object.assign(new YAMLOMap(), pairs);
},
createNode: (schema, iterable, ctx) => YAMLOMap.from(schema, iterable, ctx)
};
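// Sketch (not part of the library source): !!omap is parsed like !!pairs but enforces
// key uniqueness, and YAMLOMap#toJSON() produces a Map so entry order is preserved:
//
//   !!omap [ a: 1, b: 2 ]   # -> Map { 'a' => 1, 'b' => 2 }
//   !!omap [ a: 1, a: 2 ]   # error: duplicate keys are rejected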
function boolStringify({ value, source }, ctx) {
const boolObj = value ? trueTag : falseTag;
if (source && boolObj.test.test(source))
return source;
return value ? ctx.options.trueStr : ctx.options.falseStr;
}
const trueTag = {
identify: value => value === true,
default: true,
tag: 'tag:yaml.org,2002:bool',
test: /^(?:Y|y|[Yy]es|YES|[Tt]rue|TRUE|[Oo]n|ON)$/,
resolve: () => new Scalar(true),
stringify: boolStringify
};
const falseTag = {
identify: value => value === false,
default: true,
tag: 'tag:yaml.org,2002:bool',
test: /^(?:N|n|[Nn]o|NO|[Ff]alse|FALSE|[Oo]ff|OFF)$/,
resolve: () => new Scalar(false),
stringify: boolStringify
};
const floatNaN = {
identify: value => typeof value === 'number',
default: true,
tag: 'tag:yaml.org,2002:float',
test: /^(?:[-+]?\.(?:inf|Inf|INF)|\.nan|\.NaN|\.NAN)$/,
resolve: (str) => str.slice(-3).toLowerCase() === 'nan'
? NaN
: str[0] === '-'
? Number.NEGATIVE_INFINITY
: Number.POSITIVE_INFINITY,
stringify: stringifyNumber
};
const floatExp = {
identify: value => typeof value === 'number',
default: true,
tag: 'tag:yaml.org,2002:float',
format: 'EXP',
test: /^[-+]?(?:[0-9][0-9_]*)?(?:\.[0-9_]*)?[eE][-+]?[0-9]+$/,
resolve: (str) => parseFloat(str.replace(/_/g, '')),
stringify(node) {
const num = Number(node.value);
return isFinite(num) ? num.toExponential() : stringifyNumber(node);
}
};
const float = {
identify: value => typeof value === 'number',
default: true,
tag: 'tag:yaml.org,2002:float',
test: /^[-+]?(?:[0-9][0-9_]*)?\.[0-9_]*$/,
resolve(str) {
const node = new Scalar(parseFloat(str.replace(/_/g, '')));
const dot = str.indexOf('.');
if (dot !== -1) {
const f = str.substring(dot + 1).replace(/_/g, '');
if (f[f.length - 1] === '0')
node.minFractionDigits = f.length;
}
return node;
},
stringify: stringifyNumber
};
const intIdentify = (value) => typeof value === 'bigint' || Number.isInteger(value);
function intResolve(str, offset, radix, { intAsBigInt }) {
const sign = str[0];
if (sign === '-' || sign === '+')
offset += 1;
str = str.substring(offset).replace(/_/g, '');
if (intAsBigInt) {
switch (radix) {
case 2:
str = `0b${str}`;
break;
case 8:
str = `0o${str}`;
break;
case 16:
str = `0x${str}`;
break;
}
const n = BigInt(str);
return sign === '-' ? BigInt(-1) * n : n;
}
const n = parseInt(str, radix);
return sign === '-' ? -1 * n : n;
}
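// Worked examples (not part of the library source) for the YAML 1.1 intResolve() above;
// underscores are stripped and the sign is applied after parsing:
//   intResolve('-0x_F_F', 2, 16, { intAsBigInt: false })  // -> -255
//   intResolve('+0b1010', 2, 2, { intAsBigInt: true })    // -> 10n (via BigInt('0b1010'))
//   intResolve('1_000', 0, 10, { intAsBigInt: false })    // -> 1000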
function intStringify(node, radix, prefix) {
const { value } = node;
if (intIdentify(value)) {
const str = value.toString(radix);
return value < 0 ? '-' + prefix + str.substr(1) : prefix + str;
}
return stringifyNumber(node);
}
const intBin = {
identify: intIdentify,
default: true,
tag: 'tag:yaml.org,2002:int',
format: 'BIN',
test: /^[-+]?0b[0-1_]+$/,
resolve: (str, _onError, opt) => intResolve(str, 2, 2, opt),
stringify: node => intStringify(node, 2, '0b')
};
const intOct = {
identify: intIdentify,
default: true,
tag: 'tag:yaml.org,2002:int',
format: 'OCT',
test: /^[-+]?0[0-7_]+$/,
resolve: (str, _onError, opt) => intResolve(str, 1, 8, opt),
stringify: node => intStringify(node, 8, '0')
};
const int = {
identify: intIdentify,
default: true,
tag: 'tag:yaml.org,2002:int',
test: /^[-+]?[0-9][0-9_]*$/,
resolve: (str, _onError, opt) => intResolve(str, 0, 10, opt),
stringify: stringifyNumber
};
const intHex = {
identify: intIdentify,
default: true,
tag: 'tag:yaml.org,2002:int',
format: 'HEX',
test: /^[-+]?0x[0-9a-fA-F_]+$/,
resolve: (str, _onError, opt) => intResolve(str, 2, 16, opt),
stringify: node => intStringify(node, 16, '0x')
};
class YAMLSet extends YAMLMap {
constructor(schema) {
super(schema);
this.tag = YAMLSet.tag;
}
add(key) {
let pair;
if (isPair(key))
pair = key;
else if (key &&
typeof key === 'object' &&
'key' in key &&
'value' in key &&
key.value === null)
pair = new Pair(key.key, null);
else
pair = new Pair(key, null);
const prev = findPair(this.items, pair.key);
if (!prev)
this.items.push(pair);
}
/**
* If `keepPair` is `true`, returns the Pair matching `key`.
* Otherwise, returns the value of that Pair's key.
*/
get(key, keepPair) {
const pair = findPair(this.items, key);
return !keepPair && isPair(pair)
? isScalar$1(pair.key)
? pair.key.value
: pair.key
: pair;
}
set(key, value) {
if (typeof value !== 'boolean')
throw new Error(`Expected boolean value for set(key, value) in a YAML set, not ${typeof value}`);
const prev = findPair(this.items, key);
if (prev && !value) {
this.items.splice(this.items.indexOf(prev), 1);
}
else if (!prev && value) {
this.items.push(new Pair(key));
}
}
toJSON(_, ctx) {
return super.toJSON(_, ctx, Set);
}
toString(ctx, onComment, onChompKeep) {
if (!ctx)
return JSON.stringify(this);
if (this.hasAllNullValues(true))
return super.toString(Object.assign({}, ctx, { allNullValues: true }), onComment, onChompKeep);
else
throw new Error('Set items must all have null values');
}
static from(schema, iterable, ctx) {
const { replacer } = ctx;
const set = new this(schema);
if (iterable && Symbol.iterator in Object(iterable))
for (let value of iterable) {
if (typeof replacer === 'function')
value = replacer.call(iterable, value, value);
set.items.push(createPair(value, null, ctx));
}
return set;
}
}
YAMLSet.tag = 'tag:yaml.org,2002:set';
const set = {
collection: 'map',
identify: value => value instanceof Set,
nodeClass: YAMLSet,
default: false,
tag: 'tag:yaml.org,2002:set',
createNode: (schema, iterable, ctx) => YAMLSet.from(schema, iterable, ctx),
resolve(map, onError) {
if (isMap(map)) {
if (map.hasAllNullValues(true))
return Object.assign(new YAMLSet(), map);
else
onError('Set items must all have null values');
}
else
onError('Expected a mapping for this tag');
return map;
}
};
/** Internal types handle bigint as number, because TS can't figure it out. */
function parseSexagesimal(str, asBigInt) {
const sign = str[0];
const parts = sign === '-' || sign === '+' ? str.substring(1) : str;
const num = (n) => asBigInt ? BigInt(n) : Number(n);
const res = parts
.replace(/_/g, '')
.split(':')
.reduce((res, p) => res * num(60) + num(p), num(0));
return (sign === '-' ? num(-1) * res : res);
}
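// Worked examples (not part of the library source) for parseSexagesimal() above:
//   parseSexagesimal('3:25:45', false)  // -> (3 * 60 + 25) * 60 + 45 = 12345
//   parseSexagesimal('-1:30', false)    // -> -(1 * 60 + 30) = -90
// With asBigInt = true the same arithmetic is carried out on BigInt values.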
/**
* hhhh:mm:ss.sss
*
* Internal types handle bigint as number, because TS can't figure it out.
*/
function stringifySexagesimal(node) {
let { value } = node;
let num = (n) => n;
if (typeof value === 'bigint')
num = n => BigInt(n);
else if (isNaN(value) || !isFinite(value))
return stringifyNumber(node);
let sign = '';
if (value < 0) {
sign = '-';
value *= num(-1);
}
const _60 = num(60);
const parts = [value % _60]; // seconds, including ms
if (value < 60) {
parts.unshift(0); // at least one : is required
}
else {
value = (value - parts[0]) / _60;
parts.unshift(value % _60); // minutes
if (value >= 60) {
value = (value - parts[0]) / _60;
parts.unshift(value); // hours
}
}
return (sign +
parts
.map(n => String(n).padStart(2, '0'))
.join(':')
            .replace(/000000\d*$/, '') // the % 60 above may introduce floating-point error
);
}
const intTime = {
identify: value => typeof value === 'bigint' || Number.isInteger(value),
default: true,
tag: 'tag:yaml.org,2002:int',
format: 'TIME',
test: /^[-+]?[0-9][0-9_]*(?::[0-5]?[0-9])+$/,
resolve: (str, _onError, { intAsBigInt }) => parseSexagesimal(str, intAsBigInt),
stringify: stringifySexagesimal
};
const floatTime = {
identify: value => typeof value === 'number',
default: true,
tag: 'tag:yaml.org,2002:float',
format: 'TIME',
test: /^[-+]?[0-9][0-9_]*(?::[0-5]?[0-9])+\.[0-9_]*$/,
resolve: str => parseSexagesimal(str, false),
stringify: stringifySexagesimal
};
const timestamp = {
identify: value => value instanceof Date,
default: true,
tag: 'tag:yaml.org,2002:timestamp',
// If the time zone is omitted, the timestamp is assumed to be specified in UTC. The time part
// may be omitted altogether, resulting in a date format. In such a case, the time part is
// assumed to be 00:00:00Z (start of day, UTC).
test: RegExp('^([0-9]{4})-([0-9]{1,2})-([0-9]{1,2})' + // YYYY-Mm-Dd
'(?:' + // time is optional
'(?:t|T|[ \\t]+)' + // t | T | whitespace
'([0-9]{1,2}):([0-9]{1,2}):([0-9]{1,2}(\\.[0-9]+)?)' + // Hh:Mm:Ss(.ss)?
'(?:[ \\t]*(Z|[-+][012]?[0-9](?::[0-9]{2})?))?' + // Z | +5 | -03:30
')?$'),
resolve(str) {
const match = str.match(timestamp.test);
if (!match)
throw new Error('!!timestamp expects a date, starting with yyyy-mm-dd');
const [, year, month, day, hour, minute, second] = match.map(Number);
const millisec = match[7] ? Number((match[7] + '00').substr(1, 3)) : 0;
let date = Date.UTC(year, month - 1, day, hour || 0, minute || 0, second || 0, millisec);
const tz = match[8];
if (tz && tz !== 'Z') {
let d = parseSexagesimal(tz, false);
if (Math.abs(d) < 30)
d *= 60;
date -= 60000 * d;
}
return new Date(date);
},
stringify: ({ value }) => value.toISOString().replace(/((T00:00)?:00)?\.000Z$/, '')
};
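// Worked example (not part of the library source) for the !!timestamp tag above:
//   timestamp.resolve('2001-12-14')
//     // -> new Date('2001-12-14T00:00:00.000Z')  (time omitted: start of day, UTC)
//   timestamp.stringify({ value: new Date('2001-12-14T00:00:00.000Z') })
//     // -> '2001-12-14'  (the trailing T00:00:00.000Z is trimmed)
// A trailing offset such as '-5' is read as whole hours (absolute values under 30
// are multiplied by 60 to get minutes) and subtracted to convert the parsed wall
// time to UTC.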
const schema = [
map$1,
seq,
string,
nullTag,
trueTag,
falseTag,
intBin,
intOct,
int,
intHex,
floatNaN,
floatExp,
float,
binary,
omap,
pairs,
set,
intTime,
floatTime,
timestamp
];
const schemas = new Map([
['core', schema$2],
['failsafe', [map$1, seq, string]],
['json', schema$1],
['yaml11', schema],
['yaml-1.1', schema]
]);
const tagsByName = {
binary,
bool: boolTag,
float: float$1,
floatExp: floatExp$1,
floatNaN: floatNaN$1,
floatTime,
int: int$1,
intHex: intHex$1,
intOct: intOct$1,
intTime,
map: map$1,
null: nullTag,
omap,
pairs,
seq,
set,
timestamp
};
const coreKnownTags = {
'tag:yaml.org,2002:binary': binary,
'tag:yaml.org,2002:omap': omap,
'tag:yaml.org,2002:pairs': pairs,
'tag:yaml.org,2002:set': set,
'tag:yaml.org,2002:timestamp': timestamp
};
function getTags(customTags, schemaName) {
let tags = schemas.get(schemaName);
if (!tags) {
if (Array.isArray(customTags))
tags = [];
else {
const keys = Array.from(schemas.keys())
.filter(key => key !== 'yaml11')
.map(key => JSON.stringify(key))
.join(', ');
throw new Error(`Unknown schema "${schemaName}"; use one of ${keys} or define customTags array`);
}
}
if (Array.isArray(customTags)) {
for (const tag of customTags)
tags = tags.concat(tag);
}
else if (typeof customTags === 'function') {
tags = customTags(tags.slice());
}
return tags.map(tag => {
if (typeof tag !== 'string')
return tag;
const tagObj = tagsByName[tag];
if (tagObj)
return tagObj;
const keys = Object.keys(tagsByName)
.map(key => JSON.stringify(key))
.join(', ');
throw new Error(`Unknown custom tag "${tag}"; use one of ${keys}`);
});
}
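// Sketch (not part of the library source) of how getTags() above expands customTags:
//   getTags(null, 'core')                      // -> the default core schema tag array
//   getTags(['timestamp'], 'core')             // -> core tags plus the timestamp tag object
//   getTags([someTagObject], 'core')           // a custom tag object is appended as-is
//   getTags(tags => tags.slice(0, 3), 'core')  // a function replaces the tag list
// Unknown schema or tag names throw, listing the valid alternatives.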
const sortMapEntriesByKey = (a, b) => a.key < b.key ? -1 : a.key > b.key ? 1 : 0;
class Schema {
constructor({ compat, customTags, merge, resolveKnownTags, schema, sortMapEntries, toStringDefaults }) {
this.compat = Array.isArray(compat)
? getTags(compat, 'compat')
: compat
? getTags(null, compat)
: null;
this.merge = !!merge;
this.name = (typeof schema === 'string' && schema) || 'core';
this.knownTags = resolveKnownTags ? coreKnownTags : {};
this.tags = getTags(customTags, this.name);
this.toStringOptions = toStringDefaults ?? null;
Object.defineProperty(this, MAP, { value: map$1 });
Object.defineProperty(this, SCALAR$1, { value: string });
Object.defineProperty(this, SEQ, { value: seq });
// Used by createMap()
this.sortMapEntries =
typeof sortMapEntries === 'function'
? sortMapEntries
: sortMapEntries === true
? sortMapEntriesByKey
: null;
}
clone() {
const copy = Object.create(Schema.prototype, Object.getOwnPropertyDescriptors(this));
copy.tags = this.tags.slice();
return copy;
}
}
function stringifyDocument(doc, options) {
const lines = [];
let hasDirectives = options.directives === true;
if (options.directives !== false && doc.directives) {
const dir = doc.directives.toString(doc);
if (dir) {
lines.push(dir);
hasDirectives = true;
}
else if (doc.directives.docStart)
hasDirectives = true;
}
if (hasDirectives)
lines.push('---');
const ctx = createStringifyContext(doc, options);
const { commentString } = ctx.options;
if (doc.commentBefore) {
if (lines.length !== 1)
lines.unshift('');
const cs = commentString(doc.commentBefore);
lines.unshift(indentComment(cs, ''));
}
let chompKeep = false;
let contentComment = null;
if (doc.contents) {
if (isNode$1(doc.contents)) {
if (doc.contents.spaceBefore && hasDirectives)
lines.push('');
if (doc.contents.commentBefore) {
const cs = commentString(doc.contents.commentBefore);
lines.push(indentComment(cs, ''));
}
// top-level block scalars need to be indented if followed by a comment
ctx.forceBlockIndent = !!doc.comment;
contentComment = doc.contents.comment;
}
const onChompKeep = contentComment ? undefined : () => (chompKeep = true);
let body = stringify$2(doc.contents, ctx, () => (contentComment = null), onChompKeep);
if (contentComment)
body += lineComment(body, '', commentString(contentComment));
if ((body[0] === '|' || body[0] === '>') &&
lines[lines.length - 1] === '---') {
// Top-level block scalars with a preceding doc marker ought to use the
// same line for their header.
lines[lines.length - 1] = `--- ${body}`;
}
else
lines.push(body);
}
else {
lines.push(stringify$2(doc.contents, ctx));
}
if (doc.directives?.docEnd) {
if (doc.comment) {
const cs = commentString(doc.comment);
if (cs.includes('\n')) {
lines.push('...');
lines.push(indentComment(cs, ''));
}
else {
lines.push(`... ${cs}`);
}
}
else {
lines.push('...');
}
}
else {
let dc = doc.comment;
if (dc && chompKeep)
dc = dc.replace(/^\n+/, '');
if (dc) {
if ((!chompKeep || contentComment) && lines[lines.length - 1] !== '')
lines.push('');
lines.push(indentComment(commentString(dc), ''));
}
}
return lines.join('\n') + '\n';
}
class Document {
constructor(value, replacer, options) {
/** A comment before this Document */
this.commentBefore = null;
/** A comment immediately after this Document */
this.comment = null;
/** Errors encountered during parsing. */
this.errors = [];
/** Warnings encountered during parsing. */
this.warnings = [];
Object.defineProperty(this, NODE_TYPE, { value: DOC });
let _replacer = null;
if (typeof replacer === 'function' || Array.isArray(replacer)) {
_replacer = replacer;
}
else if (options === undefined && replacer) {
options = replacer;
replacer = undefined;
}
const opt = Object.assign({
intAsBigInt: false,
keepSourceTokens: false,
logLevel: 'warn',
prettyErrors: true,
strict: true,
uniqueKeys: true,
version: '1.2'
}, options);
this.options = opt;
let { version } = opt;
if (options?._directives) {
this.directives = options._directives.atDocument();
if (this.directives.yaml.explicit)
version = this.directives.yaml.version;
}
else
this.directives = new Directives({ version });
this.setSchema(version, options);
// @ts-expect-error We can't really know that this matches Contents.
this.contents =
value === undefined ? null : this.createNode(value, _replacer, options);
}
/**
* Create a deep copy of this Document and its contents.
*
* Custom Node values that inherit from `Object` still refer to their original instances.
*/
clone() {
const copy = Object.create(Document.prototype, {
[NODE_TYPE]: { value: DOC }
});
copy.commentBefore = this.commentBefore;
copy.comment = this.comment;
copy.errors = this.errors.slice();
copy.warnings = this.warnings.slice();
copy.options = Object.assign({}, this.options);
if (this.directives)
copy.directives = this.directives.clone();
copy.schema = this.schema.clone();
// @ts-expect-error We can't really know that this matches Contents.
copy.contents = isNode$1(this.contents)
? this.contents.clone(copy.schema)
: this.contents;
if (this.range)
copy.range = this.range.slice();
return copy;
}
/** Adds a value to the document. */
add(value) {
if (assertCollection(this.contents))
this.contents.add(value);
}
/** Adds a value to the document. */
addIn(path, value) {
if (assertCollection(this.contents))
this.contents.addIn(path, value);
}
/**
* Create a new `Alias` node, ensuring that the target `node` has the required anchor.
*
* If `node` already has an anchor, `name` is ignored.
* Otherwise, the `node.anchor` value will be set to `name`,
* or if an anchor with that name is already present in the document,
* `name` will be used as a prefix for a new unique anchor.
* If `name` is undefined, the generated anchor will use 'a' as a prefix.
*/
createAlias(node, name) {
if (!node.anchor) {
const prev = anchorNames(this);
node.anchor =
// eslint-disable-next-line @typescript-eslint/prefer-nullish-coalescing
!name || prev.has(name) ? findNewAnchor(name || 'a', prev) : name;
}
return new Alias(node.anchor);
}
createNode(value, replacer, options) {
let _replacer = undefined;
if (typeof replacer === 'function') {
value = replacer.call({ '': value }, '', value);
_replacer = replacer;
}
else if (Array.isArray(replacer)) {
const keyToStr = (v) => typeof v === 'number' || v instanceof String || v instanceof Number;
const asStr = replacer.filter(keyToStr).map(String);
if (asStr.length > 0)
replacer = replacer.concat(asStr);
_replacer = replacer;
}
else if (options === undefined && replacer) {
options = replacer;
replacer = undefined;
}
const { aliasDuplicateObjects, anchorPrefix, flow, keepUndefined, onTagObj, tag } = options ?? {};
const { onAnchor, setAnchors, sourceObjects } = createNodeAnchors(this,
// eslint-disable-next-line @typescript-eslint/prefer-nullish-coalescing
anchorPrefix || 'a');
const ctx = {
aliasDuplicateObjects: aliasDuplicateObjects ?? true,
keepUndefined: keepUndefined ?? false,
onAnchor,
onTagObj,
replacer: _replacer,
schema: this.schema,
sourceObjects
};
const node = createNode(value, tag, ctx);
if (flow && isCollection$1(node))
node.flow = true;
setAnchors();
return node;
}
/**
* Convert a key and a value into a `Pair` using the current schema,
* recursively wrapping all values as `Scalar` or `Collection` nodes.
*/
createPair(key, value, options = {}) {
const k = this.createNode(key, null, options);
const v = this.createNode(value, null, options);
return new Pair(k, v);
}
/**
* Removes a value from the document.
* @returns `true` if the item was found and removed.
*/
delete(key) {
return assertCollection(this.contents) ? this.contents.delete(key) : false;
}
/**
* Removes a value from the document.
* @returns `true` if the item was found and removed.
*/
deleteIn(path) {
if (isEmptyPath(path)) {
if (this.contents == null)
return false;
// @ts-expect-error Presumed impossible if Strict extends false
this.contents = null;
return true;
}
return assertCollection(this.contents)
? this.contents.deleteIn(path)
: false;
}
/**
* Returns item at `key`, or `undefined` if not found. By default unwraps
* scalar values from their surrounding node; to disable set `keepScalar` to
* `true` (collections are always returned intact).
*/
get(key, keepScalar) {
return isCollection$1(this.contents)
? this.contents.get(key, keepScalar)
: undefined;
}
/**
* Returns item at `path`, or `undefined` if not found. By default unwraps
* scalar values from their surrounding node; to disable set `keepScalar` to
* `true` (collections are always returned intact).
*/
getIn(path, keepScalar) {
if (isEmptyPath(path))
return !keepScalar && isScalar$1(this.contents)
? this.contents.value
: this.contents;
return isCollection$1(this.contents)
? this.contents.getIn(path, keepScalar)
: undefined;
}
/**
* Checks if the document includes a value with the key `key`.
*/
has(key) {
return isCollection$1(this.contents) ? this.contents.has(key) : false;
}
/**
* Checks if the document includes a value at `path`.
*/
hasIn(path) {
if (isEmptyPath(path))
return this.contents !== undefined;
return isCollection$1(this.contents) ? this.contents.hasIn(path) : false;
}
/**
* Sets a value in this document. For `!!set`, `value` needs to be a
* boolean to add/remove the item from the set.
*/
set(key, value) {
if (this.contents == null) {
// @ts-expect-error We can't really know that this matches Contents.
this.contents = collectionFromPath(this.schema, [key], value);
}
else if (assertCollection(this.contents)) {
this.contents.set(key, value);
}
}
/**
* Sets a value in this document. For `!!set`, `value` needs to be a
* boolean to add/remove the item from the set.
*/
setIn(path, value) {
if (isEmptyPath(path)) {
// @ts-expect-error We can't really know that this matches Contents.
this.contents = value;
}
else if (this.contents == null) {
// @ts-expect-error We can't really know that this matches Contents.
this.contents = collectionFromPath(this.schema, Array.from(path), value);
}
else if (assertCollection(this.contents)) {
this.contents.setIn(path, value);
}
}
/**
* Change the YAML version and schema used by the document.
* A `null` version disables support for directives, explicit tags, anchors, and aliases.
* It also requires the `schema` option to be given as a `Schema` instance value.
*
* Overrides all previously set schema options.
*/
setSchema(version, options = {}) {
if (typeof version === 'number')
version = String(version);
let opt;
switch (version) {
case '1.1':
if (this.directives)
this.directives.yaml.version = '1.1';
else
this.directives = new Directives({ version: '1.1' });
opt = { merge: true, resolveKnownTags: false, schema: 'yaml-1.1' };
break;
case '1.2':
case 'next':
if (this.directives)
this.directives.yaml.version = version;
else
this.directives = new Directives({ version });
opt = { merge: false, resolveKnownTags: true, schema: 'core' };
break;
case null:
if (this.directives)
delete this.directives;
opt = null;
break;
default: {
const sv = JSON.stringify(version);
throw new Error(`Expected '1.1', '1.2' or null as first argument, but found: ${sv}`);
}
}
// Not using `instanceof Schema` to allow for duck typing
if (options.schema instanceof Object)
this.schema = options.schema;
else if (opt)
this.schema = new Schema(Object.assign(opt, options));
else
throw new Error(`With a null YAML version, the { schema: Schema } option is required`);
}
// json & jsonArg are only used from toJSON()
toJS({ json, jsonArg, mapAsMap, maxAliasCount, onAnchor, reviver } = {}) {
const ctx = {
anchors: new Map(),
doc: this,
keep: !json,
mapAsMap: mapAsMap === true,
mapKeyWarned: false,
maxAliasCount: typeof maxAliasCount === 'number' ? maxAliasCount : 100
};
const res = toJS(this.contents, jsonArg ?? '', ctx);
if (typeof onAnchor === 'function')
for (const { count, res } of ctx.anchors.values())
onAnchor(res, count);
return typeof reviver === 'function'
? applyReviver(reviver, { '': res }, '', res)
: res;
}
/**
* A JSON representation of the document `contents`.
*
* @param jsonArg Used by `JSON.stringify` to indicate the array index or
* property name.
*/
toJSON(jsonArg, onAnchor) {
return this.toJS({ json: true, jsonArg, mapAsMap: false, onAnchor });
}
/** A YAML representation of the document. */
toString(options = {}) {
if (this.errors.length > 0)
throw new Error('Document with errors cannot be stringified');
if ('indent' in options &&
(!Number.isInteger(options.indent) || Number(options.indent) <= 0)) {
const s = JSON.stringify(options.indent);
throw new Error(`"indent" option must be a positive integer, not ${s}`);
}
return stringifyDocument(this, options);
}
}
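// Usage sketch (not part of the library source) for the Document class above:
//
//   const doc = new Document({ a: 1, b: [2, 3] });
//   doc.get('a');              // -> 1
//   doc.setIn(['b', 0], 20);   // updates the nested sequence item
//   doc.toJS();                // -> { a: 1, b: [20, 3] }
//   doc.toString();            // -> 'a: 1\nb:\n  - 20\n  - 3\n'
//
// setSchema('1.1') switches to the YAML 1.1 tag set; setSchema(null, { schema })
// requires an explicit Schema instance and disables directives entirely.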
function assertCollection(contents) {
if (isCollection$1(contents))
return true;
throw new Error('Expected a YAML collection as document contents');
}
class YAMLError extends Error {
constructor(name, pos, code, message) {
super();
this.name = name;
this.code = code;
this.message = message;
this.pos = pos;
}
}
class YAMLParseError extends YAMLError {
constructor(pos, code, message) {
super('YAMLParseError', pos, code, message);
}
}
class YAMLWarning extends YAMLError {
constructor(pos, code, message) {
super('YAMLWarning', pos, code, message);
}
}
const prettifyError = (src, lc) => (error) => {
if (error.pos[0] === -1)
return;
error.linePos = error.pos.map(pos => lc.linePos(pos));
const { line, col } = error.linePos[0];
error.message += ` at line ${line}, column ${col}`;
let ci = col - 1;
let lineStr = src
.substring(lc.lineStarts[line - 1], lc.lineStarts[line])
.replace(/[\n\r]+$/, '');
// Trim to max 80 chars, keeping col position near the middle
if (ci >= 60 && lineStr.length > 80) {
const trimStart = Math.min(ci - 39, lineStr.length - 79);
lineStr = '…' + lineStr.substring(trimStart);
ci -= trimStart - 1;
}
if (lineStr.length > 80)
lineStr = lineStr.substring(0, 79) + '…';
// Include previous line in context if pointing at line start
if (line > 1 && /^ *$/.test(lineStr.substring(0, ci))) {
// Regexp won't match if start is trimmed
let prev = src.substring(lc.lineStarts[line - 2], lc.lineStarts[line - 1]);
if (prev.length > 80)
prev = prev.substring(0, 79) + '…\n';
lineStr = prev + lineStr;
}
if (/[^ ]/.test(lineStr)) {
let count = 1;
const end = error.linePos[1];
if (end && end.line === line && end.col > col) {
count = Math.max(1, Math.min(end.col - col, 80 - ci));
}
const pointer = ' '.repeat(ci) + '^'.repeat(count);
error.message += `:\n\n${lineStr}\n${pointer}\n`;
}
};
function resolveProps(tokens, { flow, indicator, next, offset, onError, parentIndent, startOnNewline }) {
let spaceBefore = false;
let atNewline = startOnNewline;
let hasSpace = startOnNewline;
let comment = '';
let commentSep = '';
let hasNewline = false;
let reqSpace = false;
let tab = null;
let anchor = null;
let tag = null;
let newlineAfterProp = null;
let comma = null;
let found = null;
let start = null;
for (const token of tokens) {
if (reqSpace) {
if (token.type !== 'space' &&
token.type !== 'newline' &&
token.type !== 'comma')
onError(token.offset, 'MISSING_CHAR', 'Tags and anchors must be separated from the next token by white space');
reqSpace = false;
}
if (tab) {
if (atNewline && token.type !== 'comment' && token.type !== 'newline') {
onError(tab, 'TAB_AS_INDENT', 'Tabs are not allowed as indentation');
}
tab = null;
}
switch (token.type) {
case 'space':
// At the doc level, tabs at line start may be parsed
// as leading white space rather than indentation.
// In a flow collection, only the parser handles indent.
if (!flow &&
(indicator !== 'doc-start' || next?.type !== 'flow-collection') &&
token.source.includes('\t')) {
tab = token;
}
hasSpace = true;
break;
case 'comment': {
if (!hasSpace)
onError(token, 'MISSING_CHAR', 'Comments must be separated from other tokens by white space characters');
const cb = token.source.substring(1) || ' ';
if (!comment)
comment = cb;
else
comment += commentSep + cb;
commentSep = '';
atNewline = false;
break;
}
case 'newline':
if (atNewline) {
if (comment)
comment += token.source;
else
spaceBefore = true;
}
else
commentSep += token.source;
atNewline = true;
hasNewline = true;
if (anchor || tag)
newlineAfterProp = token;
hasSpace = true;
break;
case 'anchor':
if (anchor)
onError(token, 'MULTIPLE_ANCHORS', 'A node can have at most one anchor');
if (token.source.endsWith(':'))
onError(token.offset + token.source.length - 1, 'BAD_ALIAS', 'Anchor ending in : is ambiguous', true);
anchor = token;
if (start === null)
start = token.offset;
atNewline = false;
hasSpace = false;
reqSpace = true;
break;
case 'tag': {
if (tag)
onError(token, 'MULTIPLE_TAGS', 'A node can have at most one tag');
tag = token;
if (start === null)
start = token.offset;
atNewline = false;
hasSpace = false;
reqSpace = true;
break;
}
case indicator:
                // Preceding comments could be handled differently here
if (anchor || tag)
onError(token, 'BAD_PROP_ORDER', `Anchors and tags must be after the ${token.source} indicator`);
if (found)
onError(token, 'UNEXPECTED_TOKEN', `Unexpected ${token.source} in ${flow ?? 'collection'}`);
found = token;
atNewline =
indicator === 'seq-item-ind' || indicator === 'explicit-key-ind';
hasSpace = false;
break;
case 'comma':
if (flow) {
if (comma)
onError(token, 'UNEXPECTED_TOKEN', `Unexpected , in ${flow}`);
comma = token;
atNewline = false;
hasSpace = false;
break;
}
// else fallthrough
default:
onError(token, 'UNEXPECTED_TOKEN', `Unexpected ${token.type} token`);
atNewline = false;
hasSpace = false;
}
}
const last = tokens[tokens.length - 1];
const end = last ? last.offset + last.source.length : offset;
if (reqSpace &&
next &&
next.type !== 'space' &&
next.type !== 'newline' &&
next.type !== 'comma' &&
(next.type !== 'scalar' || next.source !== '')) {
onError(next.offset, 'MISSING_CHAR', 'Tags and anchors must be separated from the next token by white space');
}
if (tab &&
((atNewline && tab.indent <= parentIndent) ||
next?.type === 'block-map' ||
next?.type === 'block-seq'))
onError(tab, 'TAB_AS_INDENT', 'Tabs are not allowed as indentation');
return {
comma,
found,
spaceBefore,
comment,
hasNewline,
anchor,
tag,
newlineAfterProp,
end,
start: start ?? end
};
}
function containsNewline(key) {
if (!key)
return null;
switch (key.type) {
case 'alias':
case 'scalar':
case 'double-quoted-scalar':
case 'single-quoted-scalar':
if (key.source.includes('\n'))
return true;
if (key.end)
for (const st of key.end)
if (st.type === 'newline')
return true;
return false;
case 'flow-collection':
for (const it of key.items) {
for (const st of it.start)
if (st.type === 'newline')
return true;
if (it.sep)
for (const st of it.sep)
if (st.type === 'newline')
return true;
if (containsNewline(it.key) || containsNewline(it.value))
return true;
}
return false;
default:
return true;
}
}
function flowIndentCheck(indent, fc, onError) {
if (fc?.type === 'flow-collection') {
const end = fc.end[0];
if (end.indent === indent &&
(end.source === ']' || end.source === '}') &&
containsNewline(fc)) {
const msg = 'Flow end indicator should be more indented than parent';
onError(end, 'BAD_INDENT', msg, true);
}
}
}
function mapIncludes(ctx, items, search) {
const { uniqueKeys } = ctx.options;
if (uniqueKeys === false)
return false;
const isEqual = typeof uniqueKeys === 'function'
? uniqueKeys
: (a, b) => a === b ||
(isScalar$1(a) &&
isScalar$1(b) &&
a.value === b.value &&
!(a.value === '<<' && ctx.schema.merge));
return items.some(pair => isEqual(pair.key, search));
}
const startColMsg = 'All mapping items must start at the same column';
function resolveBlockMap({ composeNode, composeEmptyNode }, ctx, bm, onError, tag) {
const NodeClass = tag?.nodeClass ?? YAMLMap;
const map = new NodeClass(ctx.schema);
if (ctx.atRoot)
ctx.atRoot = false;
let offset = bm.offset;
let commentEnd = null;
for (const collItem of bm.items) {
const { start, key, sep, value } = collItem;
// key properties
const keyProps = resolveProps(start, {
indicator: 'explicit-key-ind',
next: key ?? sep?.[0],
offset,
onError,
parentIndent: bm.indent,
startOnNewline: true
});
const implicitKey = !keyProps.found;
if (implicitKey) {
if (key) {
if (key.type === 'block-seq')
onError(offset, 'BLOCK_AS_IMPLICIT_KEY', 'A block sequence may not be used as an implicit map key');
else if ('indent' in key && key.indent !== bm.indent)
onError(offset, 'BAD_INDENT', startColMsg);
}
if (!keyProps.anchor && !keyProps.tag && !sep) {
commentEnd = keyProps.end;
if (keyProps.comment) {
if (map.comment)
map.comment += '\n' + keyProps.comment;
else
map.comment = keyProps.comment;
}
continue;
}
if (keyProps.newlineAfterProp || containsNewline(key)) {
onError(key ?? start[start.length - 1], 'MULTILINE_IMPLICIT_KEY', 'Implicit keys need to be on a single line');
}
}
else if (keyProps.found?.indent !== bm.indent) {
onError(offset, 'BAD_INDENT', startColMsg);
}
// key value
const keyStart = keyProps.end;
const keyNode = key
? composeNode(ctx, key, keyProps, onError)
: composeEmptyNode(ctx, keyStart, start, null, keyProps, onError);
if (ctx.schema.compat)
flowIndentCheck(bm.indent, key, onError);
if (mapIncludes(ctx, map.items, keyNode))
onError(keyStart, 'DUPLICATE_KEY', 'Map keys must be unique');
// value properties
const valueProps = resolveProps(sep ?? [], {
indicator: 'map-value-ind',
next: value,
offset: keyNode.range[2],
onError,
parentIndent: bm.indent,
startOnNewline: !key || key.type === 'block-scalar'
});
offset = valueProps.end;
if (valueProps.found) {
if (implicitKey) {
if (value?.type === 'block-map' && !valueProps.hasNewline)
onError(offset, 'BLOCK_AS_IMPLICIT_KEY', 'Nested mappings are not allowed in compact mappings');
if (ctx.options.strict &&
keyProps.start < valueProps.found.offset - 1024)
onError(keyNode.range, 'KEY_OVER_1024_CHARS', 'The : indicator must be at most 1024 chars after the start of an implicit block mapping key');
}
// value value
const valueNode = value
? composeNode(ctx, value, valueProps, onError)
: composeEmptyNode(ctx, offset, sep, null, valueProps, onError);
if (ctx.schema.compat)
flowIndentCheck(bm.indent, value, onError);
offset = valueNode.range[2];
const pair = new Pair(keyNode, valueNode);
if (ctx.options.keepSourceTokens)
pair.srcToken = collItem;
map.items.push(pair);
}
else {
// key with no value
if (implicitKey)
onError(keyNode.range, 'MISSING_CHAR', 'Implicit map keys need to be followed by map values');
if (valueProps.comment) {
if (keyNode.comment)
keyNode.comment += '\n' + valueProps.comment;
else
keyNode.comment = valueProps.comment;
}
const pair = new Pair(keyNode);
if (ctx.options.keepSourceTokens)
pair.srcToken = collItem;
map.items.push(pair);
}
}
if (commentEnd && commentEnd < offset)
onError(commentEnd, 'IMPOSSIBLE', 'Map comment with trailing content');
map.range = [bm.offset, offset, commentEnd ?? offset];
return map;
}
function resolveBlockSeq({ composeNode, composeEmptyNode }, ctx, bs, onError, tag) {
const NodeClass = tag?.nodeClass ?? YAMLSeq;
const seq = new NodeClass(ctx.schema);
if (ctx.atRoot)
ctx.atRoot = false;
let offset = bs.offset;
let commentEnd = null;
for (const { start, value } of bs.items) {
const props = resolveProps(start, {
indicator: 'seq-item-ind',
next: value,
offset,
onError,
parentIndent: bs.indent,
startOnNewline: true
});
if (!props.found) {
if (props.anchor || props.tag || value) {
if (value && value.type === 'block-seq')
onError(props.end, 'BAD_INDENT', 'All sequence items must start at the same column');
else
onError(offset, 'MISSING_CHAR', 'Sequence item without - indicator');
}
else {
commentEnd = props.end;
if (props.comment)
seq.comment = props.comment;
continue;
}
}
const node = value
? composeNode(ctx, value, props, onError)
: composeEmptyNode(ctx, props.end, start, null, props, onError);
if (ctx.schema.compat)
flowIndentCheck(bs.indent, value, onError);
offset = node.range[2];
seq.items.push(node);
}
seq.range = [bs.offset, offset, commentEnd ?? offset];
return seq;
}
function resolveEnd(end, offset, reqSpace, onError) {
let comment = '';
if (end) {
let hasSpace = false;
let sep = '';
for (const token of end) {
const { source, type } = token;
switch (type) {
case 'space':
hasSpace = true;
break;
case 'comment': {
if (reqSpace && !hasSpace)
onError(token, 'MISSING_CHAR', 'Comments must be separated from other tokens by white space characters');
const cb = source.substring(1) || ' ';
if (!comment)
comment = cb;
else
comment += sep + cb;
sep = '';
break;
}
case 'newline':
if (comment)
sep += source;
hasSpace = true;
break;
default:
onError(token, 'UNEXPECTED_TOKEN', `Unexpected ${type} at node end`);
}
offset += source.length;
}
}
return { comment, offset };
}
const blockMsg = 'Block collections are not allowed within flow collections';
const isBlock$1 = (token) => token && (token.type === 'block-map' || token.type === 'block-seq');
function resolveFlowCollection({ composeNode, composeEmptyNode }, ctx, fc, onError, tag) {
const isMap = fc.start.source === '{';
const fcName = isMap ? 'flow map' : 'flow sequence';
const NodeClass = (tag?.nodeClass ?? (isMap ? YAMLMap : YAMLSeq));
const coll = new NodeClass(ctx.schema);
coll.flow = true;
const atRoot = ctx.atRoot;
if (atRoot)
ctx.atRoot = false;
let offset = fc.offset + fc.start.source.length;
for (let i = 0; i < fc.items.length; ++i) {
const collItem = fc.items[i];
const { start, key, sep, value } = collItem;
const props = resolveProps(start, {
flow: fcName,
indicator: 'explicit-key-ind',
next: key ?? sep?.[0],
offset,
onError,
parentIndent: fc.indent,
startOnNewline: false
});
if (!props.found) {
if (!props.anchor && !props.tag && !sep && !value) {
if (i === 0 && props.comma)
onError(props.comma, 'UNEXPECTED_TOKEN', `Unexpected , in ${fcName}`);
else if (i < fc.items.length - 1)
onError(props.start, 'UNEXPECTED_TOKEN', `Unexpected empty item in ${fcName}`);
if (props.comment) {
if (coll.comment)
coll.comment += '\n' + props.comment;
else
coll.comment = props.comment;
}
offset = props.end;
continue;
}
if (!isMap && ctx.options.strict && containsNewline(key))
onError(key, // checked by containsNewline()
'MULTILINE_IMPLICIT_KEY', 'Implicit keys of flow sequence pairs need to be on a single line');
}
if (i === 0) {
if (props.comma)
onError(props.comma, 'UNEXPECTED_TOKEN', `Unexpected , in ${fcName}`);
}
else {
if (!props.comma)
onError(props.start, 'MISSING_CHAR', `Missing , between ${fcName} items`);
if (props.comment) {
let prevItemComment = '';
loop: for (const st of start) {
switch (st.type) {
case 'comma':
case 'space':
break;
case 'comment':
prevItemComment = st.source.substring(1);
break loop;
default:
break loop;
}
}
if (prevItemComment) {
let prev = coll.items[coll.items.length - 1];
if (isPair(prev))
prev = prev.value ?? prev.key;
if (prev.comment)
prev.comment += '\n' + prevItemComment;
else
prev.comment = prevItemComment;
props.comment = props.comment.substring(prevItemComment.length + 1);
}
}
}
if (!isMap && !sep && !props.found) {
// item is a value in a seq
// → key & sep are empty, start does not include ? or :
const valueNode = value
? composeNode(ctx, value, props, onError)
: composeEmptyNode(ctx, props.end, sep, null, props, onError);
coll.items.push(valueNode);
offset = valueNode.range[2];
if (isBlock$1(value))
onError(valueNode.range, 'BLOCK_IN_FLOW', blockMsg);
}
else {
// item is a key+value pair
// key value
const keyStart = props.end;
const keyNode = key
? composeNode(ctx, key, props, onError)
: composeEmptyNode(ctx, keyStart, start, null, props, onError);
if (isBlock$1(key))
onError(keyNode.range, 'BLOCK_IN_FLOW', blockMsg);
// value properties
const valueProps = resolveProps(sep ?? [], {
flow: fcName,
indicator: 'map-value-ind',
next: value,
offset: keyNode.range[2],
onError,
parentIndent: fc.indent,
startOnNewline: false
});
if (valueProps.found) {
if (!isMap && !props.found && ctx.options.strict) {
if (sep)
for (const st of sep) {
if (st === valueProps.found)
break;
if (st.type === 'newline') {
onError(st, 'MULTILINE_IMPLICIT_KEY', 'Implicit keys of flow sequence pairs need to be on a single line');
break;
}
}
if (props.start < valueProps.found.offset - 1024)
onError(valueProps.found, 'KEY_OVER_1024_CHARS', 'The : indicator must be at most 1024 chars after the start of an implicit flow sequence key');
}
}
else if (value) {
if ('source' in value && value.source && value.source[0] === ':')
onError(value, 'MISSING_CHAR', `Missing space after : in ${fcName}`);
else
onError(valueProps.start, 'MISSING_CHAR', `Missing , or : between ${fcName} items`);
}
// value value
const valueNode = value
? composeNode(ctx, value, valueProps, onError)
: valueProps.found
? composeEmptyNode(ctx, valueProps.end, sep, null, valueProps, onError)
: null;
if (valueNode) {
if (isBlock$1(value))
onError(valueNode.range, 'BLOCK_IN_FLOW', blockMsg);
}
else if (valueProps.comment) {
if (keyNode.comment)
keyNode.comment += '\n' + valueProps.comment;
else
keyNode.comment = valueProps.comment;
}
const pair = new Pair(keyNode, valueNode);
if (ctx.options.keepSourceTokens)
pair.srcToken = collItem;
if (isMap) {
const map = coll;
if (mapIncludes(ctx, map.items, keyNode))
onError(keyStart, 'DUPLICATE_KEY', 'Map keys must be unique');
map.items.push(pair);
}
else {
const map = new YAMLMap(ctx.schema);
map.flow = true;
map.items.push(pair);
coll.items.push(map);
}
offset = valueNode ? valueNode.range[2] : valueProps.end;
}
}
const expectedEnd = isMap ? '}' : ']';
const [ce, ...ee] = fc.end;
let cePos = offset;
if (ce && ce.source === expectedEnd)
cePos = ce.offset + ce.source.length;
else {
const name = fcName[0].toUpperCase() + fcName.substring(1);
const msg = atRoot
? `${name} must end with a ${expectedEnd}`
: `${name} in block collection must be sufficiently indented and end with a ${expectedEnd}`;
onError(offset, atRoot ? 'MISSING_CHAR' : 'BAD_INDENT', msg);
if (ce && ce.source.length !== 1)
ee.unshift(ce);
}
if (ee.length > 0) {
const end = resolveEnd(ee, cePos, ctx.options.strict, onError);
if (end.comment) {
if (coll.comment)
coll.comment += '\n' + end.comment;
else
coll.comment = end.comment;
}
coll.range = [fc.offset, cePos, end.offset];
}
else {
coll.range = [fc.offset, cePos, cePos];
}
return coll;
}
function resolveCollection(CN, ctx, token, onError, tagName, tag) {
const coll = token.type === 'block-map'
? resolveBlockMap(CN, ctx, token, onError, tag)
: token.type === 'block-seq'
? resolveBlockSeq(CN, ctx, token, onError, tag)
: resolveFlowCollection(CN, ctx, token, onError, tag);
const Coll = coll.constructor;
// If we got a tagName matching the class, or the tag name is '!',
// then use the tagName from the node class used to create it.
if (tagName === '!' || tagName === Coll.tagName) {
coll.tag = Coll.tagName;
return coll;
}
if (tagName)
coll.tag = tagName;
return coll;
}
function composeCollection(CN, ctx, token, props, onError) {
const tagToken = props.tag;
const tagName = !tagToken
? null
: ctx.directives.tagName(tagToken.source, msg => onError(tagToken, 'TAG_RESOLVE_FAILED', msg));
if (token.type === 'block-seq') {
const { anchor, newlineAfterProp: nl } = props;
const lastProp = anchor && tagToken
? anchor.offset > tagToken.offset
? anchor
: tagToken
: (anchor ?? tagToken);
if (lastProp && (!nl || nl.offset < lastProp.offset)) {
const message = 'Missing newline after block sequence props';
onError(lastProp, 'MISSING_CHAR', message);
}
}
const expType = token.type === 'block-map'
? 'map'
: token.type === 'block-seq'
? 'seq'
: token.start.source === '{'
? 'map'
: 'seq';
// shortcut: check if it's a generic YAMLMap or YAMLSeq
// before jumping into the custom tag logic.
if (!tagToken ||
!tagName ||
tagName === '!' ||
(tagName === YAMLMap.tagName && expType === 'map') ||
(tagName === YAMLSeq.tagName && expType === 'seq')) {
return resolveCollection(CN, ctx, token, onError, tagName);
}
let tag = ctx.schema.tags.find(t => t.tag === tagName && t.collection === expType);
if (!tag) {
const kt = ctx.schema.knownTags[tagName];
if (kt && kt.collection === expType) {
ctx.schema.tags.push(Object.assign({}, kt, { default: false }));
tag = kt;
}
else {
if (kt?.collection) {
onError(tagToken, 'BAD_COLLECTION_TYPE', `${kt.tag} used for ${expType} collection, but expects ${kt.collection}`, true);
}
else {
onError(tagToken, 'TAG_RESOLVE_FAILED', `Unresolved tag: ${tagName}`, true);
}
return resolveCollection(CN, ctx, token, onError, tagName);
}
}
const coll = resolveCollection(CN, ctx, token, onError, tagName, tag);
const res = tag.resolve?.(coll, msg => onError(tagToken, 'TAG_RESOLVE_FAILED', msg), ctx.options) ?? coll;
const node = isNode$1(res)
? res
: new Scalar(res);
node.range = coll.range;
node.tag = tagName;
if (tag?.format)
node.format = tag.format;
return node;
}
function resolveBlockScalar(ctx, scalar, onError) {
const start = scalar.offset;
const header = parseBlockScalarHeader(scalar, ctx.options.strict, onError);
if (!header)
return { value: '', type: null, comment: '', range: [start, start, start] };
const type = header.mode === '>' ? Scalar.BLOCK_FOLDED : Scalar.BLOCK_LITERAL;
const lines = scalar.source ? splitLines(scalar.source) : [];
// determine the end of content & start of chomping
let chompStart = lines.length;
for (let i = lines.length - 1; i >= 0; --i) {
const content = lines[i][1];
if (content === '' || content === '\r')
chompStart = i;
else
break;
}
// shortcut for empty contents
if (chompStart === 0) {
const value = header.chomp === '+' && lines.length > 0
? '\n'.repeat(Math.max(1, lines.length - 1))
: '';
let end = start + header.length;
if (scalar.source)
end += scalar.source.length;
return { value, type, comment: header.comment, range: [start, end, end] };
}
// find the indentation level to trim from start
let trimIndent = scalar.indent + header.indent;
let offset = scalar.offset + header.length;
let contentStart = 0;
for (let i = 0; i < chompStart; ++i) {
const [indent, content] = lines[i];
if (content === '' || content === '\r') {
if (header.indent === 0 && indent.length > trimIndent)
trimIndent = indent.length;
}
else {
if (indent.length < trimIndent) {
const message = 'Block scalars with more-indented leading empty lines must use an explicit indentation indicator';
onError(offset + indent.length, 'MISSING_CHAR', message);
}
if (header.indent === 0)
trimIndent = indent.length;
contentStart = i;
if (trimIndent === 0 && !ctx.atRoot) {
const message = 'Block scalar values in collections must be indented';
onError(offset, 'BAD_INDENT', message);
}
break;
}
offset += indent.length + content.length + 1;
}
// include trailing more-indented empty lines in content
for (let i = lines.length - 1; i >= chompStart; --i) {
if (lines[i][0].length > trimIndent)
chompStart = i + 1;
}
let value = '';
let sep = '';
let prevMoreIndented = false;
// leading whitespace is kept intact
for (let i = 0; i < contentStart; ++i)
value += lines[i][0].slice(trimIndent) + '\n';
for (let i = contentStart; i < chompStart; ++i) {
let [indent, content] = lines[i];
offset += indent.length + content.length + 1;
const crlf = content[content.length - 1] === '\r';
if (crlf)
content = content.slice(0, -1);
/* istanbul ignore if already caught in lexer */
if (content && indent.length < trimIndent) {
const src = header.indent
? 'explicit indentation indicator'
: 'first line';
const message = `Block scalar lines must not be less indented than their ${src}`;
onError(offset - content.length - (crlf ? 2 : 1), 'BAD_INDENT', message);
indent = '';
}
if (type === Scalar.BLOCK_LITERAL) {
value += sep + indent.slice(trimIndent) + content;
sep = '\n';
}
else if (indent.length > trimIndent || content[0] === '\t') {
// more-indented content within a folded block
if (sep === ' ')
sep = '\n';
else if (!prevMoreIndented && sep === '\n')
sep = '\n\n';
value += sep + indent.slice(trimIndent) + content;
sep = '\n';
prevMoreIndented = true;
}
else if (content === '') {
// empty line
if (sep === '\n')
value += '\n';
else
sep = '\n';
}
else {
value += sep + content;
sep = ' ';
prevMoreIndented = false;
}
}
switch (header.chomp) {
case '-':
break;
case '+':
for (let i = chompStart; i < lines.length; ++i)
value += '\n' + lines[i][0].slice(trimIndent);
if (value[value.length - 1] !== '\n')
value += '\n';
break;
default:
value += '\n';
}
const end = start + header.length + scalar.source.length;
return { value, type, comment: header.comment, range: [start, end, end] };
}
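/*
 * For reference, this is how the header style and chomping indicators handled
 * by `resolveBlockScalar` show up in parsed values. A minimal sketch assuming
 * the public `yaml` package's `parse` entry point (not part of this section)
 * sits on top of this composer:
 *
 * ```ts
 * import { parse } from 'yaml'
 *
 * parse('key: |\n  a\n  b\n')   // { key: 'a\nb\n' }  (literal, clip chomping)
 * parse('key: >\n  a\n  b\n')   // { key: 'a b\n' }   (folded, clip chomping)
 * parse('key: |-\n  a\n  b\n')  // { key: 'a\nb' }    (literal, strip chomping)
 * ```
 */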
function parseBlockScalarHeader({ offset, props }, strict, onError) {
/* istanbul ignore if should not happen */
if (props[0].type !== 'block-scalar-header') {
onError(props[0], 'IMPOSSIBLE', 'Block scalar header not found');
return null;
}
const { source } = props[0];
const mode = source[0];
let indent = 0;
let chomp = '';
let error = -1;
for (let i = 1; i < source.length; ++i) {
const ch = source[i];
if (!chomp && (ch === '-' || ch === '+'))
chomp = ch;
else {
const n = Number(ch);
if (!indent && n)
indent = n;
else if (error === -1)
error = offset + i;
}
}
if (error !== -1)
onError(error, 'UNEXPECTED_TOKEN', `Block scalar header includes extra characters: ${source}`);
let hasSpace = false;
let comment = '';
let length = source.length;
for (let i = 1; i < props.length; ++i) {
const token = props[i];
switch (token.type) {
case 'space':
hasSpace = true;
// fallthrough
case 'newline':
length += token.source.length;
break;
case 'comment':
if (strict && !hasSpace) {
const message = 'Comments must be separated from other tokens by white space characters';
onError(token, 'MISSING_CHAR', message);
}
length += token.source.length;
comment = token.source.substring(1);
break;
case 'error':
onError(token, 'UNEXPECTED_TOKEN', token.message);
length += token.source.length;
break;
/* istanbul ignore next should not happen */
default: {
const message = `Unexpected token in block scalar header: ${token.type}`;
onError(token, 'UNEXPECTED_TOKEN', message);
const ts = token.source;
if (ts && typeof ts === 'string')
length += ts.length;
}
}
}
return { mode, indent, chomp, comment, length };
}
/** @returns Array of lines split up as `[indent, content]` */
function splitLines(source) {
const split = source.split(/\n( *)/);
const first = split[0];
const m = first.match(/^( *)/);
const line0 = m?.[1]
? [m[1], first.slice(m[1].length)]
: ['', first];
const lines = [line0];
for (let i = 1; i < split.length; i += 2)
lines.push([split[i], split[i + 1]]);
return lines;
}
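/*
 * `splitLines` keeps the indentation of each line separate from its content.
 * A small illustration, with values derived from the implementation above:
 *
 * ```ts
 * splitLines('a\n  b\n')
 * // [['', 'a'], ['  ', 'b'], ['', '']]
 * ```
 */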
function resolveFlowScalar(scalar, strict, onError) {
const { offset, type, source, end } = scalar;
let _type;
let value;
const _onError = (rel, code, msg) => onError(offset + rel, code, msg);
switch (type) {
case 'scalar':
_type = Scalar.PLAIN;
value = plainValue(source, _onError);
break;
case 'single-quoted-scalar':
_type = Scalar.QUOTE_SINGLE;
value = singleQuotedValue(source, _onError);
break;
case 'double-quoted-scalar':
_type = Scalar.QUOTE_DOUBLE;
value = doubleQuotedValue(source, _onError);
break;
/* istanbul ignore next should not happen */
default:
onError(scalar, 'UNEXPECTED_TOKEN', `Expected a flow scalar value, but found: ${type}`);
return {
value: '',
type: null,
comment: '',
range: [offset, offset + source.length, offset + source.length]
};
}
const valueEnd = offset + source.length;
const re = resolveEnd(end, valueEnd, strict, onError);
return {
value,
type: _type,
comment: re.comment,
range: [offset, valueEnd, re.offset]
};
}
function plainValue(source, onError) {
let badChar = '';
switch (source[0]) {
/* istanbul ignore next should not happen */
case '\t':
badChar = 'a tab character';
break;
case ',':
badChar = 'flow indicator character ,';
break;
case '%':
badChar = 'directive indicator character %';
break;
case '|':
case '>': {
badChar = `block scalar indicator ${source[0]}`;
break;
}
case '@':
case '`': {
badChar = `reserved character ${source[0]}`;
break;
}
}
if (badChar)
onError(0, 'BAD_SCALAR_START', `Plain value cannot start with ${badChar}`);
return foldLines(source);
}
function singleQuotedValue(source, onError) {
if (source[source.length - 1] !== "'" || source.length === 1)
onError(source.length, 'MISSING_CHAR', "Missing closing 'quote");
return foldLines(source.slice(1, -1)).replace(/''/g, "'");
}
function foldLines(source) {
/**

* The negative lookbehinds in the `first` and `line` RegExps below are to
* prevent causing a polynomial search time in certain cases.
*
* The try-catch is for Safari, which doesn't support this yet:
* https://caniuse.com/js-regexp-lookbehind
*/
let first, line;
try {
first = new RegExp('(.*?)(?<![ \t])[ \t]*\r?\n', 'sy');
line = new RegExp('[ \t]*(.*?)(?:(?<![ \t])[ \t]*)?\r?\n', 'sy');
}
catch (_) {
first = /(.*?)[ \t]*\r?\n/sy;
line = /[ \t]*(.*?)[ \t]*\r?\n/sy;
}
let match = first.exec(source);
if (!match)
return source;
let res = match[1];
let sep = ' ';
let pos = first.lastIndex;
line.lastIndex = pos;
while ((match = line.exec(source))) {
if (match[1] === '') {
if (sep === '\n')
res += sep;
else
sep = '\n';
}
else {
res += sep + match[1];
sep = ' ';
}
pos = line.lastIndex;
}
const last = /[ \t]*(.*)/sy;
last.lastIndex = pos;
match = last.exec(source);
return res + sep + (match?.[1] ?? '');
}
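/*
 * Line folding in a nutshell: a single line break becomes a space, and an
 * empty line becomes a line break. A small illustration of `foldLines`,
 * derived from the code above:
 *
 * ```ts
 * foldLines('a\nb\n\nc')  // 'a b\nc'
 * ```
 */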
function doubleQuotedValue(source, onError) {
let res = '';
for (let i = 1; i < source.length - 1; ++i) {
const ch = source[i];
if (ch === '\r' && source[i + 1] === '\n')
continue;
if (ch === '\n') {
const { fold, offset } = foldNewline(source, i);
res += fold;
i = offset;
}
else if (ch === '\\') {
let next = source[++i];
const cc = escapeCodes[next];
if (cc)
res += cc;
else if (next === '\n') {
// skip escaped newlines, but still trim the following line
next = source[i + 1];
while (next === ' ' || next === '\t')
next = source[++i + 1];
}
else if (next === '\r' && source[i + 1] === '\n') {
// skip escaped CRLF newlines, but still trim the following line
next = source[++i + 1];
while (next === ' ' || next === '\t')
next = source[++i + 1];
}
else if (next === 'x' || next === 'u' || next === 'U') {
const length = { x: 2, u: 4, U: 8 }[next];
res += parseCharCode(source, i + 1, length, onError);
i += length;
}
else {
const raw = source.substr(i - 1, 2);
onError(i - 1, 'BAD_DQ_ESCAPE', `Invalid escape sequence ${raw}`);
res += raw;
}
}
else if (ch === ' ' || ch === '\t') {
// trim trailing whitespace
const wsStart = i;
let next = source[i + 1];
while (next === ' ' || next === '\t')
next = source[++i + 1];
if (next !== '\n' && !(next === '\r' && source[i + 2] === '\n'))
res += i > wsStart ? source.slice(wsStart, i + 1) : ch;
}
else {
res += ch;
}
}
if (source[source.length - 1] !== '"' || source.length === 1)
onError(source.length, 'MISSING_CHAR', 'Missing closing "quote');
return res;
}
/**
* Fold a single newline into a space, multiple newlines to N - 1 newlines.
* Presumes `source[offset] === '\n'`
*/
function foldNewline(source, offset) {
let fold = '';
let ch = source[offset + 1];
while (ch === ' ' || ch === '\t' || ch === '\n' || ch === '\r') {
if (ch === '\r' && source[offset + 2] !== '\n')
break;
if (ch === '\n')
fold += '\n';
offset += 1;
ch = source[offset + 1];
}
if (!fold)
fold = ' ';
return { fold, offset };
}
const escapeCodes = {
'0': '\0', // null character
a: '\x07', // bell character
b: '\b', // backspace
e: '\x1b', // escape character
f: '\f', // form feed
n: '\n', // line feed
r: '\r', // carriage return
t: '\t', // horizontal tab
v: '\v', // vertical tab
N: '\u0085', // Unicode next line
_: '\u00a0', // Unicode non-breaking space
L: '\u2028', // Unicode line separator
P: '\u2029', // Unicode paragraph separator
' ': ' ',
'"': '"',
'/': '/',
'\\': '\\',
'\t': '\t'
};
function parseCharCode(source, offset, length, onError) {
const cc = source.substr(offset, length);
const ok = cc.length === length && /^[0-9a-fA-F]+$/.test(cc);
const code = ok ? parseInt(cc, 16) : NaN;
if (isNaN(code)) {
const raw = source.substr(offset - 2, length + 2);
onError(offset - 2, 'BAD_DQ_ESCAPE', `Invalid escape sequence ${raw}`);
return raw;
}
return String.fromCodePoint(code);
}
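/*
 * `doubleQuotedValue` resolves single-character escapes from `escapeCodes`
 * directly and hands \xNN, \uNNNN and \UNNNNNNNN off to `parseCharCode`.
 * A minimal sketch assuming the public `yaml` `parse` entry point:
 *
 * ```ts
 * import { parse } from 'yaml'
 *
 * parse('"a\\tb\\u00E9"')  // 'a\tbé' (\t from escapeCodes, é via parseCharCode)
 * ```
 */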
function composeScalar(ctx, token, tagToken, onError) {
const { value, type, comment, range } = token.type === 'block-scalar'
? resolveBlockScalar(ctx, token, onError)
: resolveFlowScalar(token, ctx.options.strict, onError);
const tagName = tagToken
? ctx.directives.tagName(tagToken.source, msg => onError(tagToken, 'TAG_RESOLVE_FAILED', msg))
: null;
const tag = tagToken && tagName
? findScalarTagByName(ctx.schema, value, tagName, tagToken, onError)
: token.type === 'scalar'
? findScalarTagByTest(ctx, value, token, onError)
: ctx.schema[SCALAR$1];
let scalar;
try {
const res = tag.resolve(value, msg => onError(tagToken ?? token, 'TAG_RESOLVE_FAILED', msg), ctx.options);
scalar = isScalar$1(res) ? res : new Scalar(res);
}
catch (error) {
const msg = error instanceof Error ? error.message : String(error);
onError(tagToken ?? token, 'TAG_RESOLVE_FAILED', msg);
scalar = new Scalar(value);
}
scalar.range = range;
scalar.source = value;
if (type)
scalar.type = type;
if (tagName)
scalar.tag = tagName;
if (tag.format)
scalar.format = tag.format;
if (comment)
scalar.comment = comment;
return scalar;
}
function findScalarTagByName(schema, value, tagName, tagToken, onError) {
if (tagName === '!')
return schema[SCALAR$1]; // non-specific tag
const matchWithTest = [];
for (const tag of schema.tags) {
if (!tag.collection && tag.tag === tagName) {
if (tag.default && tag.test)
matchWithTest.push(tag);
else
return tag;
}
}
for (const tag of matchWithTest)
if (tag.test?.test(value))
return tag;
const kt = schema.knownTags[tagName];
if (kt && !kt.collection) {
// Ensure that the known tag is available for stringifying,
// but does not get used by default.
schema.tags.push(Object.assign({}, kt, { default: false, test: undefined }));
return kt;
}
onError(tagToken, 'TAG_RESOLVE_FAILED', `Unresolved tag: ${tagName}`, tagName !== 'tag:yaml.org,2002:str');
return schema[SCALAR$1];
}
function findScalarTagByTest({ directives, schema }, value, token, onError) {
const tag = schema.tags.find(tag => tag.default && tag.test?.test(value)) || schema[SCALAR$1];
if (schema.compat) {
const compat = schema.compat.find(tag => tag.default && tag.test?.test(value)) ??
schema[SCALAR$1];
if (tag.tag !== compat.tag) {
const ts = directives.tagString(tag.tag);
const cs = directives.tagString(compat.tag);
const msg = `Value may be parsed as either ${ts} or ${cs}`;
onError(token, 'TAG_RESOLVE_FAILED', msg, true);
}
}
return tag;
}
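/*
 * Tag resolution order for scalars: an explicit tag is looked up by name
 * (`findScalarTagByName`), otherwise the schema's default tags are matched by
 * their `test` patterns (`findScalarTagByTest`). Assuming the public `yaml`
 * `parse` entry point with the default YAML 1.2 core schema:
 *
 * ```ts
 * import { parse } from 'yaml'
 *
 * parse('42')        // 42   (matched by the int tag's test)
 * parse('"42"')      // '42' (quoting bypasses the default tag tests)
 * parse('!!str 42')  // '42' (explicit tag, resolved by name)
 * ```
 */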
function emptyScalarPosition(offset, before, pos) {
if (before) {
if (pos === null)
pos = before.length;
for (let i = pos - 1; i >= 0; --i) {
let st = before[i];
switch (st.type) {
case 'space':
case 'comment':
case 'newline':
offset -= st.source.length;
continue;
}
// Technically, an empty scalar is immediately after the last non-empty
// node, but it's more useful to place it after any whitespace.
st = before[++i];
while (st?.type === 'space') {
offset += st.source.length;
st = before[++i];
}
break;
}
}
return offset;
}
const CN = { composeNode, composeEmptyNode };
function composeNode(ctx, token, props, onError) {
const { spaceBefore, comment, anchor, tag } = props;
let node;
let isSrcToken = true;
switch (token.type) {
case 'alias':
node = composeAlias(ctx, token, onError);
if (anchor || tag)
onError(token, 'ALIAS_PROPS', 'An alias node must not specify any properties');
break;
case 'scalar':
case 'single-quoted-scalar':
case 'double-quoted-scalar':
case 'block-scalar':
node = composeScalar(ctx, token, tag, onError);
if (anchor)
node.anchor = anchor.source.substring(1);
break;
case 'block-map':
case 'block-seq':
case 'flow-collection':
node = composeCollection(CN, ctx, token, props, onError);
if (anchor)
node.anchor = anchor.source.substring(1);
break;
default: {
const message = token.type === 'error'
? token.message
: `Unsupported token (type: ${token.type})`;
onError(token, 'UNEXPECTED_TOKEN', message);
node = composeEmptyNode(ctx, token.offset, undefined, null, props, onError);
isSrcToken = false;
}
}
if (anchor && node.anchor === '')
onError(anchor, 'BAD_ALIAS', 'Anchor cannot be an empty string');
if (spaceBefore)
node.spaceBefore = true;
if (comment) {
if (token.type === 'scalar' && token.source === '')
node.comment = comment;
else
node.commentBefore = comment;
}
// @ts-expect-error Type checking misses meaning of isSrcToken
if (ctx.options.keepSourceTokens && isSrcToken)
node.srcToken = token;
return node;
}
function composeEmptyNode(ctx, offset, before, pos, { spaceBefore, comment, anchor, tag, end }, onError) {
const token = {
type: 'scalar',
offset: emptyScalarPosition(offset, before, pos),
indent: -1,
source: ''
};
const node = composeScalar(ctx, token, tag, onError);
if (anchor) {
node.anchor = anchor.source.substring(1);
if (node.anchor === '')
onError(anchor, 'BAD_ALIAS', 'Anchor cannot be an empty string');
}
if (spaceBefore)
node.spaceBefore = true;
if (comment) {
node.comment = comment;
node.range[2] = end;
}
return node;
}
function composeAlias({ options }, { offset, source, end }, onError) {
const alias = new Alias(source.substring(1));
if (alias.source === '')
onError(offset, 'BAD_ALIAS', 'Alias cannot be an empty string');
if (alias.source.endsWith(':'))
onError(offset + source.length - 1, 'BAD_ALIAS', 'Alias ending in : is ambiguous', true);
const valueEnd = offset + source.length;
const re = resolveEnd(end, valueEnd, options.strict, onError);
alias.range = [offset, valueEnd, re.offset];
if (re.comment)
alias.comment = re.comment;
return alias;
}
function composeDoc(options, directives, { offset, start, value, end }, onError) {
const opts = Object.assign({ _directives: directives }, options);
const doc = new Document(undefined, opts);
const ctx = {
atRoot: true,
directives: doc.directives,
options: doc.options,
schema: doc.schema
};
const props = resolveProps(start, {
indicator: 'doc-start',
next: value ?? end?.[0],
offset,
onError,
parentIndent: 0,
startOnNewline: true
});
if (props.found) {
doc.directives.docStart = true;
if (value &&
(value.type === 'block-map' || value.type === 'block-seq') &&
!props.hasNewline)
onError(props.end, 'MISSING_CHAR', 'Block collection cannot start on same line with directives-end marker');
}
// @ts-expect-error If Contents is set, let's trust the user
doc.contents = value
? composeNode(ctx, value, props, onError)
: composeEmptyNode(ctx, props.end, start, null, props, onError);
const contentEnd = doc.contents.range[2];
const re = resolveEnd(end, contentEnd, false, onError);
if (re.comment)
doc.comment = re.comment;
doc.range = [offset, contentEnd, re.offset];
return doc;
}
function getErrorPos(src) {
if (typeof src === 'number')
return [src, src + 1];
if (Array.isArray(src))
return src.length === 2 ? src : [src[0], src[1]];
const { offset, source } = src;
return [offset, offset + (typeof source === 'string' ? source.length : 1)];
}
function parsePrelude(prelude) {
let comment = '';
let atComment = false;
let afterEmptyLine = false;
for (let i = 0; i < prelude.length; ++i) {
const source = prelude[i];
switch (source[0]) {
case '#':
comment +=
(comment === '' ? '' : afterEmptyLine ? '\n\n' : '\n') +
(source.substring(1) || ' ');
atComment = true;
afterEmptyLine = false;
break;
case '%':
if (prelude[i + 1]?.[0] !== '#')
i += 1;
atComment = false;
break;
default:
// This may be wrong after doc-end, but in that case it doesn't matter
if (!atComment)
afterEmptyLine = true;
atComment = false;
}
}
return { comment, afterEmptyLine };
}
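/*
 * Comments collected in the prelude are attached either to the document or to
 * its first content node by `Composer#decorate` below. A minimal sketch,
 * assuming the public `yaml` `parseDocument` entry point:
 *
 * ```ts
 * import { parseDocument } from 'yaml'
 *
 * const doc = parseDocument('# for the next doc\n---\nfoo: bar\n')
 * doc.commentBefore  // ' for the next doc'
 * doc.toJS()         // { foo: 'bar' }
 * ```
 */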
/**
* Compose a stream of CST nodes into a stream of YAML Documents.
*
* ```ts
* import { Composer, Parser } from 'yaml'
*
* const src: string = ...
* const tokens = new Parser().parse(src)
* const docs = new Composer().compose(tokens)
* ```
*/
class Composer {
constructor(options = {}) {
this.doc = null;
this.atDirectives = false;
this.prelude = [];
this.errors = [];
this.warnings = [];
this.onError = (source, code, message, warning) => {
const pos = getErrorPos(source);
if (warning)
this.warnings.push(new YAMLWarning(pos, code, message));
else
this.errors.push(new YAMLParseError(pos, code, message));
};
// eslint-disable-next-line @typescript-eslint/prefer-nullish-coalescing
this.directives = new Directives({ version: options.version || '1.2' });
this.options = options;
}
decorate(doc, afterDoc) {
const { comment, afterEmptyLine } = parsePrelude(this.prelude);
//console.log({ dc: doc.comment, prelude, comment })
if (comment) {
const dc = doc.contents;
if (afterDoc) {
doc.comment = doc.comment ? `${doc.comment}\n${comment}` : comment;
}
else if (afterEmptyLine || doc.directives.docStart || !dc) {
doc.commentBefore = comment;
}
else if (isCollection$1(dc) && !dc.flow && dc.items.length > 0) {
let it = dc.items[0];
if (isPair(it))
it = it.key;
const cb = it.commentBefore;
it.commentBefore = cb ? `${comment}\n${cb}` : comment;
}
else {
const cb = dc.commentBefore;
dc.commentBefore = cb ? `${comment}\n${cb}` : comment;
}
}
if (afterDoc) {
Array.prototype.push.apply(doc.errors, this.errors);
Array.prototype.push.apply(doc.warnings, this.warnings);
}
else {
doc.errors = this.errors;
doc.warnings = this.warnings;
}
this.prelude = [];
this.errors = [];
this.warnings = [];
}
/**
* Current stream status information.
*
* Mostly useful at the end of input for an empty stream.
*/
streamInfo() {
return {
comment: parsePrelude(this.prelude).comment,
directives: this.directives,
errors: this.errors,
warnings: this.warnings
};
}
/**
* Compose tokens into documents.
*
* @param forceDoc - If the stream contains no document, still emit a final document including any comments and directives that would be applied to a subsequent document.
* @param endOffset - Should be set if `forceDoc` is also set, to set the document range end and to indicate errors correctly.
*/
*compose(tokens, forceDoc = false, endOffset = -1) {
for (const token of tokens)
yield* this.next(token);
yield* this.end(forceDoc, endOffset);
}
/** Advance the composer by one CST token. */
*next(token) {
switch (token.type) {
case 'directive':
this.directives.add(token.source, (offset, message, warning) => {
const pos = getErrorPos(token);
pos[0] += offset;
this.onError(pos, 'BAD_DIRECTIVE', message, warning);
});
this.prelude.push(token.source);
this.atDirectives = true;
break;
case 'document': {
const doc = composeDoc(this.options, this.directives, token, this.onError);
if (this.atDirectives && !doc.directives.docStart)
this.onError(token, 'MISSING_CHAR', 'Missing directives-end/doc-start indicator line');
this.decorate(doc, false);
if (this.doc)
yield this.doc;
this.doc = doc;
this.atDirectives = false;
break;
}
case 'byte-order-mark':
case 'space':
break;
case 'comment':
case 'newline':
this.prelude.push(token.source);
break;
case 'error': {
const msg = token.source
? `${token.message}: ${JSON.stringify(token.source)}`
: token.message;
const error = new YAMLParseError(getErrorPos(token), 'UNEXPECTED_TOKEN', msg);
if (this.atDirectives || !this.doc)
this.errors.push(error);
else
this.doc.errors.push(error);
break;
}
case 'doc-end': {
if (!this.doc) {
const msg = 'Unexpected doc-end without preceding document';
this.errors.push(new YAMLParseError(getErrorPos(token), 'UNEXPECTED_TOKEN', msg));
break;
}
this.doc.directives.docEnd = true;
const end = resolveEnd(token.end, token.offset + token.source.length, this.doc.options.strict, this.onError);
this.decorate(this.doc, true);
if (end.comment) {
const dc = this.doc.comment;
this.doc.comment = dc ? `${dc}\n${end.comment}` : end.comment;
}
this.doc.range[2] = end.offset;
break;
}
default:
this.errors.push(new YAMLParseError(getErrorPos(token), 'UNEXPECTED_TOKEN', `Unsupported token ${token.type}`));
}
}
/**
* Call at end of input to yield any remaining document.
*
* @param forceDoc - If the stream contains no document, still emit a final document including any comments and directives that would be applied to a subsequent document.
* @param endOffset - Should be set if `forceDoc` is also set, to set the document range end and to indicate errors correctly.
*/
*end(forceDoc = false, endOffset = -1) {
if (this.doc) {
this.decorate(this.doc, true);
yield this.doc;
this.doc = null;
}
else if (forceDoc) {
const opts = Object.assign({ _directives: this.directives }, this.options);
const doc = new Document(undefined, opts);
if (this.atDirectives)
this.onError(endOffset, 'MISSING_CHAR', 'Missing directives-end indicator line');
doc.range = [0, endOffset, endOffset];
this.decorate(doc, false);
yield doc;
}
}
}
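/*
 * The Composer yields one Document per document in the token stream. A sketch
 * of composing a multi-document stream, assuming `Parser` and `Composer` are
 * used as in the class doc comment above:
 *
 * ```ts
 * import { Composer, Parser } from 'yaml'
 *
 * const src = 'a: 1\n---\nb: 2\n'
 * const docs = Array.from(new Composer().compose(new Parser().parse(src)))
 * docs.map(doc => doc.toJS())  // [ { a: 1 }, { b: 2 } ]
 * ```
 */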
function resolveAsScalar(token, strict = true, onError) {
if (token) {
const _onError = (pos, code, message) => {
const offset = typeof pos === 'number' ? pos : Array.isArray(pos) ? pos[0] : pos.offset;
if (onError)
onError(offset, code, message);
else
throw new YAMLParseError([offset, offset + 1], code, message);
};
switch (token.type) {
case 'scalar':
case 'single-quoted-scalar':
case 'double-quoted-scalar':
return resolveFlowScalar(token, strict, _onError);
case 'block-scalar':
return resolveBlockScalar({ options: { strict } }, token, _onError);
}
}
return null;
}
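/*
 * `resolveAsScalar` resolves a raw CST scalar token without applying any
 * schema, so the result value is always a string. A sketch assuming these
 * helpers are reachable as `CST` on the public `yaml` package (as in the
 * `cst` namespace assembled below):
 *
 * ```ts
 * import { CST, Parser } from 'yaml'
 *
 * const [doc] = new Parser().parse('key: 42\n')
 * CST.resolveAsScalar(doc.value.items[0].value)
 * // { value: '42', type: 'PLAIN', comment: '', range: [...] } -- still a string
 * ```
 */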
/**
* Create a new scalar token with `value`
*
* Values that represent an actual string but may be parsed as a different type should use a `type` other than `'PLAIN'`,
* as this function does not support any schema operations and won't check for such conflicts.
*
* @param value The string representation of the value, which will have its content properly indented.
* @param context.end Comments and whitespace after the end of the value, or after the block scalar header. If undefined, a newline will be added.
* @param context.implicitKey Being within an implicit key may affect the resolved type of the token's value.
* @param context.indent The indent level of the token.
* @param context.inFlow Is this scalar within a flow collection? This may affect the resolved type of the token's value.
* @param context.offset The offset position of the token.
* @param context.type The preferred type of the scalar token. If undefined, the previous type of the `token` will be used, defaulting to `'PLAIN'`.
*/
function createScalarToken(value, context) {
const { implicitKey = false, indent, inFlow = false, offset = -1, type = 'PLAIN' } = context;
const source = stringifyString({ type, value }, {
implicitKey,
indent: indent > 0 ? ' '.repeat(indent) : '',
inFlow,
options: { blockQuote: true, lineWidth: -1 }
});
const end = context.end ?? [
{ type: 'newline', offset: -1, indent, source: '\n' }
];
switch (source[0]) {
case '|':
case '>': {
const he = source.indexOf('\n');
const head = source.substring(0, he);
const body = source.substring(he + 1) + '\n';
const props = [
{ type: 'block-scalar-header', offset, indent, source: head }
];
if (!addEndtoBlockProps(props, end))
props.push({ type: 'newline', offset: -1, indent, source: '\n' });
return { type: 'block-scalar', offset, indent, props, source: body };
}
case '"':
return { type: 'double-quoted-scalar', offset, indent, source, end };
case "'":
return { type: 'single-quoted-scalar', offset, indent, source, end };
default:
return { type: 'scalar', offset, indent, source, end };
}
}
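/*
 * A sketch of what `createScalarToken` produces for a value that can stay a
 * plain scalar, assuming it is exposed as `CST.createScalarToken` on the
 * public `yaml` package:
 *
 * ```ts
 * import { CST } from 'yaml'
 *
 * CST.createScalarToken('foo', { indent: 0 })
 * // { type: 'scalar', offset: -1, indent: 0, source: 'foo',
 * //   end: [{ type: 'newline', offset: -1, indent: 0, source: '\n' }] }
 * ```
 */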
/**
* Set the value of `token` to the given string `value`, overwriting any previous contents and type that it may have.
*
* Best efforts are made to retain any comments previously associated with the `token`,
* though all contents within a collection's `items` will be overwritten.
*
* Values that represent an actual string but may be parsed as a different type should use a `type` other than `'PLAIN'`,
* as this function does not support any schema operations and won't check for such conflicts.
*
* @param token Any token. If it does not include an `indent` value, the value will be stringified as if it were an implicit key.
* @param value The string representation of the value, which will have its content properly indented.
* @param context.afterKey In most cases, values after a key should have an additional level of indentation.
* @param context.implicitKey Being within an implicit key may affect the resolved type of the token's value.
* @param context.inFlow Being within a flow collection may affect the resolved type of the token's value.
* @param context.type The preferred type of the scalar token. If undefined, the previous type of the `token` will be used, defaulting to `'PLAIN'`.
*/
function setScalarValue(token, value, context = {}) {
let { afterKey = false, implicitKey = false, inFlow = false, type } = context;
let indent = 'indent' in token ? token.indent : null;
if (afterKey && typeof indent === 'number')
indent += 2;
if (!type)
switch (token.type) {
case 'single-quoted-scalar':
type = 'QUOTE_SINGLE';
break;
case 'double-quoted-scalar':
type = 'QUOTE_DOUBLE';
break;
case 'block-scalar': {
const header = token.props[0];
if (header.type !== 'block-scalar-header')
throw new Error('Invalid block scalar header');
type = header.source[0] === '>' ? 'BLOCK_FOLDED' : 'BLOCK_LITERAL';
break;
}
default:
type = 'PLAIN';
}
const source = stringifyString({ type, value }, {
implicitKey: implicitKey || indent === null,
indent: indent !== null && indent > 0 ? ' '.repeat(indent) : '',
inFlow,
options: { blockQuote: true, lineWidth: -1 }
});
switch (source[0]) {
case '|':
case '>':
setBlockScalarValue(token, source);
break;
case '"':
setFlowScalarValue(token, source, 'double-quoted-scalar');
break;
case "'":
setFlowScalarValue(token, source, 'single-quoted-scalar');
break;
default:
setFlowScalarValue(token, source, 'scalar');
}
}
function setBlockScalarValue(token, source) {
const he = source.indexOf('\n');
const head = source.substring(0, he);
const body = source.substring(he + 1) + '\n';
if (token.type === 'block-scalar') {
const header = token.props[0];
if (header.type !== 'block-scalar-header')
throw new Error('Invalid block scalar header');
header.source = head;
token.source = body;
}
else {
const { offset } = token;
const indent = 'indent' in token ? token.indent : -1;
const props = [
{ type: 'block-scalar-header', offset, indent, source: head }
];
if (!addEndtoBlockProps(props, 'end' in token ? token.end : undefined))
props.push({ type: 'newline', offset: -1, indent, source: '\n' });
for (const key of Object.keys(token))
if (key !== 'type' && key !== 'offset')
delete token[key];
Object.assign(token, { type: 'block-scalar', indent, props, source: body });
}
}
/** @returns `true` if last token is a newline */
function addEndtoBlockProps(props, end) {
if (end)
for (const st of end)
switch (st.type) {
case 'space':
case 'comment':
props.push(st);
break;
case 'newline':
props.push(st);
return true;
}
return false;
}
function setFlowScalarValue(token, source, type) {
switch (token.type) {
case 'scalar':
case 'double-quoted-scalar':
case 'single-quoted-scalar':
token.type = type;
token.source = source;
break;
case 'block-scalar': {
const end = token.props.slice(1);
let oa = source.length;
if (token.props[0].type === 'block-scalar-header')
oa -= token.props[0].source.length;
for (const tok of end)
tok.offset += oa;
delete token.props;
Object.assign(token, { type, source, end });
break;
}
case 'block-map':
case 'block-seq': {
const offset = token.offset + source.length;
const nl = { type: 'newline', offset, indent: token.indent, source: '\n' };
delete token.items;
Object.assign(token, { type, source, end: [nl] });
break;
}
default: {
const indent = 'indent' in token ? token.indent : -1;
const end = 'end' in token && Array.isArray(token.end)
? token.end.filter(st => st.type === 'space' ||
st.type === 'comment' ||
st.type === 'newline')
: [];
for (const key of Object.keys(token))
if (key !== 'type' && key !== 'offset')
delete token[key];
Object.assign(token, { type, indent, source, end });
}
}
}
/**
* Stringify a CST document, token, or collection item
*
* Fair warning: This applies no validation whatsoever, and
* simply concatenates the sources in their logical order.
*/
const stringify$1 = (cst) => 'type' in cst ? stringifyToken(cst) : stringifyItem(cst);
function stringifyToken(token) {
switch (token.type) {
case 'block-scalar': {
let res = '';
for (const tok of token.props)
res += stringifyToken(tok);
return res + token.source;
}
case 'block-map':
case 'block-seq': {
let res = '';
for (const item of token.items)
res += stringifyItem(item);
return res;
}
case 'flow-collection': {
let res = token.start.source;
for (const item of token.items)
res += stringifyItem(item);
for (const st of token.end)
res += st.source;
return res;
}
case 'document': {
let res = stringifyItem(token);
if (token.end)
for (const st of token.end)
res += st.source;
return res;
}
default: {
let res = token.source;
if ('end' in token && token.end)
for (const st of token.end)
res += st.source;
return res;
}
}
}
function stringifyItem({ start, key, sep, value }) {
let res = '';
for (const st of start)
res += st.source;
if (key)
res += stringifyToken(key);
if (sep)
for (const st of sep)
res += st.source;
if (value)
res += stringifyToken(value);
return res;
}
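/*
 * Putting the CST editing helpers together: parse to CST, replace a scalar's
 * value with `setScalarValue`, and serialise with the `stringify` defined
 * above. A minimal sketch assuming the public `yaml` `CST` namespace and
 * `Parser`:
 *
 * ```ts
 * import { CST, Parser } from 'yaml'
 *
 * const [doc] = new Parser().parse('a: 1\n')
 * const item = doc.value.items[0]             // { start, key, sep, value }
 * CST.setScalarValue(item.value, 'two', { afterKey: true })
 * CST.stringify(doc)                          // 'a: two\n'
 * ```
 */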
const BREAK = Symbol('break visit');
const SKIP = Symbol('skip children');
const REMOVE = Symbol('remove item');
/**
* Apply a visitor to a CST document or item.
*
* Walks through the tree (depth-first) starting from the root, calling a
* `visitor` function with two arguments when entering each item:
* - `item`: The current item, which includes the following members:
* - `start: SourceToken[]` – Source tokens before the key or value,
* possibly including its anchor or tag.
* - `key?: Token | null` – Set for pair values. May then be `null`, if
* the key before the `:` separator is empty.
* - `sep?: SourceToken[]` – Source tokens between the key and the value,
* which should include the `:` map value indicator if `value` is set.
* - `value?: Token` – The value of a sequence item, or of a map pair.
* - `path`: The steps from the root to the current node, as an array of
* `['key' | 'value', number]` tuples.
*
* The return value of the visitor may be used to control the traversal:
* - `undefined` (default): Do nothing and continue
* - `visit.SKIP`: Do not visit the children of this token, continue with
* next sibling
* - `visit.BREAK`: Terminate traversal completely
* - `visit.REMOVE`: Remove the current item, then continue with the next one
* - `number`: Set the index of the next step. This is useful especially if
* the index of the current token has changed.
* - `function`: Define the next visitor for this item. After the original
* visitor is called on item entry, next visitors are called after handling
* a non-empty `key` and when exiting the item.
*/
function visit(cst, visitor) {
if ('type' in cst && cst.type === 'document')
cst = { start: cst.start, value: cst.value };
_visit(Object.freeze([]), cst, visitor);
}
// Without the `as symbol` casts, TS declares these in the `visit`
// namespace using `var`, but then complains about that because
// `unique symbol` must be `const`.
/** Terminate visit traversal completely */
visit.BREAK = BREAK;
/** Do not visit the children of the current item */
visit.SKIP = SKIP;
/** Remove the current item */
visit.REMOVE = REMOVE;
/** Find the item at `path` from `cst` as the root */
visit.itemAtPath = (cst, path) => {
let item = cst;
for (const [field, index] of path) {
const tok = item?.[field];
if (tok && 'items' in tok) {
item = tok.items[index];
}
else
return undefined;
}
return item;
};
/**
* Get the immediate parent collection of the item at `path` from `cst` as the root.
*
* Throws an error if the collection is not found, which should never happen if the item itself exists.
*/
visit.parentCollection = (cst, path) => {
const parent = visit.itemAtPath(cst, path.slice(0, -1));
const field = path[path.length - 1][0];
const coll = parent?.[field];
if (coll && 'items' in coll)
return coll;
throw new Error('Parent collection not found');
};
function _visit(path, item, visitor) {
let ctrl = visitor(item, path);
if (typeof ctrl === 'symbol')
return ctrl;
for (const field of ['key', 'value']) {
const token = item[field];
if (token && 'items' in token) {
for (let i = 0; i < token.items.length; ++i) {
const ci = _visit(Object.freeze(path.concat([[field, i]])), token.items[i], visitor);
if (typeof ci === 'number')
i = ci - 1;
else if (ci === BREAK)
return BREAK;
else if (ci === REMOVE) {
token.items.splice(i, 1);
i -= 1;
}
}
if (typeof ctrl === 'function' && field === 'key')
ctrl = ctrl(item, path);
}
}
return typeof ctrl === 'function' ? ctrl(item, path) : ctrl;
}
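/*
 * A small `visit` example that removes one map entry from the CST and
 * re-serialises it, assuming the public `yaml` `CST` namespace and `Parser`:
 *
 * ```ts
 * import { CST, Parser } from 'yaml'
 *
 * const [doc] = new Parser().parse('a: 1\nb: 2\n')
 * CST.visit(doc, item => {
 *   if (item.key && CST.isScalar(item.key) && item.key.source === 'b')
 *     return CST.visit.REMOVE
 * })
 * CST.stringify(doc)  // 'a: 1\n'
 * ```
 */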
/** The byte order mark */
const BOM = '\u{FEFF}';
/** Start of doc-mode */
const DOCUMENT = '\x02'; // C0: Start of Text
/** Unexpected end of flow-mode */
const FLOW_END = '\x18'; // C0: Cancel
/** Next token is a scalar value */
const SCALAR = '\x1f'; // C0: Unit Separator
/** @returns `true` if `token` is a flow or block collection */
const isCollection = (token) => !!token && 'items' in token;
/** @returns `true` if `token` is a flow or block scalar; not an alias */
const isScalar = (token) => !!token &&
(token.type === 'scalar' ||
token.type === 'single-quoted-scalar' ||
token.type === 'double-quoted-scalar' ||
token.type === 'block-scalar');
/* istanbul ignore next */
/** Get a printable representation of a lexer token */
function prettyToken(token) {
switch (token) {
case BOM:
return '<BOM>';
case DOCUMENT:
return '<DOC>';
case FLOW_END:
return '<FLOW_END>';
case SCALAR:
return '<SCALAR>';
default:
return JSON.stringify(token);
}
}
/** Identify the type of a lexer token. May return `null` for unknown tokens. */
function tokenType(source) {
switch (source) {
case BOM:
return 'byte-order-mark';
case DOCUMENT:
return 'doc-mode';
case FLOW_END:
return 'flow-error-end';
case SCALAR:
return 'scalar';
case '---':
return 'doc-start';
case '...':
return 'doc-end';
case '':
case '\n':
case '\r\n':
return 'newline';
case '-':
return 'seq-item-ind';
case '?':
return 'explicit-key-ind';
case ':':
return 'map-value-ind';
case '{':
return 'flow-map-start';
case '}':
return 'flow-map-end';
case '[':
return 'flow-seq-start';
case ']':
return 'flow-seq-end';
case ',':
return 'comma';
}
switch (source[0]) {
case ' ':
case '\t':
return 'space';
case '#':
return 'comment';
case '%':
return 'directive-line';
case '*':
return 'alias';
case '&':
return 'anchor';
case '!':
return 'tag';
case "'":
return 'single-quoted-scalar';
case '"':
return 'double-quoted-scalar';
case '|':
case '>':
return 'block-scalar-header';
}
return null;
}
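/*
 * A few representative mappings, derived from the switch above:
 *
 * ```ts
 * tokenType('---')   // 'doc-start'
 * tokenType('# hi')  // 'comment'
 * tokenType('&a')    // 'anchor'
 * tokenType('foo')   // null (a plain scalar's text is preceded by the \x1f marker)
 * ```
 */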
var cst = {
__proto__: null,
BOM: BOM,
DOCUMENT: DOCUMENT,
FLOW_END: FLOW_END,
SCALAR: SCALAR,
createScalarToken: createScalarToken,
isCollection: isCollection,
isScalar: isScalar,
prettyToken: prettyToken,
resolveAsScalar: resolveAsScalar,
setScalarValue: setScalarValue,
stringify: stringify$1,
tokenType: tokenType,
visit: visit
};
/*
START -> stream
stream
directive -> line-end -> stream
indent + line-end -> stream
[else] -> line-start
line-end
comment -> line-end
newline -> .
input-end -> END
line-start
doc-start -> doc
doc-end -> stream
[else] -> indent -> block-start
block-start
seq-item-start -> block-start
explicit-key-start -> block-start
map-value-start -> block-start
[else] -> doc
doc
line-end -> line-start
spaces -> doc
anchor -> doc
tag -> doc
flow-start -> flow -> doc
flow-end -> error -> doc
seq-item-start -> error -> doc
explicit-key-start -> error -> doc
map-value-start -> doc
alias -> doc
quote-start -> quoted-scalar -> doc
block-scalar-header -> line-end -> block-scalar(min) -> line-start
[else] -> plain-scalar(false, min) -> doc
flow
line-end -> flow
spaces -> flow
anchor -> flow
tag -> flow
flow-start -> flow -> flow
flow-end -> .
seq-item-start -> error -> flow
explicit-key-start -> flow
map-value-start -> flow
alias -> flow
quote-start -> quoted-scalar -> flow
comma -> flow
[else] -> plain-scalar(true, 0) -> flow
quoted-scalar
quote-end -> .
[else] -> quoted-scalar
block-scalar(min)
newline + peek(indent < min) -> .
[else] -> block-scalar(min)
plain-scalar(is-flow, min)
scalar-end(is-flow) -> .
peek(newline + (indent < min)) -> .
[else] -> plain-scalar(min)
*/
function isEmpty(ch) {
switch (ch) {
case undefined:
case ' ':
case '\n':
case '\r':
case '\t':
return true;
default:
return false;
}
}
const hexDigits = new Set('0123456789ABCDEFabcdef');
const tagChars = new Set("0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz-#;/?:@&=+$_.!~*'()");
const flowIndicatorChars = new Set(',[]{}');
const invalidAnchorChars = new Set(' ,[]{}\n\r\t');
const isNotAnchorChar = (ch) => !ch || invalidAnchorChars.has(ch);
/**
* Splits an input string into lexical tokens, i.e. smaller strings that are
* easily identifiable by `tokens.tokenType()`.
*
* Lexing always starts in a "stream" context. Incomplete input may be buffered
* until a complete token can be emitted.
*
* In addition to slices of the original input, the following control characters
* may also be emitted:
*
* - `\x02` (Start of Text): A document starts with the next token
* - `\x18` (Cancel): Unexpected end of flow-mode (indicates an error)
* - `\x1f` (Unit Separator): Next token is a scalar value
* - `\u{FEFF}` (Byte order mark): Emitted separately outside documents
*/
class Lexer {
constructor() {
/**
* Flag indicating whether the end of the current buffer marks the end of
* all input
*/
this.atEnd = false;
/**
* Explicit indent set in block scalar header, as an offset from the current
* minimum indent, so e.g. set to 1 from a header `|2+`. Set to -1 if not
* explicitly set.
*/
this.blockScalarIndent = -1;
/**
* Block scalars that include a + (keep) chomping indicator in their header
* include trailing empty lines, which are otherwise excluded from the
* scalar's contents.
*/
this.blockScalarKeep = false;
/** Current input */
this.buffer = '';
/**
* Flag noting whether the map value indicator : can immediately follow this
* node within a flow context.
*/
this.flowKey = false;
/** Count of surrounding flow collection levels. */
this.flowLevel = 0;
/**
* Minimum level of indentation required for next lines to be parsed as a
* part of the current scalar value.
*/
this.indentNext = 0;
/** Indentation level of the current line. */
this.indentValue = 0;
/** Position of the next \n character. */
this.lineEndPos = null;
/** Stores the state of the lexer when reaching the end of incomplete input */
this.next = null;
/** A pointer to `buffer`; the current position of the lexer. */
this.pos = 0;
}
/**
* Generate YAML tokens from the `source` string. If `incomplete`,
* a part of the last line may be left as a buffer for the next call.
*
* @returns A generator of lexical tokens
*/
*lex(source, incomplete = false) {
if (source) {
if (typeof source !== 'string')
throw TypeError('source is not a string');
this.buffer = this.buffer ? this.buffer + source : source;
this.lineEndPos = null;
}
this.atEnd = !incomplete;
let next = this.next ?? 'stream';
while (next && (incomplete || this.hasChars(1)))
next = yield* this.parseNext(next);
}
atLineEnd() {
let i = this.pos;
let ch = this.buffer[i];
while (ch === ' ' || ch === '\t')
ch = this.buffer[++i];
if (!ch || ch === '#' || ch === '\n')
return true;
if (ch === '\r')
return this.buffer[i + 1] === '\n';
return false;
}
charAt(n) {
return this.buffer[this.pos + n];
}
continueScalar(offset) {
let ch = this.buffer[offset];
if (this.indentNext > 0) {
let indent = 0;
while (ch === ' ')
ch = this.buffer[++indent + offset];
if (ch === '\r') {
const next = this.buffer[indent + offset + 1];
if (next === '\n' || (!next && !this.atEnd))
return offset + indent + 1;
}
return ch === '\n' || indent >= this.indentNext || (!ch && !this.atEnd)
? offset + indent
: -1;
}
if (ch === '-' || ch === '.') {
const dt = this.buffer.substr(offset, 3);
if ((dt === '---' || dt === '...') && isEmpty(this.buffer[offset + 3]))
return -1;
}
return offset;
}
getLine() {
let end = this.lineEndPos;
if (typeof end !== 'number' || (end !== -1 && end < this.pos)) {
end = this.buffer.indexOf('\n', this.pos);
this.lineEndPos = end;
}
if (end === -1)
return this.atEnd ? this.buffer.substring(this.pos) : null;
if (this.buffer[end - 1] === '\r')
end -= 1;
return this.buffer.substring(this.pos, end);
}
hasChars(n) {
return this.pos + n <= this.buffer.length;
}
setNext(state) {
this.buffer = this.buffer.substring(this.pos);
this.pos = 0;
this.lineEndPos = null;
this.next = state;
return null;
}
peek(n) {
return this.buffer.substr(this.pos, n);
}
*parseNext(next) {
switch (next) {
case 'stream':
return yield* this.parseStream();
case 'line-start':
return yield* this.parseLineStart();
case 'block-start':
return yield* this.parseBlockStart();
case 'doc':
return yield* this.parseDocument();
case 'flow':
return yield* this.parseFlowCollection();
case 'quoted-scalar':
return yield* this.parseQuotedScalar();
case 'block-scalar':
return yield* this.parseBlockScalar();
case 'plain-scalar':
return yield* this.parsePlainScalar();
}
}
*parseStream() {
let line = this.getLine();
if (line === null)
return this.setNext('stream');
if (line[0] === BOM) {
yield* this.pushCount(1);
line = line.substring(1);
}
if (line[0] === '%') {
let dirEnd = line.length;
let cs = line.indexOf('#');
while (cs !== -1) {
const ch = line[cs - 1];
if (ch === ' ' || ch === '\t') {
dirEnd = cs - 1;
break;
}
else {
cs = line.indexOf('#', cs + 1);
}
}
while (true) {
const ch = line[dirEnd - 1];
if (ch === ' ' || ch === '\t')
dirEnd -= 1;
else
break;
}
const n = (yield* this.pushCount(dirEnd)) + (yield* this.pushSpaces(true));
yield* this.pushCount(line.length - n); // possible comment
this.pushNewline();
return 'stream';
}
if (this.atLineEnd()) {
const sp = yield* this.pushSpaces(true);
yield* this.pushCount(line.length - sp);
yield* this.pushNewline();
return 'stream';
}
yield DOCUMENT;
return yield* this.parseLineStart();
}
*parseLineStart() {
const ch = this.charAt(0);
if (!ch && !this.atEnd)
return this.setNext('line-start');
if (ch === '-' || ch === '.') {
if (!this.atEnd && !this.hasChars(4))
return this.setNext('line-start');
const s = this.peek(3);
if ((s === '---' || s === '...') && isEmpty(this.charAt(3))) {
yield* this.pushCount(3);
this.indentValue = 0;
this.indentNext = 0;
return s === '---' ? 'doc' : 'stream';
}
}
this.indentValue = yield* this.pushSpaces(false);
if (this.indentNext > this.indentValue && !isEmpty(this.charAt(1)))
this.indentNext = this.indentValue;
return yield* this.parseBlockStart();
}
*parseBlockStart() {
const [ch0, ch1] = this.peek(2);
if (!ch1 && !this.atEnd)
return this.setNext('block-start');
if ((ch0 === '-' || ch0 === '?' || ch0 === ':') && isEmpty(ch1)) {
const n = (yield* this.pushCount(1)) + (yield* this.pushSpaces(true));
this.indentNext = this.indentValue + 1;
this.indentValue += n;
return yield* this.parseBlockStart();
}
return 'doc';
}
*parseDocument() {
yield* this.pushSpaces(true);
const line = this.getLine();
if (line === null)
return this.setNext('doc');
let n = yield* this.pushIndicators();
switch (line[n]) {
case '#':
yield* this.pushCount(line.length - n);
// fallthrough
case undefined:
yield* this.pushNewline();
return yield* this.parseLineStart();
case '{':
case '[':
yield* this.pushCount(1);
this.flowKey = false;
this.flowLevel = 1;
return 'flow';
case '}':
case ']':
// this is an error
yield* this.pushCount(1);
return 'doc';
case '*':
yield* this.pushUntil(isNotAnchorChar);
return 'doc';
case '"':
case "'":
return yield* this.parseQuotedScalar();
case '|':
case '>':
n += yield* this.parseBlockScalarHeader();
n += yield* this.pushSpaces(true);
yield* this.pushCount(line.length - n);
yield* this.pushNewline();
return yield* this.parseBlockScalar();
default:
return yield* this.parsePlainScalar();
}
}
*parseFlowCollection() {
let nl, sp;
let indent = -1;
do {
nl = yield* this.pushNewline();
if (nl > 0) {
sp = yield* this.pushSpaces(false);
this.indentValue = indent = sp;
}
else {
sp = 0;
}
sp += yield* this.pushSpaces(true);
} while (nl + sp > 0);
const line = this.getLine();
if (line === null)
return this.setNext('flow');
if ((indent !== -1 && indent < this.indentNext && line[0] !== '#') ||
(indent === 0 &&
(line.startsWith('---') || line.startsWith('...')) &&
isEmpty(line[3]))) {
// Allowing for the terminal ] or } at the same (rather than greater)
// indent level as the initial [ or { is technically invalid, but
// failing here would be surprising to users.
const atFlowEndMarker = indent === this.indentNext - 1 &&
this.flowLevel === 1 &&
(line[0] === ']' || line[0] === '}');
if (!atFlowEndMarker) {
// this is an error
this.flowLevel = 0;
yield FLOW_END;
return yield* this.parseLineStart();
}
}
let n = 0;
while (line[n] === ',') {
n += yield* this.pushCount(1);
n += yield* this.pushSpaces(true);
this.flowKey = false;
}
n += yield* this.pushIndicators();
switch (line[n]) {
case undefined:
return 'flow';
case '#':
yield* this.pushCount(line.length - n);
return 'flow';
case '{':
case '[':
yield* this.pushCount(1);
this.flowKey = false;
this.flowLevel += 1;
return 'flow';
case '}':
case ']':
yield* this.pushCount(1);
this.flowKey = true;
this.flowLevel -= 1;
return this.flowLevel ? 'flow' : 'doc';
case '*':
yield* this.pushUntil(isNotAnchorChar);
return 'flow';
case '"':
case "'":
this.flowKey = true;
return yield* this.parseQuotedScalar();
case ':': {
const next = this.charAt(1);
if (this.flowKey || isEmpty(next) || next === ',') {
this.flowKey = false;
yield* this.pushCount(1);
yield* this.pushSpaces(true);
return 'flow';
}
}
// fallthrough
default:
this.flowKey = false;
return yield* this.parsePlainScalar();
}
}
*parseQuotedScalar() {
const quote = this.charAt(0);
let end = this.buffer.indexOf(quote, this.pos + 1);
if (quote === "'") {
while (end !== -1 && this.buffer[end + 1] === "'")
end = this.buffer.indexOf("'", end + 2);
}
else {
// double-quote
while (end !== -1) {
let n = 0;
while (this.buffer[end - 1 - n] === '\\')
n += 1;
if (n % 2 === 0)
break;
end = this.buffer.indexOf('"', end + 1);
}
}
// Only looking for newlines within the quotes
const qb = this.buffer.substring(0, end);
let nl = qb.indexOf('\n', this.pos);
if (nl !== -1) {
while (nl !== -1) {
const cs = this.continueScalar(nl + 1);
if (cs === -1)
break;
nl = qb.indexOf('\n', cs);
}
if (nl !== -1) {
// this is an error caused by an unexpected unindent
end = nl - (qb[nl - 1] === '\r' ? 2 : 1);
}
}
if (end === -1) {
if (!this.atEnd)
return this.setNext('quoted-scalar');
end = this.buffer.length;
}
yield* this.pushToIndex(end + 1, false);
return this.flowLevel ? 'flow' : 'doc';
}
*parseBlockScalarHeader() {
this.blockScalarIndent = -1;
this.blockScalarKeep = false;
let i = this.pos;
while (true) {
const ch = this.buffer[++i];
if (ch === '+')
this.blockScalarKeep = true;
else if (ch > '0' && ch <= '9')
this.blockScalarIndent = Number(ch) - 1;
else if (ch !== '-')
break;
}
return yield* this.pushUntil(ch => isEmpty(ch) || ch === '#');
}
*parseBlockScalar() {
let nl = this.pos - 1; // may be -1 if this.pos === 0
let indent = 0;
let ch;
loop: for (let i = this.pos; (ch = this.buffer[i]); ++i) {
switch (ch) {
case ' ':
indent += 1;
break;
case '\n':
nl = i;
indent = 0;
break;
case '\r': {
const next = this.buffer[i + 1];
if (!next && !this.atEnd)
return this.setNext('block-scalar');
if (next === '\n')
break;
} // fallthrough
default:
break loop;
}
}
if (!ch && !this.atEnd)
return this.setNext('block-scalar');
if (indent >= this.indentNext) {
if (this.blockScalarIndent === -1)
this.indentNext = indent;
else {
this.indentNext =
this.blockScalarIndent + (this.indentNext === 0 ? 1 : this.indentNext);
}
do {
const cs = this.continueScalar(nl + 1);
if (cs === -1)
break;
nl = this.buffer.indexOf('\n', cs);
} while (nl !== -1);
if (nl === -1) {
if (!this.atEnd)
return this.setNext('block-scalar');
nl = this.buffer.length;
}
}
// Trailing insufficiently indented tabs are invalid.
// To catch that during parsing, we include them in the block scalar value.
let i = nl + 1;
ch = this.buffer[i];
while (ch === ' ')
ch = this.buffer[++i];
if (ch === '\t') {
while (ch === '\t' || ch === ' ' || ch === '\r' || ch === '\n')
ch = this.buffer[++i];
nl = i - 1;
}
else if (!this.blockScalarKeep) {
do {
let i = nl - 1;
let ch = this.buffer[i];
if (ch === '\r')
ch = this.buffer[--i];
const lastChar = i; // Drop the line if last char not more indented
while (ch === ' ')
ch = this.buffer[--i];
if (ch === '\n' && i >= this.pos && i + 1 + indent > lastChar)
nl = i;
else
break;
} while (true);
}
yield SCALAR;
yield* this.pushToIndex(nl + 1, true);
return yield* this.parseLineStart();
}
*parsePlainScalar() {
const inFlow = this.flowLevel > 0;
let end = this.pos - 1;
let i = this.pos - 1;
let ch;
while ((ch = this.buffer[++i])) {
if (ch === ':') {
const next = this.buffer[i + 1];
if (isEmpty(next) || (inFlow && flowIndicatorChars.has(next)))
break;
end = i;
}
else if (isEmpty(ch)) {
let next = this.buffer[i + 1];
if (ch === '\r') {
if (next === '\n') {
i += 1;
ch = '\n';
next = this.buffer[i + 1];
}
else
end = i;
}
if (next === '#' || (inFlow && flowIndicatorChars.has(next)))
break;
if (ch === '\n') {
const cs = this.continueScalar(i + 1);
if (cs === -1)
break;
i = Math.max(i, cs - 2); // to advance, but still account for ' #'
}
}
else {
if (inFlow && flowIndicatorChars.has(ch))
break;
end = i;
}
}
if (!ch && !this.atEnd)
return this.setNext('plain-scalar');
yield SCALAR;
yield* this.pushToIndex(end + 1, true);
return inFlow ? 'flow' : 'doc';
}
*pushCount(n) {
if (n > 0) {
yield this.buffer.substr(this.pos, n);
this.pos += n;
return n;
}
return 0;
}
*pushToIndex(i, allowEmpty) {
const s = this.buffer.slice(this.pos, i);
if (s) {
yield s;
this.pos += s.length;
return s.length;
}
else if (allowEmpty)
yield '';
return 0;
}
*pushIndicators() {
switch (this.charAt(0)) {
case '!':
return ((yield* this.pushTag()) +
(yield* this.pushSpaces(true)) +
(yield* this.pushIndicators()));
case '&':
return ((yield* this.pushUntil(isNotAnchorChar)) +
(yield* this.pushSpaces(true)) +
(yield* this.pushIndicators()));
case '-': // this is an error
case '?': // this is an error outside flow collections
case ':': {
const inFlow = this.flowLevel > 0;
const ch1 = this.charAt(1);
if (isEmpty(ch1) || (inFlow && flowIndicatorChars.has(ch1))) {
if (!inFlow)
this.indentNext = this.indentValue + 1;
else if (this.flowKey)
this.flowKey = false;
return ((yield* this.pushCount(1)) +
(yield* this.pushSpaces(true)) +
(yield* this.pushIndicators()));
}
}
}
return 0;
}
*pushTag() {
if (this.charAt(1) === '<') {
let i = this.pos + 2;
let ch = this.buffer[i];
while (!isEmpty(ch) && ch !== '>')
ch = this.buffer[++i];
return yield* this.pushToIndex(ch === '>' ? i + 1 : i, false);
}
else {
let i = this.pos + 1;
let ch = this.buffer[i];
while (ch) {
if (tagChars.has(ch))
ch = this.buffer[++i];
else if (ch === '%' &&
hexDigits.has(this.buffer[i + 1]) &&
hexDigits.has(this.buffer[i + 2])) {
ch = this.buffer[(i += 3)];
}
else
break;
}
return yield* this.pushToIndex(i, false);
}
}
*pushNewline() {
const ch = this.buffer[this.pos];
if (ch === '\n')
return yield* this.pushCount(1);
else if (ch === '\r' && this.charAt(1) === '\n')
return yield* this.pushCount(2);
else
return 0;
}
*pushSpaces(allowTabs) {
let i = this.pos - 1;
let ch;
do {
ch = this.buffer[++i];
} while (ch === ' ' || (allowTabs && ch === '\t'));
const n = i - this.pos;
if (n > 0) {
yield this.buffer.substr(this.pos, n);
this.pos = i;
}
return n;
}
*pushUntil(test) {
let i = this.pos;
let ch = this.buffer[i];
while (!test(ch))
ch = this.buffer[++i];
return yield* this.pushToIndex(i, false);
}
}
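/*
 * A sketch of the raw lexeme stream, including the control characters listed
 * in the class doc comment above, assuming the public `yaml` `Lexer`:
 *
 * ```ts
 * import { Lexer } from 'yaml'
 *
 * Array.from(new Lexer().lex('key: "value"\n'))
 * // [ '\x02', '\x1f', 'key', ':', ' ', '"value"', '\n' ]
 * //   doc-mode marker, scalar marker, then slices of the input
 * ```
 */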
/**
* Tracks newlines during parsing in order to provide an efficient API for
* determining the one-indexed `{ line, col }` position for any offset
* within the input.
*/
class LineCounter {
constructor() {
this.lineStarts = [];
/**
* Should be called in ascending order. Otherwise, call
* `lineCounter.lineStarts.sort()` before calling `linePos()`.
*/
this.addNewLine = (offset) => this.lineStarts.push(offset);
/**
* Performs a binary search and returns the 1-indexed { line, col }
* position of `offset`. If `line === 0`, `addNewLine` has never been
* called or `offset` is before the first known newline.
*/
this.linePos = (offset) => {
let low = 0;
let high = this.lineStarts.length;
while (low < high) {
const mid = (low + high) >> 1; // Math.floor((low + high) / 2)
if (this.lineStarts[mid] < offset)
low = mid + 1;
else
high = mid;
}
if (this.lineStarts[low] === offset)
return { line: low + 1, col: 1 };
if (low === 0)
return { line: 0, col: offset };
const start = this.lineStarts[low - 1];
return { line: low, col: offset - start + 1 };
};
}
}
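/*
 * Typical usage: pass `addNewLine` to the Parser as its onNewLine callback,
 * then map offsets to line/column positions, assuming the public `yaml`
 * `LineCounter` and `Parser`:
 *
 * ```ts
 * import { LineCounter, Parser } from 'yaml'
 *
 * const lineCounter = new LineCounter()
 * const parser = new Parser(lineCounter.addNewLine)
 * Array.from(parser.parse('a: 1\nb: 2\n'))
 * lineCounter.linePos(5)  // { line: 2, col: 1 } (offset 5 starts the 'b' line)
 * ```
 */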
function includesToken(list, type) {
for (let i = 0; i < list.length; ++i)
if (list[i].type === type)
return true;
return false;
}
function findNonEmptyIndex(list) {
for (let i = 0; i < list.length; ++i) {
switch (list[i].type) {
case 'space':
case 'comment':
case 'newline':
break;
default:
return i;
}
}
return -1;
}
function isFlowToken(token) {
switch (token?.type) {
case 'alias':
case 'scalar':
case 'single-quoted-scalar':
case 'double-quoted-scalar':
case 'flow-collection':
return true;
default:
return false;
}
}
function getPrevProps(parent) {
switch (parent.type) {
case 'document':
return parent.start;
case 'block-map': {
const it = parent.items[parent.items.length - 1];
return it.sep ?? it.start;
}
case 'block-seq':
return parent.items[parent.items.length - 1].start;
/* istanbul ignore next should not happen */
default:
return [];
}
}
/** Note: May modify input array */
function getFirstKeyStartProps(prev) {
if (prev.length === 0)
return [];
let i = prev.length;
loop: while (--i >= 0) {
switch (prev[i].type) {
case 'doc-start':
case 'explicit-key-ind':
case 'map-value-ind':
case 'seq-item-ind':
case 'newline':
break loop;
}
}
return prev.splice(i, prev.length);
}
function fixFlowSeqItems(fc) {
if (fc.start.type === 'flow-seq-start') {
for (const it of fc.items) {
if (it.sep &&
!it.value &&
!includesToken(it.start, 'explicit-key-ind') &&
!includesToken(it.sep, 'map-value-ind')) {
if (it.key)
it.value = it.key;
delete it.key;
if (isFlowToken(it.value)) {
if (it.value.end)
Array.prototype.push.apply(it.value.end, it.sep);
else
it.value.end = it.sep;
}
else
Array.prototype.push.apply(it.start, it.sep);
delete it.sep;
}
}
}
}
/**
* A YAML concrete syntax tree (CST) parser
*
* ```ts
* const src: string = ...
* for (const token of new Parser().parse(src)) {
* // token: Token
* }
* ```
*
* To use the parser with a user-provided lexer:
*
* ```ts
* function* parse(source: string, lexer: Lexer) {
* const parser = new Parser()
* for (const lexeme of lexer.lex(source))
* yield* parser.next(lexeme)
* yield* parser.end()
* }
*
* const src: string = ...
* const lexer = new Lexer()
* for (const token of parse(src, lexer)) {
* // token: Token
* }
* ```
*/
class Parser {
/**
* @param onNewLine - If defined, called separately with the start position of
* each new line (in `parse()`, including the start of input).
*/
constructor(onNewLine) {
/** If true, space and sequence indicators count as indentation */
this.atNewLine = true;
/** If true, next token is a scalar value */
this.atScalar = false;
/** Current indentation level */
this.indent = 0;
/** Current offset since the start of parsing */
this.offset = 0;
/** On the same line with a block map key */
this.onKeyLine = false;
/** Top indicates the node that's currently being built */
this.stack = [];
/** The source of the current token, set in parse() */
this.source = '';
/** The type of the current token, set in parse() */
this.type = '';
// Must be defined after `next()`
this.lexer = new Lexer();
this.onNewLine = onNewLine;
}
/**
* Parse `source` as a YAML stream.
* If `incomplete`, a part of the last line may be left as a buffer for the next call.
*
* Errors are not thrown, but yielded as `{ type: 'error', message }` tokens.
*
* @returns A generator of tokens representing each directive, document, and other structure.
*/
*parse(source, incomplete = false) {
if (this.onNewLine && this.offset === 0)
this.onNewLine(0);
for (const lexeme of this.lexer.lex(source, incomplete))
yield* this.next(lexeme);
if (!incomplete)
yield* this.end();
}
/**
* Advance the parser by the `source` of one lexical token.
*/
*next(source) {
this.source = source;
if (this.atScalar) {
this.atScalar = false;
yield* this.step();
this.offset += source.length;
return;
}
const type = tokenType(source);
if (!type) {
const message = `Not a YAML token: ${source}`;
yield* this.pop({ type: 'error', offset: this.offset, message, source });
this.offset += source.length;
}
else if (type === 'scalar') {
this.atNewLine = false;
this.atScalar = true;
this.type = 'scalar';
}
else {
this.type = type;
yield* this.step();
switch (type) {
case 'newline':
this.atNewLine = true;
this.indent = 0;
if (this.onNewLine)
this.onNewLine(this.offset + source.length);
break;
case 'space':
if (this.atNewLine && source[0] === ' ')
this.indent += source.length;
break;
case 'explicit-key-ind':
case 'map-value-ind':
case 'seq-item-ind':
if (this.atNewLine)
this.indent += source.length;
break;
case 'doc-mode':
case 'flow-error-end':
return;
default:
this.atNewLine = false;
}
this.offset += source.length;
}
}
/** Call at end of input to push out any remaining constructions */
*end() {
while (this.stack.length > 0)
yield* this.pop();
}
get sourceToken() {
const st = {
type: this.type,
offset: this.offset,
indent: this.indent,
source: this.source
};
return st;
}
*step() {
const top = this.peek(1);
if (this.type === 'doc-end' && (!top || top.type !== 'doc-end')) {
while (this.stack.length > 0)
yield* this.pop();
this.stack.push({
type: 'doc-end',
offset: this.offset,
source: this.source
});
return;
}
if (!top)
return yield* this.stream();
switch (top.type) {
case 'document':
return yield* this.document(top);
case 'alias':
case 'scalar':
case 'single-quoted-scalar':
case 'double-quoted-scalar':
return yield* this.scalar(top);
case 'block-scalar':
return yield* this.blockScalar(top);
case 'block-map':
return yield* this.blockMap(top);
case 'block-seq':
return yield* this.blockSequence(top);
case 'flow-collection':
return yield* this.flowCollection(top);
case 'doc-end':
return yield* this.documentEnd(top);
}
/* istanbul ignore next should not happen */
yield* this.pop();
}
peek(n) {
return this.stack[this.stack.length - n];
}
*pop(error) {
const token = error ?? this.stack.pop();
/* istanbul ignore if should not happen */
if (!token) {
const message = 'Tried to pop an empty stack';
yield { type: 'error', offset: this.offset, source: '', message };
}
else if (this.stack.length === 0) {
yield token;
}
else {
const top = this.peek(1);
if (token.type === 'block-scalar') {
// Block scalars use their parent rather than header indent
token.indent = 'indent' in top ? top.indent : 0;
}
else if (token.type === 'flow-collection' && top.type === 'document') {
// Ignore all indent for top-level flow collections
token.indent = 0;
}
if (token.type === 'flow-collection')
fixFlowSeqItems(token);
switch (top.type) {
case 'document':
top.value = token;
break;
case 'block-scalar':
top.props.push(token); // error
break;
case 'block-map': {
const it = top.items[top.items.length - 1];
if (it.value) {
top.items.push({ start: [], key: token, sep: [] });
this.onKeyLine = true;
return;
}
else if (it.sep) {
it.value = token;
}
else {
Object.assign(it, { key: token, sep: [] });
this.onKeyLine = !it.explicitKey;
return;
}
break;
}
case 'block-seq': {
const it = top.items[top.items.length - 1];
if (it.value)
top.items.push({ start: [], value: token });
else
it.value = token;
break;
}
case 'flow-collection': {
const it = top.items[top.items.length - 1];
if (!it || it.value)
top.items.push({ start: [], key: token, sep: [] });
else if (it.sep)
it.value = token;
else
Object.assign(it, { key: token, sep: [] });
return;
}
/* istanbul ignore next should not happen */
default:
yield* this.pop();
yield* this.pop(token);
}
if ((top.type === 'document' ||
top.type === 'block-map' ||
top.type === 'block-seq') &&
(token.type === 'block-map' || token.type === 'block-seq')) {
const last = token.items[token.items.length - 1];
if (last &&
!last.sep &&
!last.value &&
last.start.length > 0 &&
findNonEmptyIndex(last.start) === -1 &&
(token.indent === 0 ||
last.start.every(st => st.type !== 'comment' || st.indent < token.indent))) {
if (top.type === 'document')
top.end = last.start;
else
top.items.push({ start: last.start });
token.items.splice(-1, 1);
}
}
}
}
*stream() {
switch (this.type) {
case 'directive-line':
yield { type: 'directive', offset: this.offset, source: this.source };
return;
case 'byte-order-mark':
case 'space':
case 'comment':
case 'newline':
yield this.sourceToken;
return;
case 'doc-mode':
case 'doc-start': {
const doc = {
type: 'document',
offset: this.offset,
start: []
};
if (this.type === 'doc-start')
doc.start.push(this.sourceToken);
this.stack.push(doc);
return;
}
}
yield {
type: 'error',
offset: this.offset,
message: `Unexpected ${this.type} token in YAML stream`,
source: this.source
};
}
*document(doc) {
if (doc.value)
return yield* this.lineEnd(doc);
switch (this.type) {
case 'doc-start': {
if (findNonEmptyIndex(doc.start) !== -1) {
yield* this.pop();
yield* this.step();
}
else
doc.start.push(this.sourceToken);
return;
}
case 'anchor':
case 'tag':
case 'space':
case 'comment':
case 'newline':
doc.start.push(this.sourceToken);
return;
}
const bv = this.startBlockValue(doc);
if (bv)
this.stack.push(bv);
else {
yield {
type: 'error',
offset: this.offset,
message: `Unexpected ${this.type} token in YAML document`,
source: this.source
};
}
}
*scalar(scalar) {
if (this.type === 'map-value-ind') {
const prev = getPrevProps(this.peek(2));
const start = getFirstKeyStartProps(prev);
let sep;
if (scalar.end) {
sep = scalar.end;
sep.push(this.sourceToken);
delete scalar.end;
}
else
sep = [this.sourceToken];
const map = {
type: 'block-map',
offset: scalar.offset,
indent: scalar.indent,
items: [{ start, key: scalar, sep }]
};
this.onKeyLine = true;
this.stack[this.stack.length - 1] = map;
}
else
yield* this.lineEnd(scalar);
}
*blockScalar(scalar) {
switch (this.type) {
case 'space':
case 'comment':
case 'newline':
scalar.props.push(this.sourceToken);
return;
case 'scalar':
scalar.source = this.source;
// block-scalar source includes trailing newline
this.atNewLine = true;
this.indent = 0;
if (this.onNewLine) {
let nl = this.source.indexOf('\n') + 1;
while (nl !== 0) {
this.onNewLine(this.offset + nl);
nl = this.source.indexOf('\n', nl) + 1;
}
}
yield* this.pop();
break;
/* istanbul ignore next should not happen */
default:
yield* this.pop();
yield* this.step();
}
}
*blockMap(map) {
const it = map.items[map.items.length - 1];
// it.sep is true-ish if pair already has key or : separator
switch (this.type) {
case 'newline':
this.onKeyLine = false;
if (it.value) {
const end = 'end' in it.value ? it.value.end : undefined;
const last = Array.isArray(end) ? end[end.length - 1] : undefined;
if (last?.type === 'comment')
end?.push(this.sourceToken);
else
map.items.push({ start: [this.sourceToken] });
}
else if (it.sep) {
it.sep.push(this.sourceToken);
}
else {
it.start.push(this.sourceToken);
}
return;
case 'space':
case 'comment':
if (it.value) {
map.items.push({ start: [this.sourceToken] });
}
else if (it.sep) {
it.sep.push(this.sourceToken);
}
else {
if (this.atIndentedComment(it.start, map.indent)) {
const prev = map.items[map.items.length - 2];
const end = prev?.value?.end;
if (Array.isArray(end)) {
Array.prototype.push.apply(end, it.start);
end.push(this.sourceToken);
map.items.pop();
return;
}
}
it.start.push(this.sourceToken);
}
return;
}
if (this.indent >= map.indent) {
const atMapIndent = !this.onKeyLine && this.indent === map.indent;
const atNextItem = atMapIndent &&
(it.sep || it.explicitKey) &&
this.type !== 'seq-item-ind';
// For empty nodes, assign newline-separated, non-indented empty tokens to the following node
let start = [];
if (atNextItem && it.sep && !it.value) {
const nl = [];
for (let i = 0; i < it.sep.length; ++i) {
const st = it.sep[i];
switch (st.type) {
case 'newline':
nl.push(i);
break;
case 'space':
break;
case 'comment':
if (st.indent > map.indent)
nl.length = 0;
break;
default:
nl.length = 0;
}
}
if (nl.length >= 2)
start = it.sep.splice(nl[1]);
}
switch (this.type) {
case 'anchor':
case 'tag':
if (atNextItem || it.value) {
start.push(this.sourceToken);
map.items.push({ start });
this.onKeyLine = true;
}
else if (it.sep) {
it.sep.push(this.sourceToken);
}
else {
it.start.push(this.sourceToken);
}
return;
case 'explicit-key-ind':
if (!it.sep && !it.explicitKey) {
it.start.push(this.sourceToken);
it.explicitKey = true;
}
else if (atNextItem || it.value) {
start.push(this.sourceToken);
map.items.push({ start, explicitKey: true });
}
else {
this.stack.push({
type: 'block-map',
offset: this.offset,
indent: this.indent,
items: [{ start: [this.sourceToken], explicitKey: true }]
});
}
this.onKeyLine = true;
return;
case 'map-value-ind':
if (it.explicitKey) {
if (!it.sep) {
if (includesToken(it.start, 'newline')) {
Object.assign(it, { key: null, sep: [this.sourceToken] });
}
else {
const start = getFirstKeyStartProps(it.start);
this.stack.push({
type: 'block-map',
offset: this.offset,
indent: this.indent,
items: [{ start, key: null, sep: [this.sourceToken] }]
});
}
}
else if (it.value) {
map.items.push({ start: [], key: null, sep: [this.sourceToken] });
}
else if (includesToken(it.sep, 'map-value-ind')) {
this.stack.push({
type: 'block-map',
offset: this.offset,
indent: this.indent,
items: [{ start, key: null, sep: [this.sourceToken] }]
});
}
else if (isFlowToken(it.key) &&
!includesToken(it.sep, 'newline')) {
const start = getFirstKeyStartProps(it.start);
const key = it.key;
const sep = it.sep;
sep.push(this.sourceToken);
// @ts-expect-error type guard is wrong here
delete it.key, delete it.sep;
this.stack.push({
type: 'block-map',
offset: this.offset,
indent: this.indent,
items: [{ start, key, sep }]
});
}
else if (start.length > 0) {
// Not actually at next item
it.sep = it.sep.concat(start, this.sourceToken);
}
else {
it.sep.push(this.sourceToken);
}
}
else {
if (!it.sep) {
Object.assign(it, { key: null, sep: [this.sourceToken] });
}
else if (it.value || atNextItem) {
map.items.push({ start, key: null, sep: [this.sourceToken] });
}
else if (includesToken(it.sep, 'map-value-ind')) {
this.stack.push({
type: 'block-map',
offset: this.offset,
indent: this.indent,
items: [{ start: [], key: null, sep: [this.sourceToken] }]
});
}
else {
it.sep.push(this.sourceToken);
}
}
this.onKeyLine = true;
return;
case 'alias':
case 'scalar':
case 'single-quoted-scalar':
case 'double-quoted-scalar': {
const fs = this.flowScalar(this.type);
if (atNextItem || it.value) {
map.items.push({ start, key: fs, sep: [] });
this.onKeyLine = true;
}
else if (it.sep) {
this.stack.push(fs);
}
else {
Object.assign(it, { key: fs, sep: [] });
this.onKeyLine = true;
}
return;
}
default: {
const bv = this.startBlockValue(map);
if (bv) {
if (atMapIndent && bv.type !== 'block-seq') {
map.items.push({ start });
}
this.stack.push(bv);
return;
}
}
}
}
yield* this.pop();
yield* this.step();
}
*blockSequence(seq) {
const it = seq.items[seq.items.length - 1];
switch (this.type) {
case 'newline':
if (it.value) {
const end = 'end' in it.value ? it.value.end : undefined;
const last = Array.isArray(end) ? end[end.length - 1] : undefined;
if (last?.type === 'comment')
end?.push(this.sourceToken);
else
seq.items.push({ start: [this.sourceToken] });
}
else
it.start.push(this.sourceToken);
return;
case 'space':
case 'comment':
if (it.value)
seq.items.push({ start: [this.sourceToken] });
else {
if (this.atIndentedComment(it.start, seq.indent)) {
const prev = seq.items[seq.items.length - 2];
const end = prev?.value?.end;
if (Array.isArray(end)) {
Array.prototype.push.apply(end, it.start);
end.push(this.sourceToken);
seq.items.pop();
return;
}
}
it.start.push(this.sourceToken);
}
return;
case 'anchor':
case 'tag':
if (it.value || this.indent <= seq.indent)
break;
it.start.push(this.sourceToken);
return;
case 'seq-item-ind':
if (this.indent !== seq.indent)
break;
if (it.value || includesToken(it.start, 'seq-item-ind'))
seq.items.push({ start: [this.sourceToken] });
else
it.start.push(this.sourceToken);
return;
}
if (this.indent > seq.indent) {
const bv = this.startBlockValue(seq);
if (bv) {
this.stack.push(bv);
return;
}
}
yield* this.pop();
yield* this.step();
}
*flowCollection(fc) {
const it = fc.items[fc.items.length - 1];
if (this.type === 'flow-error-end') {
let top;
do {
yield* this.pop();
top = this.peek(1);
} while (top && top.type === 'flow-collection');
}
else if (fc.end.length === 0) {
switch (this.type) {
case 'comma':
case 'explicit-key-ind':
if (!it || it.sep)
fc.items.push({ start: [this.sourceToken] });
else
it.start.push(this.sourceToken);
return;
case 'map-value-ind':
if (!it || it.value)
fc.items.push({ start: [], key: null, sep: [this.sourceToken] });
else if (it.sep)
it.sep.push(this.sourceToken);
else
Object.assign(it, { key: null, sep: [this.sourceToken] });
return;
case 'space':
case 'comment':
case 'newline':
case 'anchor':
case 'tag':
if (!it || it.value)
fc.items.push({ start: [this.sourceToken] });
else if (it.sep)
it.sep.push(this.sourceToken);
else
it.start.push(this.sourceToken);
return;
case 'alias':
case 'scalar':
case 'single-quoted-scalar':
case 'double-quoted-scalar': {
const fs = this.flowScalar(this.type);
if (!it || it.value)
fc.items.push({ start: [], key: fs, sep: [] });
else if (it.sep)
this.stack.push(fs);
else
Object.assign(it, { key: fs, sep: [] });
return;
}
case 'flow-map-end':
case 'flow-seq-end':
fc.end.push(this.sourceToken);
return;
}
const bv = this.startBlockValue(fc);
/* istanbul ignore else should not happen */
if (bv)
this.stack.push(bv);
else {
yield* this.pop();
yield* this.step();
}
}
else {
const parent = this.peek(2);
if (parent.type === 'block-map' &&
((this.type === 'map-value-ind' && parent.indent === fc.indent) ||
(this.type === 'newline' &&
!parent.items[parent.items.length - 1].sep))) {
yield* this.pop();
yield* this.step();
}
else if (this.type === 'map-value-ind' &&
parent.type !== 'flow-collection') {
const prev = getPrevProps(parent);
const start = getFirstKeyStartProps(prev);
fixFlowSeqItems(fc);
const sep = fc.end.splice(1, fc.end.length);
sep.push(this.sourceToken);
const map = {
type: 'block-map',
offset: fc.offset,
indent: fc.indent,
items: [{ start, key: fc, sep }]
};
this.onKeyLine = true;
this.stack[this.stack.length - 1] = map;
}
else {
yield* this.lineEnd(fc);
}
}
}
flowScalar(type) {
if (this.onNewLine) {
let nl = this.source.indexOf('\n') + 1;
while (nl !== 0) {
this.onNewLine(this.offset + nl);
nl = this.source.indexOf('\n', nl) + 1;
}
}
return {
type,
offset: this.offset,
indent: this.indent,
source: this.source
};
}
startBlockValue(parent) {
switch (this.type) {
case 'alias':
case 'scalar':
case 'single-quoted-scalar':
case 'double-quoted-scalar':
return this.flowScalar(this.type);
case 'block-scalar-header':
return {
type: 'block-scalar',
offset: this.offset,
indent: this.indent,
props: [this.sourceToken],
source: ''
};
case 'flow-map-start':
case 'flow-seq-start':
return {
type: 'flow-collection',
offset: this.offset,
indent: this.indent,
start: this.sourceToken,
items: [],
end: []
};
case 'seq-item-ind':
return {
type: 'block-seq',
offset: this.offset,
indent: this.indent,
items: [{ start: [this.sourceToken] }]
};
case 'explicit-key-ind': {
this.onKeyLine = true;
const prev = getPrevProps(parent);
const start = getFirstKeyStartProps(prev);
start.push(this.sourceToken);
return {
type: 'block-map',
offset: this.offset,
indent: this.indent,
items: [{ start, explicitKey: true }]
};
}
case 'map-value-ind': {
this.onKeyLine = true;
const prev = getPrevProps(parent);
const start = getFirstKeyStartProps(prev);
return {
type: 'block-map',
offset: this.offset,
indent: this.indent,
items: [{ start, key: null, sep: [this.sourceToken] }]
};
}
}
return null;
}
atIndentedComment(start, indent) {
if (this.type !== 'comment')
return false;
if (this.indent <= indent)
return false;
return start.every(st => st.type === 'newline' || st.type === 'space');
}
*documentEnd(docEnd) {
if (this.type !== 'doc-mode') {
if (docEnd.end)
docEnd.end.push(this.sourceToken);
else
docEnd.end = [this.sourceToken];
if (this.type === 'newline')
yield* this.pop();
}
}
*lineEnd(token) {
switch (this.type) {
case 'comma':
case 'doc-start':
case 'doc-end':
case 'flow-seq-end':
case 'flow-map-end':
case 'map-value-ind':
yield* this.pop();
yield* this.step();
break;
case 'newline':
this.onKeyLine = false;
// fallthrough
case 'space':
case 'comment':
default:
// all other values are errors
if (token.end)
token.end.push(this.sourceToken);
else
token.end = [this.sourceToken];
if (this.type === 'newline')
yield* this.pop();
}
}
}
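/**
 * Streaming sketch (illustrative): with `incomplete = true` a partial last line
 * is buffered by the lexer and only emitted on a later call, so input can be
 * fed in chunks; a final non-incomplete call (or `end()`) flushes the rest.
 *
 * ```js
 * const parser = new Parser()
 * const tokens = []
 * for (const token of parser.parse('key: va', true)) tokens.push(token)
 * for (const token of parser.parse('lue\n')) tokens.push(token)
 * // tokens now include the completed `key: value` block-map document
 * ```
 */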
function parseOptions(options) {
const prettyErrors = options.prettyErrors !== false;
const lineCounter = options.lineCounter || (prettyErrors && new LineCounter()) || null;
return { lineCounter, prettyErrors };
}
/**
* Parse the input as a stream of YAML documents.
*
* Documents should be separated from each other by `...` or `---` marker lines.
*
* @returns If an empty `docs` array is returned, it will be of type
* EmptyStream and contain additional stream information. In
* TypeScript, you should use `'empty' in docs` as a type guard for it.
*/
function parseAllDocuments(source, options = {}) {
const { lineCounter, prettyErrors } = parseOptions(options);
const parser = new Parser(lineCounter?.addNewLine);
const composer = new Composer(options);
const docs = Array.from(composer.compose(parser.parse(source)));
if (prettyErrors && lineCounter)
for (const doc of docs) {
doc.errors.forEach(prettifyError(source, lineCounter));
doc.warnings.forEach(prettifyError(source, lineCounter));
}
if (docs.length > 0)
return docs;
return Object.assign([], { empty: true }, composer.streamInfo());
}
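/**
 * Usage sketch (illustrative): handling the EmptyStream case described above.
 *
 * ```js
 * const docs = parseAllDocuments('a: 1\n---\nb: 2\n')
 * if ('empty' in docs) {
 *   // no documents were found; `docs` still carries stream-level info
 * } else {
 *   docs.map((doc) => doc.toJS()) // [{ a: 1 }, { b: 2 }]
 * }
 * ```
 */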
/** Parse an input string into a single YAML.Document */
function parseDocument(source, options = {}) {
const { lineCounter, prettyErrors } = parseOptions(options);
const parser = new Parser(lineCounter?.addNewLine);
const composer = new Composer(options);
// `doc` is always set by compose.end(true) at the very latest
let doc = null;
for (const _doc of composer.compose(parser.parse(source), true, source.length)) {
if (!doc)
doc = _doc;
else if (doc.options.logLevel !== 'silent') {
doc.errors.push(new YAMLParseError(_doc.range.slice(0, 2), 'MULTIPLE_DOCS', 'Source contains multiple documents; please use YAML.parseAllDocuments()'));
break;
}
}
if (prettyErrors && lineCounter) {
doc.errors.forEach(prettifyError(source, lineCounter));
doc.warnings.forEach(prettifyError(source, lineCounter));
}
return doc;
}
function parse$a(src, reviver, options) {
let _reviver = undefined;
if (typeof reviver === 'function') {
_reviver = reviver;
}
else if (options === undefined && reviver && typeof reviver === 'object') {
options = reviver;
}
const doc = parseDocument(src, options);
if (!doc)
return null;
doc.warnings.forEach(warning => warn(doc.options.logLevel, warning));
if (doc.errors.length > 0) {
if (doc.options.logLevel !== 'silent')
throw doc.errors[0];
else
doc.errors = [];
}
return doc.toJS(Object.assign({ reviver: _reviver }, options));
}
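/**
 * Overload sketch (illustrative): as with `JSON.parse`, the second argument may
 * be either a reviver function or an options object.
 *
 * ```js
 * parse$a('n: 42')                          // { n: 42 }
 * parse$a('n: 42', (key, value) => value)   // reviver form
 * parse$a('n: 42', { logLevel: 'silent' })  // options form; parse errors are dropped
 * ```
 */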
function stringify(value, replacer, options) {
let _replacer = null;
if (typeof replacer === 'function' || Array.isArray(replacer)) {
_replacer = replacer;
}
else if (options === undefined && replacer) {
options = replacer;
}
if (typeof options === 'string')
options = options.length;
if (typeof options === 'number') {
const indent = Math.round(options);
options = indent < 1 ? undefined : indent > 8 ? { indent: 8 } : { indent };
}
if (value === undefined) {
const { keepUndefined } = options ?? replacer ?? {};
if (!keepUndefined)
return undefined;
}
return new Document(value, _replacer, options).toString(options);
}
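/**
 * Overload sketch (illustrative): like `JSON.stringify`, the third argument may
 * be a number or string whose length sets the indent (clamped to 1..8), or a
 * full options object.
 *
 * ```js
 * stringify({ a: [1, 2] })           // 'a:\n  - 1\n  - 2\n'
 * stringify({ a: [1, 2] }, null, 4)  // same document with a four-space indent
 * stringify(undefined)               // undefined, unless keepUndefined is set
 * ```
 */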
var YAML = {
__proto__: null,
Alias: Alias,
CST: cst,
Composer: Composer,
Document: Document,
Lexer: Lexer,
LineCounter: LineCounter,
Pair: Pair,
Parser: Parser,
Scalar: Scalar,
Schema: Schema,
YAMLError: YAMLError,
YAMLMap: YAMLMap,
YAMLParseError: YAMLParseError,
YAMLSeq: YAMLSeq,
YAMLWarning: YAMLWarning,
isAlias: isAlias,
isCollection: isCollection$1,
isDocument: isDocument,
isMap: isMap,
isNode: isNode$1,
isPair: isPair,
isScalar: isScalar$1,
isSeq: isSeq,
parse: parse$a,
parseAllDocuments: parseAllDocuments,
parseDocument: parseDocument,
stringify: stringify,
visit: visit$1,
visitAsync: visitAsync
};
// `export * as default from ...` fails on Webpack v4
// https://github.com/eemeli/yaml/issues/228
var browser$2 = {
__proto__: null,
Alias: Alias,
CST: cst,
Composer: Composer,
Document: Document,
Lexer: Lexer,
LineCounter: LineCounter,
Pair: Pair,
Parser: Parser,
Scalar: Scalar,
Schema: Schema,
YAMLError: YAMLError,
YAMLMap: YAMLMap,
YAMLParseError: YAMLParseError,
YAMLSeq: YAMLSeq,
YAMLWarning: YAMLWarning,
default: YAML,
isAlias: isAlias,
isCollection: isCollection$1,
isDocument: isDocument,
isMap: isMap,
isNode: isNode$1,
isPair: isPair,
isScalar: isScalar$1,
isSeq: isSeq,
parse: parse$a,
parseAllDocuments: parseAllDocuments,
parseDocument: parseDocument,
stringify: stringify,
visit: visit$1,
visitAsync: visitAsync
};
var require$$3 = /*@__PURE__*/getAugmentedNamespace(browser$2);
// eslint-disable-next-line n/no-deprecated-api
const { createRequire, createRequireFromPath } = require$$0$8;
function req$2 (name, rootFile) {
const create = createRequire || createRequireFromPath;
const require = create(rootFile);
return require(name)
}
var req_1 = req$2;
const req$1 = req_1;
/**
* Load Options
*
* @private
* @method options
*
* @param {Object} config PostCSS Config
*
* @return {Object} options PostCSS Options
*/
const options = (config, file) => {
if (config.parser && typeof config.parser === 'string') {
try {
config.parser = req$1(config.parser, file);
} catch (err) {
throw new Error(`Loading PostCSS Parser failed: ${err.message}\n\n(@${file})`)
}
}
if (config.syntax && typeof config.syntax === 'string') {
try {
config.syntax = req$1(config.syntax, file);
} catch (err) {
throw new Error(`Loading PostCSS Syntax failed: ${err.message}\n\n(@${file})`)
}
}
if (config.stringifier && typeof config.stringifier === 'string') {
try {
config.stringifier = req$1(config.stringifier, file);
} catch (err) {
throw new Error(`Loading PostCSS Stringifier failed: ${err.message}\n\n(@${file})`)
}
}
if (config.plugins) {
delete config.plugins;
}
return config
};
var options_1 = options;
const req = req_1;
/**
* Plugin Loader
*
* @private
* @method load
*
* @param {String} plugin PostCSS Plugin Name
* @param {Object} options PostCSS Plugin Options
*
* @return {Function} PostCSS Plugin
*/
const load = (plugin, options, file) => {
try {
if (
options === null ||
options === undefined ||
Object.keys(options).length === 0
) {
return req(plugin, file)
} else {
return req(plugin, file)(options)
}
} catch (err) {
throw new Error(`Loading PostCSS Plugin failed: ${err.message}\n\n(@${file})`)
}
};
/**
* Load Plugins
*
* @private
* @method plugins
*
* @param {Object} config PostCSS Config Plugins
*
* @return {Array} plugins PostCSS Plugins
*/
const plugins = (config, file) => {
let plugins = [];
if (Array.isArray(config.plugins)) {
plugins = config.plugins.filter(Boolean);
} else {
plugins = Object.keys(config.plugins)
.filter((plugin) => {
return config.plugins[plugin] !== false ? plugin : ''
})
.map((plugin) => {
return load(plugin, config.plugins[plugin], file)
});
}
if (plugins.length && plugins.length > 0) {
plugins.forEach((plugin, i) => {
if (plugin.default) {
plugin = plugin.default;
}
if (plugin.postcss === true) {
plugin = plugin();
} else if (plugin.postcss) {
plugin = plugin.postcss;
}
if (
// eslint-disable-next-line
!(
(typeof plugin === 'object' && Array.isArray(plugin.plugins)) ||
(typeof plugin === 'object' && plugin.postcssPlugin) ||
(typeof plugin === 'function')
)
) {
throw new TypeError(`Invalid PostCSS Plugin found at: plugins[${i}]\n\n(@${file})`)
}
});
}
return plugins
};
var plugins_1 = plugins;
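/**
 * Shape sketch (illustrative; plugin names are hypothetical): `plugins` accepts
 * either an array of already-loaded plugins (falsy entries are dropped) or an
 * object map of plugin names to options, where `false` disables a plugin and
 * every remaining name is require()d relative to the config file.
 *
 * ```js
 * plugins({ plugins: [somePluginInstance] }, '/project/postcss.config.js')
 * plugins(
 *   { plugins: { 'some-postcss-plugin': { option: true }, 'disabled-plugin': false } },
 *   '/project/postcss.config.js'
 * )
 * ```
 */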
const resolve = require$$0$4.resolve;
const url$4 = require$$0$9;
const config$1 = src$2;
const yaml = require$$3;
const loadOptions = options_1;
const loadPlugins = plugins_1;
/* istanbul ignore next */
const interopRequireDefault = (obj) => obj && obj.__esModule ? obj : { default: obj };
/**
* Process the result from cosmiconfig
*
* @param {Object} ctx Config Context
* @param {Object} result Cosmiconfig result
*
* @return {Object} PostCSS Config
*/
const processResult = (ctx, result) => {
const file = result.filepath || '';
let config = interopRequireDefault(result.config).default || {};
if (typeof config === 'function') {
config = config(ctx);
} else {
config = Object.assign({}, config, ctx);
}
if (!config.plugins) {
config.plugins = [];
}
return {
plugins: loadPlugins(config, file),
options: loadOptions(config, file),
file
}
};
/**
* Builds the Config Context
*
* @param {Object} ctx Config Context
*
* @return {Object} Config Context
*/
const createContext = (ctx) => {
/**
* @type {Object}
*
* @prop {String} cwd=process.cwd() Config search start location
* @prop {String} env=process.env.NODE_ENV Config Environment, will be set to `development` by `postcss-load-config` if `process.env.NODE_ENV` is `undefined`
*/
ctx = Object.assign({
cwd: process.cwd(),
env: process.env.NODE_ENV
}, ctx);
if (!ctx.env) {
process.env.NODE_ENV = 'development';
}
return ctx
};
const importDefault = async filepath => {
const module = await import(url$4.pathToFileURL(filepath).href);
return module.default
};
const addTypeScriptLoader = (options = {}, loader) => {
const moduleName = 'postcss';
return {
...options,
searchPlaces: [
...(options.searchPlaces || []),
'package.json',
`.${moduleName}rc`,
`.${moduleName}rc.json`,
`.${moduleName}rc.yaml`,
`.${moduleName}rc.yml`,
`.${moduleName}rc.ts`,
`.${moduleName}rc.cts`,
`.${moduleName}rc.js`,
`.${moduleName}rc.cjs`,
`.${moduleName}rc.mjs`,
`${moduleName}.config.ts`,
`${moduleName}.config.cts`,
`${moduleName}.config.js`,
`${moduleName}.config.cjs`,
`${moduleName}.config.mjs`
],
loaders: {
...options.loaders,
'.yaml': (filepath, content) => yaml.parse(content),
'.yml': (filepath, content) => yaml.parse(content),
'.js': importDefault,
'.cjs': importDefault,
'.mjs': importDefault,
'.ts': loader,
'.cts': loader
}
}
};
const withTypeScriptLoader = (rcFunc) => {
return (ctx, path, options) => {
return rcFunc(ctx, path, addTypeScriptLoader(options, (configFile) => {
let registerer = { enabled () {} };
try {
// Register TypeScript compiler instance
registerer = __require('ts-node').register({
// transpile to cjs even if compilerOptions.module in tsconfig is not Node16/NodeNext.
moduleTypes: { '**/*.cts': 'cjs' }
});
return __require(configFile)
} catch (err) {
if (err.code === 'MODULE_NOT_FOUND') {
throw new Error(
`'ts-node' is required for the TypeScript configuration files. Make sure it is installed\nError: ${err.message}`
)
}
throw err
} finally {
registerer.enabled(false);
}
}))
}
};
/**
* Load Config
*
* @method rc
*
* @param {Object} ctx Config Context
* @param {String} path Config Path
* @param {Object} options Config Options
*
* @return {Promise} config PostCSS Config
*/
const rc = withTypeScriptLoader((ctx, path, options) => {
/**
* @type {Object} The full Config Context
*/
ctx = createContext(ctx);
/**
* @type {String} `process.cwd()`
*/
path = path ? resolve(path) : process.cwd();
return config$1.lilconfig('postcss', options)
.search(path)
.then((result) => {
if (!result) {
throw new Error(`No PostCSS Config found in: ${path}`)
}
return processResult(ctx, result)
})
});
/**
* Autoload Config for PostCSS
*
* @author Michael Ciniawsky @michael-ciniawsky <michael.ciniawsky@gmail.com>
* @license MIT
*
* @module postcss-load-config
* @version 2.1.0
*
* @requires cosmiconfig
* @requires ./options
* @requires ./plugins
*/
var src$1 = rc;
var postcssrc = /*@__PURE__*/getDefaultExportFromCjs(src$1);
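/**
 * Usage sketch (illustrative; paths are hypothetical): resolving a project's
 * PostCSS config. The search starts at `path` (or `process.cwd()`) and the
 * promise rejects when no config file is found.
 *
 * ```js
 * postcssrc({ env: 'production' }, '/project/src').then(({ plugins, options, file }) => {
 *   // plugins: loaded plugin functions, options: parser/syntax/stringifier, file: config path
 * })
 * ```
 */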
// Copyright 2014, 2015, 2016, 2017, 2018, 2019, 2020, 2021, 2022, 2023 Simon Lydell
// License: MIT.
var HashbangComment, Identifier, JSXIdentifier, JSXPunctuator, JSXString, JSXText, KeywordsWithExpressionAfter, KeywordsWithNoLineTerminatorAfter, LineTerminatorSequence, MultiLineComment, Newline, NumericLiteral, Punctuator, RegularExpressionLiteral, SingleLineComment, StringLiteral, Template, TokensNotPrecedingObjectLiteral, TokensPrecedingExpression, WhiteSpace;
RegularExpressionLiteral = /\/(?![*\/])(?:\[(?:[^\]\\\n\r\u2028\u2029]+|\\.)*\]|[^\/\\\n\r\u2028\u2029]+|\\.)*(\/[$_\u200C\u200D\p{ID_Continue}]*|\\)?/yu;
Punctuator = /--|\+\+|=>|\.{3}|\??\.(?!\d)|(?:&&|\|\||\?\?|[+\-%&|^]|\*{1,2}|<{1,2}|>{1,3}|!=?|={1,2}|\/(?![\/*]))=?|[?~,:;[\](){}]/y;
Identifier = /(\x23?)(?=[$_\p{ID_Start}\\])(?:[$_\u200C\u200D\p{ID_Continue}]+|\\u[\da-fA-F]{4}|\\u\{[\da-fA-F]+\})+/yu;
StringLiteral = /(['"])(?:[^'"\\\n\r]+|(?!\1)['"]|\\(?:\r\n|[^]))*(\1)?/y;
NumericLiteral = /(?:0[xX][\da-fA-F](?:_?[\da-fA-F])*|0[oO][0-7](?:_?[0-7])*|0[bB][01](?:_?[01])*)n?|0n|[1-9](?:_?\d)*n|(?:(?:0(?!\d)|0\d*[89]\d*|[1-9](?:_?\d)*)(?:\.(?:\d(?:_?\d)*)?)?|\.\d(?:_?\d)*)(?:[eE][+-]?\d(?:_?\d)*)?|0[0-7]+/y;
Template = /[`}](?:[^`\\$]+|\\[^]|\$(?!\{))*(`|\$\{)?/y;
WhiteSpace = /[\t\v\f\ufeff\p{Zs}]+/yu;
LineTerminatorSequence = /\r?\n|[\r\u2028\u2029]/y;
MultiLineComment = /\/\*(?:[^*]+|\*(?!\/))*(\*\/)?/y;
SingleLineComment = /\/\/.*/y;
HashbangComment = /^#!.*/;
JSXPunctuator = /[<>.:={}]|\/(?![\/*])/y;
JSXIdentifier = /[$_\p{ID_Start}][$_\u200C\u200D\p{ID_Continue}-]*/yu;
JSXString = /(['"])(?:[^'"]+|(?!\1)['"])*(\1)?/y;
JSXText = /[^<>{}]+/y;
TokensPrecedingExpression = /^(?:[\/+-]|\.{3}|\?(?:InterpolationIn(?:JSX|Template)|NoLineTerminatorHere|NonExpressionParenEnd|UnaryIncDec))?$|[{}([,;<>=*%&|^!~?:]$/;
TokensNotPrecedingObjectLiteral = /^(?:=>|[;\]){}]|else|\?(?:NoLineTerminatorHere|NonExpressionParenEnd))?$/;
KeywordsWithExpressionAfter = /^(?:await|case|default|delete|do|else|instanceof|new|return|throw|typeof|void|yield)$/;
KeywordsWithNoLineTerminatorAfter = /^(?:return|throw|yield)$/;
Newline = RegExp(LineTerminatorSequence.source);
var jsTokens_1 = function*(input, {jsx = false} = {}) {
var braces, firstCodePoint, isExpression, lastIndex, lastSignificantToken, length, match, mode, nextLastIndex, nextLastSignificantToken, parenNesting, postfixIncDec, punctuator, stack;
({length} = input);
lastIndex = 0;
lastSignificantToken = "";
stack = [
{tag: "JS"}
];
braces = [];
parenNesting = 0;
postfixIncDec = false;
if (match = HashbangComment.exec(input)) {
yield ({
type: "HashbangComment",
value: match[0]
});
lastIndex = match[0].length;
}
while (lastIndex < length) {
mode = stack[stack.length - 1];
switch (mode.tag) {
case "JS":
case "JSNonExpressionParen":
case "InterpolationInTemplate":
case "InterpolationInJSX":
if (input[lastIndex] === "/" && (TokensPrecedingExpression.test(lastSignificantToken) || KeywordsWithExpressionAfter.test(lastSignificantToken))) {
RegularExpressionLiteral.lastIndex = lastIndex;
if (match = RegularExpressionLiteral.exec(input)) {
lastIndex = RegularExpressionLiteral.lastIndex;
lastSignificantToken = match[0];
postfixIncDec = true;
yield ({
type: "RegularExpressionLiteral",
value: match[0],
closed: match[1] !== void 0 && match[1] !== "\\"
});
continue;
}
}
Punctuator.lastIndex = lastIndex;
if (match = Punctuator.exec(input)) {
punctuator = match[0];
nextLastIndex = Punctuator.lastIndex;
nextLastSignificantToken = punctuator;
switch (punctuator) {
case "(":
if (lastSignificantToken === "?NonExpressionParenKeyword") {
stack.push({
tag: "JSNonExpressionParen",
nesting: parenNesting
});
}
parenNesting++;
postfixIncDec = false;
break;
case ")":
parenNesting--;
postfixIncDec = true;
if (mode.tag === "JSNonExpressionParen" && parenNesting === mode.nesting) {
stack.pop();
nextLastSignificantToken = "?NonExpressionParenEnd";
postfixIncDec = false;
}
break;
case "{":
Punctuator.lastIndex = 0;
isExpression = !TokensNotPrecedingObjectLiteral.test(lastSignificantToken) && (TokensPrecedingExpression.test(lastSignificantToken) || KeywordsWithExpressionAfter.test(lastSignificantToken));
braces.push(isExpression);
postfixIncDec = false;
break;
case "}":
switch (mode.tag) {
case "InterpolationInTemplate":
if (braces.length === mode.nesting) {
Template.lastIndex = lastIndex;
match = Template.exec(input);
lastIndex = Template.lastIndex;
lastSignificantToken = match[0];
if (match[1] === "${") {
lastSignificantToken = "?InterpolationInTemplate";
postfixIncDec = false;
yield ({
type: "TemplateMiddle",
value: match[0]
});
} else {
stack.pop();
postfixIncDec = true;
yield ({
type: "TemplateTail",
value: match[0],
closed: match[1] === "`"
});
}
continue;
}
break;
case "InterpolationInJSX":
if (braces.length === mode.nesting) {
stack.pop();
lastIndex += 1;
lastSignificantToken = "}";
yield ({
type: "JSXPunctuator",
value: "}"
});
continue;
}
}
postfixIncDec = braces.pop();
nextLastSignificantToken = postfixIncDec ? "?ExpressionBraceEnd" : "}";
break;
case "]":
postfixIncDec = true;
break;
case "++":
case "--":
nextLastSignificantToken = postfixIncDec ? "?PostfixIncDec" : "?UnaryIncDec";
break;
case "<":
if (jsx && (TokensPrecedingExpression.test(lastSignificantToken) || KeywordsWithExpressionAfter.test(lastSignificantToken))) {
stack.push({tag: "JSXTag"});
lastIndex += 1;
lastSignificantToken = "<";
yield ({
type: "JSXPunctuator",
value: punctuator
});
continue;
}
postfixIncDec = false;
break;
default:
postfixIncDec = false;
}
lastIndex = nextLastIndex;
lastSignificantToken = nextLastSignificantToken;
yield ({
type: "Punctuator",
value: punctuator
});
continue;
}
Identifier.lastIndex = lastIndex;
if (match = Identifier.exec(input)) {
lastIndex = Identifier.lastIndex;
nextLastSignificantToken = match[0];
switch (match[0]) {
case "for":
case "if":
case "while":
case "with":
if (lastSignificantToken !== "." && lastSignificantToken !== "?.") {
nextLastSignificantToken = "?NonExpressionParenKeyword";
}
}
lastSignificantToken = nextLastSignificantToken;
postfixIncDec = !KeywordsWithExpressionAfter.test(match[0]);
yield ({
type: match[1] === "#" ? "PrivateIdentifier" : "IdentifierName",
value: match[0]
});
continue;
}
StringLiteral.lastIndex = lastIndex;
if (match = StringLiteral.exec(input)) {
lastIndex = StringLiteral.lastIndex;
lastSignificantToken = match[0];
postfixIncDec = true;
yield ({
type: "StringLiteral",
value: match[0],
closed: match[2] !== void 0
});
continue;
}
NumericLiteral.lastIndex = lastIndex;
if (match = NumericLiteral.exec(input)) {
lastIndex = NumericLiteral.lastIndex;
lastSignificantToken = match[0];
postfixIncDec = true;
yield ({
type: "NumericLiteral",
value: match[0]
});
continue;
}
Template.lastIndex = lastIndex;
if (match = Template.exec(input)) {
lastIndex = Template.lastIndex;
lastSignificantToken = match[0];
if (match[1] === "${") {
lastSignificantToken = "?InterpolationInTemplate";
stack.push({
tag: "InterpolationInTemplate",
nesting: braces.length
});
postfixIncDec = false;
yield ({
type: "TemplateHead",
value: match[0]
});
} else {
postfixIncDec = true;
yield ({
type: "NoSubstitutionTemplate",
value: match[0],
closed: match[1] === "`"
});
}
continue;
}
break;
case "JSXTag":
case "JSXTagEnd":
JSXPunctuator.lastIndex = lastIndex;
if (match = JSXPunctuator.exec(input)) {
lastIndex = JSXPunctuator.lastIndex;
nextLastSignificantToken = match[0];
switch (match[0]) {
case "<":
stack.push({tag: "JSXTag"});
break;
case ">":
stack.pop();
if (lastSignificantToken === "/" || mode.tag === "JSXTagEnd") {
nextLastSignificantToken = "?JSX";
postfixIncDec = true;
} else {
stack.push({tag: "JSXChildren"});
}
break;
case "{":
stack.push({
tag: "InterpolationInJSX",
nesting: braces.length
});
nextLastSignificantToken = "?InterpolationInJSX";
postfixIncDec = false;
break;
case "/":
if (lastSignificantToken === "<") {
stack.pop();
if (stack[stack.length - 1].tag === "JSXChildren") {
stack.pop();
}
stack.push({tag: "JSXTagEnd"});
}
}
lastSignificantToken = nextLastSignificantToken;
yield ({
type: "JSXPunctuator",
value: match[0]
});
continue;
}
JSXIdentifier.lastIndex = lastIndex;
if (match = JSXIdentifier.exec(input)) {
lastIndex = JSXIdentifier.lastIndex;
lastSignificantToken = match[0];
yield ({
type: "JSXIdentifier",
value: match[0]
});
continue;
}
JSXString.lastIndex = lastIndex;
if (match = JSXString.exec(input)) {
lastIndex = JSXString.lastIndex;
lastSignificantToken = match[0];
yield ({
type: "JSXString",
value: match[0],
closed: match[2] !== void 0
});
continue;
}
break;
case "JSXChildren":
JSXText.lastIndex = lastIndex;
if (match = JSXText.exec(input)) {
lastIndex = JSXText.lastIndex;
lastSignificantToken = match[0];
yield ({
type: "JSXText",
value: match[0]
});
continue;
}
switch (input[lastIndex]) {
case "<":
stack.push({tag: "JSXTag"});
lastIndex++;
lastSignificantToken = "<";
yield ({
type: "JSXPunctuator",
value: "<"
});
continue;
case "{":
stack.push({
tag: "InterpolationInJSX",
nesting: braces.length
});
lastIndex++;
lastSignificantToken = "?InterpolationInJSX";
postfixIncDec = false;
yield ({
type: "JSXPunctuator",
value: "{"
});
continue;
}
}
WhiteSpace.lastIndex = lastIndex;
if (match = WhiteSpace.exec(input)) {
lastIndex = WhiteSpace.lastIndex;
yield ({
type: "WhiteSpace",
value: match[0]
});
continue;
}
LineTerminatorSequence.lastIndex = lastIndex;
if (match = LineTerminatorSequence.exec(input)) {
lastIndex = LineTerminatorSequence.lastIndex;
postfixIncDec = false;
if (KeywordsWithNoLineTerminatorAfter.test(lastSignificantToken)) {
lastSignificantToken = "?NoLineTerminatorHere";
}
yield ({
type: "LineTerminatorSequence",
value: match[0]
});
continue;
}
MultiLineComment.lastIndex = lastIndex;
if (match = MultiLineComment.exec(input)) {
lastIndex = MultiLineComment.lastIndex;
if (Newline.test(match[0])) {
postfixIncDec = false;
if (KeywordsWithNoLineTerminatorAfter.test(lastSignificantToken)) {
lastSignificantToken = "?NoLineTerminatorHere";
}
}
yield ({
type: "MultiLineComment",
value: match[0],
closed: match[1] !== void 0
});
continue;
}
SingleLineComment.lastIndex = lastIndex;
if (match = SingleLineComment.exec(input)) {
lastIndex = SingleLineComment.lastIndex;
postfixIncDec = false;
yield ({
type: "SingleLineComment",
value: match[0]
});
continue;
}
firstCodePoint = String.fromCodePoint(input.codePointAt(lastIndex));
lastIndex += firstCodePoint.length;
lastSignificantToken = firstCodePoint;
postfixIncDec = false;
yield ({
type: mode.tag.startsWith("JSX") ? "JSXInvalid" : "Invalid",
value: firstCodePoint
});
}
return void 0;
};
var jsTokens = /*@__PURE__*/getDefaultExportFromCjs(jsTokens_1);
function stripLiteralJsTokens(code, options) {
const FILL = " ";
const FILL_COMMENT = " ";
let result = "";
const tokens = [];
for (const token of jsTokens(code, { jsx: false })) {
tokens.push(token);
if (token.type === "SingleLineComment") {
result += FILL_COMMENT.repeat(token.value.length);
continue;
}
if (token.type === "MultiLineComment") {
result += token.value.replace(/[^\n]/g, FILL_COMMENT);
continue;
}
if (token.type === "StringLiteral") {
if (!token.closed) {
result += token.value;
continue;
}
const body = token.value.slice(1, -1);
{
result += token.value[0] + FILL.repeat(body.length) + token.value[token.value.length - 1];
continue;
}
}
if (token.type === "NoSubstitutionTemplate") {
const body = token.value.slice(1, -1);
{
result += `\`${body.replace(/[^\n]/g, FILL)}\``;
continue;
}
}
if (token.type === "RegularExpressionLiteral") {
const body = token.value;
{
result += body.replace(/\/(.*)\/(\w?)$/g, (_, $1, $2) => `/${FILL.repeat($1.length)}/${$2}`);
continue;
}
}
if (token.type === "TemplateHead") {
const body = token.value.slice(1, -2);
{
result += `\`${body.replace(/[^\n]/g, FILL)}\${`;
continue;
}
}
if (token.type === "TemplateTail") {
const body = token.value.slice(0, -2);
{
result += `}${body.replace(/[^\n]/g, FILL)}\``;
continue;
}
}
if (token.type === "TemplateMiddle") {
const body = token.value.slice(1, -2);
{
result += `}${body.replace(/[^\n]/g, FILL)}\${`;
continue;
}
}
result += token.value;
}
return {
result,
tokens
};
}
function stripLiteral(code, options) {
return stripLiteralDetailed(code).result;
}
function stripLiteralDetailed(code, options) {
return stripLiteralJsTokens(code);
}
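/**
 * Behavior sketch (illustrative): `stripLiteral` blanks out string, template and
 * regex bodies with spaces while keeping quotes and the overall length intact,
 * so character offsets in the result line up with the original source.
 *
 * ```js
 * const code = 'const url = "https://example.com" // remote'
 * const blanked = stripLiteral(code)
 * // the quotes remain, the string body and the comment become spaces,
 * // and blanked.length === code.length
 * ```
 */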
var main$1 = {exports: {}};
var name = "dotenv";
var version$1 = "16.4.5";
var description = "Loads environment variables from .env file";
var main = "lib/main.js";
var types = "lib/main.d.ts";
var exports = {
".": {
types: "./lib/main.d.ts",
require: "./lib/main.js",
"default": "./lib/main.js"
},
"./config": "./config.js",
"./config.js": "./config.js",
"./lib/env-options": "./lib/env-options.js",
"./lib/env-options.js": "./lib/env-options.js",
"./lib/cli-options": "./lib/cli-options.js",
"./lib/cli-options.js": "./lib/cli-options.js",
"./package.json": "./package.json"
};
var scripts = {
"dts-check": "tsc --project tests/types/tsconfig.json",
lint: "standard",
"lint-readme": "standard-markdown",
pretest: "npm run lint && npm run dts-check",
test: "tap tests/*.js --100 -Rspec",
"test:coverage": "tap --coverage-report=lcov",
prerelease: "npm test",
release: "standard-version"
};
var repository = {
type: "git",
url: "git://github.com/motdotla/dotenv.git"
};
var funding = "https://dotenvx.com";
var keywords = [
"dotenv",
"env",
".env",
"environment",
"variables",
"config",
"settings"
];
var readmeFilename = "README.md";
var license = "BSD-2-Clause";
var devDependencies = {
"@definitelytyped/dtslint": "^0.0.133",
"@types/node": "^18.11.3",
decache: "^4.6.1",
sinon: "^14.0.1",
standard: "^17.0.0",
"standard-markdown": "^7.1.0",
"standard-version": "^9.5.0",
tap: "^16.3.0",
tar: "^6.1.11",
typescript: "^4.8.4"
};
var engines = {
node: ">=12"
};
var browser$1 = {
fs: false
};
var require$$4 = {
name: name,
version: version$1,
description: description,
main: main,
types: types,
exports: exports,
scripts: scripts,
repository: repository,
funding: funding,
keywords: keywords,
readmeFilename: readmeFilename,
license: license,
devDependencies: devDependencies,
engines: engines,
browser: browser$1
};
const fs$9 = require$$0__default;
const path$9 = require$$0$4;
const os$2 = require$$2;
const crypto$1 = require$$3$1;
const packageJson = require$$4;
const version = packageJson.version;
const LINE = /(?:^|^)\s*(?:export\s+)?([\w.-]+)(?:\s*=\s*?|:\s+?)(\s*'(?:\\'|[^'])*'|\s*"(?:\\"|[^"])*"|\s*`(?:\\`|[^`])*`|[^#\r\n]+)?\s*(?:#.*)?(?:$|$)/mg;
// Parse src into an Object
function parse$9 (src) {
const obj = {};
// Convert buffer to string
let lines = src.toString();
// Convert line breaks to same format
lines = lines.replace(/\r\n?/mg, '\n');
let match;
while ((match = LINE.exec(lines)) != null) {
const key = match[1];
// Default undefined or null to empty string
let value = (match[2] || '');
// Remove whitespace
value = value.trim();
// Check if double quoted
const maybeQuote = value[0];
// Remove surrounding quotes
value = value.replace(/^(['"`])([\s\S]*)\1$/mg, '$2');
// Expand newlines if double quoted
if (maybeQuote === '"') {
value = value.replace(/\\n/g, '\n');
value = value.replace(/\\r/g, '\r');
}
// Add to object
obj[key] = value;
}
return obj
}
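/**
 * Parsing sketch (illustrative): `parse$9` (dotenv's `parse`) turns .env text
 * into a plain object; comments are skipped, missing values become '', and
 * double-quoted values have their \n and \r escapes expanded.
 *
 * ```js
 * parse$9('HOST=localhost\nGREETING="hello\\nworld"\n# a comment\nEMPTY=\n')
 * // -> { HOST: 'localhost', GREETING: 'hello\nworld', EMPTY: '' }
 * ```
 */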
function _parseVault (options) {
const vaultPath = _vaultPath(options);
// Parse .env.vault
const result = DotenvModule.configDotenv({ path: vaultPath });
if (!result.parsed) {
const err = new Error(`MISSING_DATA: Cannot parse ${vaultPath} for an unknown reason`);
err.code = 'MISSING_DATA';
throw err
}
// handle scenario for comma separated keys - for use with key rotation
// example: DOTENV_KEY="dotenv://:key_1234@dotenvx.com/vault/.env.vault?environment=prod,dotenv://:key_7890@dotenvx.com/vault/.env.vault?environment=prod"
const keys = _dotenvKey(options).split(',');
const length = keys.length;
let decrypted;
for (let i = 0; i < length; i++) {
try {
// Get full key
const key = keys[i].trim();
// Get instructions for decrypt
const attrs = _instructions(result, key);
// Decrypt
decrypted = DotenvModule.decrypt(attrs.ciphertext, attrs.key);
break
} catch (error) {
// last key
if (i + 1 >= length) {
throw error
}
// try next key
}
}
// Parse decrypted .env string
return DotenvModule.parse(decrypted)
}
function _log (message) {
console.log(`[dotenv@${version}][INFO] ${message}`);
}
function _warn (message) {
console.log(`[dotenv@${version}][WARN] ${message}`);
}
function _debug (message) {
console.log(`[dotenv@${version}][DEBUG] ${message}`);
}
function _dotenvKey (options) {
// prioritize developer directly setting options.DOTENV_KEY
if (options && options.DOTENV_KEY && options.DOTENV_KEY.length > 0) {
return options.DOTENV_KEY
}
// secondary infra already contains a DOTENV_KEY environment variable
if (process.env.DOTENV_KEY && process.env.DOTENV_KEY.length > 0) {
return process.env.DOTENV_KEY
}
// fallback to empty string
return ''
}
function _instructions (result, dotenvKey) {
// Parse DOTENV_KEY. Format is a URI
let uri;
try {
uri = new URL(dotenvKey);
} catch (error) {
if (error.code === 'ERR_INVALID_URL') {
const err = new Error('INVALID_DOTENV_KEY: Wrong format. Must be in valid uri format like dotenv://:key_1234@dotenvx.com/vault/.env.vault?environment=development');
err.code = 'INVALID_DOTENV_KEY';
throw err
}
throw error
}
// Get decrypt key
const key = uri.password;
if (!key) {
const err = new Error('INVALID_DOTENV_KEY: Missing key part');
err.code = 'INVALID_DOTENV_KEY';
throw err
}
// Get environment
const environment = uri.searchParams.get('environment');
if (!environment) {
const err = new Error('INVALID_DOTENV_KEY: Missing environment part');
err.code = 'INVALID_DOTENV_KEY';
throw err
}
// Get ciphertext payload
const environmentKey = `DOTENV_VAULT_${environment.toUpperCase()}`;
const ciphertext = result.parsed[environmentKey]; // DOTENV_VAULT_PRODUCTION
if (!ciphertext) {
const err = new Error(`NOT_FOUND_DOTENV_ENVIRONMENT: Cannot locate environment ${environmentKey} in your .env.vault file.`);
err.code = 'NOT_FOUND_DOTENV_ENVIRONMENT';
throw err
}
return { ciphertext, key }
}
function _vaultPath (options) {
let possibleVaultPath = null;
if (options && options.path && options.path.length > 0) {
if (Array.isArray(options.path)) {
for (const filepath of options.path) {
if (fs$9.existsSync(filepath)) {
possibleVaultPath = filepath.endsWith('.vault') ? filepath : `${filepath}.vault`;
}
}
} else {
possibleVaultPath = options.path.endsWith('.vault') ? options.path : `${options.path}.vault`;
}
} else {
possibleVaultPath = path$9.resolve(process.cwd(), '.env.vault');
}
if (fs$9.existsSync(possibleVaultPath)) {
return possibleVaultPath
}
return null
}
function _resolveHome (envPath) {
return envPath[0] === '~' ? path$9.join(os$2.homedir(), envPath.slice(1)) : envPath
}
function _configVault (options) {
_log('Loading env from encrypted .env.vault');
const parsed = DotenvModule._parseVault(options);
let processEnv = process.env;
if (options && options.processEnv != null) {
processEnv = options.processEnv;
}
DotenvModule.populate(processEnv, parsed, options);
return { parsed }
}
function configDotenv (options) {
const dotenvPath = path$9.resolve(process.cwd(), '.env');
let encoding = 'utf8';
const debug = Boolean(options && options.debug);
if (options && options.encoding) {
encoding = options.encoding;
} else {
if (debug) {
_debug('No encoding is specified. UTF-8 is used by default');
}
}
let optionPaths = [dotenvPath]; // default, look for .env
if (options && options.path) {
if (!Array.isArray(options.path)) {
optionPaths = [_resolveHome(options.path)];
} else {
optionPaths = []; // reset default
for (const filepath of options.path) {
optionPaths.push(_resolveHome(filepath));
}
}
}
// Build the parsed data in a temporary object (because we need to return it). Once we have the final
// parsed data, we will combine it with process.env (or options.processEnv if provided).
let lastError;
const parsedAll = {};
for (const path of optionPaths) {
try {
// Specifying an encoding returns a string instead of a buffer
const parsed = DotenvModule.parse(fs$9.readFileSync(path, { encoding }));
DotenvModule.populate(parsedAll, parsed, options);
} catch (e) {
if (debug) {
_debug(`Failed to load ${path} ${e.message}`);
}
lastError = e;
}
}
let processEnv = process.env;
if (options && options.processEnv != null) {
processEnv = options.processEnv;
}
DotenvModule.populate(processEnv, parsedAll, options);
if (lastError) {
return { parsed: parsedAll, error: lastError }
} else {
return { parsed: parsedAll }
}
}
// Populates process.env from .env file
function config (options) {
// fallback to original dotenv if DOTENV_KEY is not set
if (_dotenvKey(options).length === 0) {
return DotenvModule.configDotenv(options)
}
const vaultPath = _vaultPath(options);
// dotenvKey exists but .env.vault file does not exist
if (!vaultPath) {
_warn(`You set DOTENV_KEY but you are missing a .env.vault file at ${vaultPath}. Did you forget to build it?`);
return DotenvModule.configDotenv(options)
}
return DotenvModule._configVault(options)
}
function decrypt (encrypted, keyStr) {
const key = Buffer.from(keyStr.slice(-64), 'hex');
let ciphertext = Buffer.from(encrypted, 'base64');
const nonce = ciphertext.subarray(0, 12);
const authTag = ciphertext.subarray(-16);
ciphertext = ciphertext.subarray(12, -16);
try {
const aesgcm = crypto$1.createDecipheriv('aes-256-gcm', key, nonce);
aesgcm.setAuthTag(authTag);
return `${aesgcm.update(ciphertext)}${aesgcm.final()}`
} catch (error) {
const isRange = error instanceof RangeError;
const invalidKeyLength = error.message === 'Invalid key length';
const decryptionFailed = error.message === 'Unsupported state or unable to authenticate data';
if (isRange || invalidKeyLength) {
const err = new Error('INVALID_DOTENV_KEY: It must be 64 characters long (or more)');
err.code = 'INVALID_DOTENV_KEY';
throw err
} else if (decryptionFailed) {
const err = new Error('DECRYPTION_FAILED: Please check your DOTENV_KEY');
err.code = 'DECRYPTION_FAILED';
throw err
} else {
throw error
}
}
}
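/**
 * Format note (illustrative): the `encrypted` argument is the base64 encoding of
 * [12-byte nonce][ciphertext][16-byte auth tag], and the AES-256-GCM key is
 * derived from the last 64 hex characters of `keyStr` (the DOTENV_KEY password
 * part), as implemented above.
 */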
// Populate process.env with parsed values
function populate (processEnv, parsed, options = {}) {
const debug = Boolean(options && options.debug);
const override = Boolean(options && options.override);
if (typeof parsed !== 'object') {
const err = new Error('OBJECT_REQUIRED: Please check the processEnv argument being passed to populate');
err.code = 'OBJECT_REQUIRED';
throw err
}
// Set process.env
for (const key of Object.keys(parsed)) {
if (Object.prototype.hasOwnProperty.call(processEnv, key)) {
if (override === true) {
processEnv[key] = parsed[key];
}
if (debug) {
if (override === true) {
_debug(`"${key}" is already defined and WAS overwritten`);
} else {
_debug(`"${key}" is already defined and was NOT overwritten`);
}
}
} else {
processEnv[key] = parsed[key];
}
}
}
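/**
 * Populate sketch (illustrative): keys that already exist on the target object
 * are kept unless `override` is set.
 *
 * ```js
 * const target = { HOST: 'keep-me' }
 * populate(target, { HOST: 'from-env', PORT: '3000' })
 * // target -> { HOST: 'keep-me', PORT: '3000' }
 * populate(target, { HOST: 'from-env' }, { override: true })
 * // target -> { HOST: 'from-env', PORT: '3000' }
 * ```
 */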
const DotenvModule = {
configDotenv,
_configVault,
_parseVault,
config,
decrypt,
parse: parse$9,
populate
};
main$1.exports.configDotenv = DotenvModule.configDotenv;
main$1.exports._configVault = DotenvModule._configVault;
main$1.exports._parseVault = DotenvModule._parseVault;
main$1.exports.config = DotenvModule.config;
main$1.exports.decrypt = DotenvModule.decrypt;
var parse_1$1 = main$1.exports.parse = DotenvModule.parse;
main$1.exports.populate = DotenvModule.populate;
main$1.exports = DotenvModule;
// * /
// * (\\)? # is it escaped with a backslash?
// * (\$) # literal $
// * (?!\() # shouldn't be followed by a parenthesis
// * (\{?) # first brace wrap opening
// * ([\w.]+) # key
// * (?::-((?:\$\{(?:\$\{(?:\$\{[^}]*\}|[^}])*}|[^}])*}|[^}])+))? # optional default nested 3 times
// * (\}?) # last brace wrap closing
// * /xi
const DOTENV_SUBSTITUTION_REGEX = /(\\)?(\$)(?!\()(\{?)([\w.]+)(?::?-((?:\$\{(?:\$\{(?:\$\{[^}]*\}|[^}])*}|[^}])*}|[^}])+))?(\}?)/gi;
function _resolveEscapeSequences (value) {
return value.replace(/\\\$/g, '$')
}
function interpolate (value, processEnv, parsed) {
return value.replace(DOTENV_SUBSTITUTION_REGEX, (match, escaped, dollarSign, openBrace, key, defaultValue, closeBrace) => {
if (escaped === '\\') {
return match.slice(1)
} else {
if (processEnv[key]) {
if (processEnv[key] === parsed[key]) {
return processEnv[key]
} else {
// scenario: PASSWORD_EXPAND_NESTED=${PASSWORD_EXPAND}
return interpolate(processEnv[key], processEnv, parsed)
}
}
if (parsed[key]) {
// avoid recursion from EXPAND_SELF=$EXPAND_SELF
if (parsed[key] === value) {
return parsed[key]
} else {
return interpolate(parsed[key], processEnv, parsed)
}
}
if (defaultValue) {
if (defaultValue.startsWith('$')) {
return interpolate(defaultValue, processEnv, parsed)
} else {
return defaultValue
}
}
return ''
}
})
}
function expand (options) {
let processEnv = process.env;
if (options && options.processEnv != null) {
processEnv = options.processEnv;
}
for (const key in options.parsed) {
let value = options.parsed[key];
const inProcessEnv = Object.prototype.hasOwnProperty.call(processEnv, key);
if (inProcessEnv) {
if (processEnv[key] === options.parsed[key]) {
// if the values match, assume the key was set on processEnv from the .env file, and therefore interpolate
value = interpolate(value, processEnv, options.parsed);
} else {
// do not interpolate - assume processEnv had the intended value even if containing a $.
value = processEnv[key];
}
} else {
// not inProcessEnv so assume interpolation for this .env key
value = interpolate(value, processEnv, options.parsed);
}
options.parsed[key] = _resolveEscapeSequences(value);
}
for (const processKey in options.parsed) {
processEnv[processKey] = options.parsed[processKey];
}
return options
}
var expand_1 = expand;
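/**
 * Expansion sketch (illustrative): `expand` interpolates ${VAR} references and
 * ${VAR:-default} fallbacks inside the parsed values, then copies the result
 * onto `processEnv`.
 *
 * ```js
 * const processEnv = {}
 * const parsed = { HOST: 'localhost', URL: 'http://${HOST}:${PORT:-8080}' }
 * expand_1({ parsed, processEnv })
 * // parsed.URL === 'http://localhost:8080'; both keys are now set on processEnv
 * ```
 */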
function getEnvFilesForMode(mode, envDir) {
return [
/** default file */
`.env`,
/** local file */
`.env.local`,
/** mode file */
`.env.${mode}`,
/** mode local file */
`.env.${mode}.local`
].map((file) => normalizePath$3(path$n.join(envDir, file)));
}
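/**
 * Resolution sketch (illustrative):
 *
 * ```js
 * getEnvFilesForMode('production', '/project')
 * // -> ['/project/.env', '/project/.env.local',
 * //     '/project/.env.production', '/project/.env.production.local']
 * ```
 *
 * In `loadEnv` below, values from later files override duplicate keys from
 * earlier ones.
 */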
function loadEnv(mode, envDir, prefixes = "VITE_") {
if (mode === "local") {
throw new Error(
`"local" cannot be used as a mode name because it conflicts with the .local postfix for .env files.`
);
}
prefixes = arraify(prefixes);
const env = {};
const envFiles = getEnvFilesForMode(mode, envDir);
const parsed = Object.fromEntries(
envFiles.flatMap((filePath) => {
if (!tryStatSync(filePath)?.isFile()) return [];
return Object.entries(parse_1$1(fs__default.readFileSync(filePath)));
})
);
if (parsed.NODE_ENV && process.env.VITE_USER_NODE_ENV === void 0) {
process.env.VITE_USER_NODE_ENV = parsed.NODE_ENV;
}
if (parsed.BROWSER && process.env.BROWSER === void 0) {
process.env.BROWSER = parsed.BROWSER;
}
if (parsed.BROWSER_ARGS && process.env.BROWSER_ARGS === void 0) {
process.env.BROWSER_ARGS = parsed.BROWSER_ARGS;
}
const processEnv = { ...process.env };
expand_1({ parsed, processEnv });
for (const [key, value] of Object.entries(parsed)) {
if (prefixes.some((prefix) => key.startsWith(prefix))) {
env[key] = value;
}
}
for (const key in process.env) {
if (prefixes.some((prefix) => key.startsWith(prefix))) {
env[key] = process.env[key];
}
}
return env;
}
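/**
 * Usage sketch (illustrative; file contents are hypothetical): only variables
 * whose names match an allowed prefix are returned, after interpolation via
 * dotenv-expand.
 *
 * ```js
 * // with /project/.env containing `VITE_API=/api` and `SECRET=hunter2`
 * loadEnv('development', '/project') // -> { VITE_API: '/api' }; SECRET is filtered out
 * ```
 */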
function resolveEnvPrefix({
envPrefix = "VITE_"
}) {
envPrefix = arraify(envPrefix);
if (envPrefix.includes("")) {
throw new Error(
`envPrefix option contains value '', which could lead to unexpected exposure of sensitive information.`
);
}
return envPrefix;
}
const modulePreloadPolyfillId = "vite/modulepreload-polyfill";
const resolvedModulePreloadPolyfillId = "\0" + modulePreloadPolyfillId + ".js";
function modulePreloadPolyfillPlugin(config) {
const skip = config.command !== "build" || config.build.ssr;
let polyfillString;
return {
name: "vite:modulepreload-polyfill",
resolveId(id) {
if (id === modulePreloadPolyfillId) {
return resolvedModulePreloadPolyfillId;
}
},
load(id) {
if (id === resolvedModulePreloadPolyfillId) {
if (skip) {
return "";
}
if (!polyfillString) {
polyfillString = `${isModernFlag}&&(${polyfill.toString()}());`;
}
return { code: polyfillString, moduleSideEffects: true };
}
}
};
}
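/**
 * Usage sketch (illustrative): the virtual id above is meant to be imported from
 * application code (e.g. for backend integration) so that the polyfill below is
 * injected into non-SSR builds.
 *
 * ```js
 * import 'vite/modulepreload-polyfill'
 * ```
 */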
function polyfill() {
const relList = document.createElement("link").relList;
if (relList && relList.supports && relList.supports("modulepreload")) {
return;
}
for (const link of document.querySelectorAll('link[rel="modulepreload"]')) {
processPreload(link);
}
new MutationObserver((mutations) => {
for (const mutation of mutations) {
if (mutation.type !== "childList") {
continue;
}
for (const node of mutation.addedNodes) {
if (node.tagName === "LINK" && node.rel === "modulepreload")
processPreload(node);
}
}
}).observe(document, { childList: true, subtree: true });
function getFetchOpts(link) {
const fetchOpts = {};
if (link.integrity) fetchOpts.integrity = link.integrity;
if (link.referrerPolicy) fetchOpts.referrerPolicy = link.referrerPolicy;
if (link.crossOrigin === "use-credentials")
fetchOpts.credentials = "include";
else if (link.crossOrigin === "anonymous") fetchOpts.credentials = "omit";
else fetchOpts.credentials = "same-origin";
return fetchOpts;
}
function processPreload(link) {
if (link.ep)
return;
link.ep = true;
const fetchOpts = getFetchOpts(link);
fetch(link.href, fetchOpts);
}
}
const htmlProxyRE$1 = /\?html-proxy=?(?:&inline-css)?(?:&style-attr)?&index=(\d+)\.(?:js|css)$/;
const isHtmlProxyRE = /\?html-proxy\b/;
const inlineCSSRE$1 = /__VITE_INLINE_CSS__([a-z\d]{8}_\d+)__/g;
const inlineImportRE = /(?<!(?<!\.\.)\.)\bimport\s*\(("(?:[^"]|(?<=\\)")*"|'(?:[^']|(?<=\\)')*')\)/dg;
const htmlLangRE = /\.(?:html|htm)$/;
const spaceRe = /[\t\n\f\r ]/;
const importMapRE = /[ \t]*<script[^>]*type\s*=\s*(?:"importmap"|'importmap'|importmap)[^>]*>.*?<\/script>/is;
const moduleScriptRE = /[ \t]*<script[^>]*type\s*=\s*(?:"module"|'module'|module)[^>]*>/i;
const modulePreloadLinkRE = /[ \t]*<link[^>]*rel\s*=\s*(?:"modulepreload"|'modulepreload'|modulepreload)[\s\S]*?\/>/i;
const importMapAppendRE = new RegExp(
[moduleScriptRE, modulePreloadLinkRE].map((r) => r.source).join("|"),
"i"
);
const isHTMLProxy = (id) => isHtmlProxyRE.test(id);
const isHTMLRequest = (request) => htmlLangRE.test(request);
const htmlProxyMap = /* @__PURE__ */ new WeakMap();
const htmlProxyResult = /* @__PURE__ */ new Map();
function htmlInlineProxyPlugin(config) {
htmlProxyMap.set(config, /* @__PURE__ */ new Map());
return {
name: "vite:html-inline-proxy",
resolveId(id) {
if (isHTMLProxy(id)) {
return id;
}
},
load(id) {
const proxyMatch = htmlProxyRE$1.exec(id);
if (proxyMatch) {
const index = Number(proxyMatch[1]);
const file = cleanUrl(id);
const url = file.replace(normalizePath$3(config.root), "");
const result = htmlProxyMap.get(config).get(url)?.[index];
if (result) {
return result;
} else {
throw new Error(`No matching HTML proxy module found from ${id}`);
}
}
}
};
}
function addToHTMLProxyCache(config, filePath, index, result) {
if (!htmlProxyMap.get(config)) {
htmlProxyMap.set(config, /* @__PURE__ */ new Map());
}
if (!htmlProxyMap.get(config).get(filePath)) {
htmlProxyMap.get(config).set(filePath, []);
}
htmlProxyMap.get(config).get(filePath)[index] = result;
}
function addToHTMLProxyTransformResult(hash, code) {
htmlProxyResult.set(hash, code);
}
const assetAttrsConfig = {
link: ["href"],
video: ["src", "poster"],
source: ["src", "srcset"],
img: ["src", "srcset"],
image: ["xlink:href", "href"],
use: ["xlink:href", "href"]
};
const noInlineLinkRels = /* @__PURE__ */ new Set([
"icon",
"apple-touch-icon",
"apple-touch-startup-image",
"manifest"
]);
const isAsyncScriptMap = /* @__PURE__ */ new WeakMap();
function nodeIsElement(node) {
return node.nodeName[0] !== "#";
}
function traverseNodes(node, visitor) {
visitor(node);
if (nodeIsElement(node) || node.nodeName === "#document" || node.nodeName === "#document-fragment") {
node.childNodes.forEach((childNode) => traverseNodes(childNode, visitor));
}
}
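// Parses an HTML document with parse5 (loaded lazily from a bundled chunk) and
// walks every node with the visitor. Scripting is disabled so that <noscript>
// contents are parsed as regular markup, and parse errors are filtered through
// handleParseError below.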
async function traverseHtml(html, filePath, visitor) {
const { parse } = await import('./dep-D-7KCb9p.js');
const ast = parse(html, {
scriptingEnabled: false,
// parse inside <noscript>
sourceCodeLocationInfo: true,
onParseError: (e) => {
handleParseError(e, html, filePath);
}
});
traverseNodes(ast, visitor);
}
function getScriptInfo(node) {
let src;
let sourceCodeLocation;
let isModule = false;
let isAsync = false;
for (const p of node.attrs) {
if (p.prefix !== void 0) continue;
if (p.name === "src") {
if (!src) {
src = p;
sourceCodeLocation = node.sourceCodeLocation?.attrs["src"];
}
} else if (p.name === "type" && p.value && p.value === "module") {
isModule = true;
} else if (p.name === "async") {
isAsync = true;
}
}
return { src, sourceCodeLocation, isModule, isAsync };
}
const attrValueStartRE = /=\s*(.)/;
function overwriteAttrValue(s, sourceCodeLocation, newValue) {
const srcString = s.slice(
sourceCodeLocation.startOffset,
sourceCodeLocation.endOffset
);
const valueStart = attrValueStartRE.exec(srcString);
if (!valueStart) {
throw new Error(
`[vite:html] internal error, failed to overwrite attribute value`
);
}
const wrapOffset = valueStart[1] === '"' || valueStart[1] === "'" ? 1 : 0;
const valueOffset = valueStart.index + valueStart[0].length - 1;
s.update(
sourceCodeLocation.startOffset + valueOffset + wrapOffset,
sourceCodeLocation.endOffset - wrapOffset,
newValue
);
return s;
}
function formatParseError(parserError, id, html) {
const formattedError = {
code: parserError.code,
message: `parse5 error code ${parserError.code}`,
frame: generateCodeFrame(
html,
parserError.startOffset,
parserError.endOffset
),
loc: {
file: id,
line: parserError.startLine,
column: parserError.startCol
}
};
return formattedError;
}
function handleParseError(parserError, html, filePath) {
switch (parserError.code) {
case "missing-doctype":
return;
case "abandoned-head-element-child":
return;
case "duplicate-attribute":
return;
case "non-void-html-element-start-tag-with-trailing-solidus":
return;
}
const parseError = formatParseError(parserError, filePath, html);
throw new Error(
`Unable to parse HTML; ${parseError.message}
at ${parseError.loc.file}:${parseError.loc.line}:${parseError.loc.column}
${parseError.frame}`
);
}
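// Build-time HTML entry plugin. transform() lifts module scripts, inline
// scripts/styles and asset URLs out of each .html entry into a JS proxy module;
// generateBundle() then re-injects the emitted script, modulepreload and
// stylesheet tags and writes the final HTML asset.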
function buildHtmlPlugin(config) {
const [preHooks, normalHooks, postHooks] = resolveHtmlTransforms(
config.plugins,
config.logger
);
preHooks.unshift(injectCspNonceMetaTagHook(config));
preHooks.unshift(preImportMapHook(config));
preHooks.push(htmlEnvHook(config));
postHooks.push(injectNonceAttributeTagHook(config));
postHooks.push(postImportMapHook());
const processedHtml = /* @__PURE__ */ new Map();
const isExcludedUrl = (url) => url[0] === "#" || isExternalUrl(url) || isDataUrl(url);
isAsyncScriptMap.set(config, /* @__PURE__ */ new Map());
return {
name: "vite:build-html",
async transform(html, id) {
if (id.endsWith(".html")) {
id = normalizePath$3(id);
const relativeUrlPath = normalizePath$3(path$n.relative(config.root, id));
const publicPath = `/${relativeUrlPath}`;
const publicBase = getBaseInHTML(relativeUrlPath, config);
const publicToRelative = (filename, importer) => publicBase + filename;
const toOutputPublicFilePath = (url) => toOutputFilePathInHtml(
url.slice(1),
"public",
relativeUrlPath,
"html",
config,
publicToRelative
);
const nodeStartWithLeadingWhitespace = (node) => {
const startOffset = node.sourceCodeLocation.startOffset;
if (startOffset === 0) return 0;
const lineStartOffset = startOffset - node.sourceCodeLocation.startCol;
let isLineEmpty = false;
try {
const line = s.slice(Math.max(0, lineStartOffset), startOffset);
isLineEmpty = !line.trim();
} catch {
}
return isLineEmpty ? lineStartOffset : startOffset;
};
html = await applyHtmlTransforms(html, preHooks, {
path: publicPath,
filename: id
});
let js = "";
const s = new MagicString(html);
const scriptUrls = [];
const styleUrls = [];
let inlineModuleIndex = -1;
let everyScriptIsAsync = true;
let someScriptsAreAsync = false;
let someScriptsAreDefer = false;
const assetUrlsPromises = [];
const namedOutput = Object.keys(
config?.build?.rollupOptions?.input || {}
);
const processAssetUrl = async (url, shouldInline) => {
if (url !== "" && // Empty attribute
!namedOutput.includes(url) && // Direct reference to named output
!namedOutput.includes(removeLeadingSlash(url))) {
try {
return await urlToBuiltUrl(url, id, config, this, shouldInline);
} catch (e) {
if (e.code !== "ENOENT") {
throw e;
}
}
}
return url;
};
await traverseHtml(html, id, (node) => {
if (!nodeIsElement(node)) {
return;
}
let shouldRemove = false;
if (node.nodeName === "script") {
const { src, sourceCodeLocation, isModule, isAsync } = getScriptInfo(node);
const url = src && src.value;
const isPublicFile = !!(url && checkPublicFile(url, config));
if (isPublicFile) {
overwriteAttrValue(
s,
sourceCodeLocation,
partialEncodeURIPath(toOutputPublicFilePath(url))
);
}
if (isModule) {
inlineModuleIndex++;
if (url && !isExcludedUrl(url) && !isPublicFile) {
js += `
import ${JSON.stringify(url)}`;
shouldRemove = true;
} else if (node.childNodes.length) {
const scriptNode = node.childNodes.pop();
const contents = scriptNode.value;
const filePath = id.replace(normalizePath$3(config.root), "");
addToHTMLProxyCache(config, filePath, inlineModuleIndex, {
code: contents
});
js += `
import "${id}?html-proxy&index=${inlineModuleIndex}.js"`;
shouldRemove = true;
}
everyScriptIsAsync &&= isAsync;
someScriptsAreAsync ||= isAsync;
someScriptsAreDefer ||= !isAsync;
} else if (url && !isPublicFile) {
if (!isExcludedUrl(url)) {
config.logger.warn(
`<script src="${url}"> in "${publicPath}" can't be bundled without type="module" attribute`
);
}
} else if (node.childNodes.length) {
const scriptNode = node.childNodes.pop();
scriptUrls.push(
...extractImportExpressionFromClassicScript(scriptNode)
);
}
}
const assetAttrs = assetAttrsConfig[node.nodeName];
if (assetAttrs) {
for (const p of node.attrs) {
const attrKey = getAttrKey(p);
if (p.value && assetAttrs.includes(attrKey)) {
if (attrKey === "srcset") {
assetUrlsPromises.push(
(async () => {
const processedEncodedUrl = await processSrcSet(
p.value,
async ({ url }) => {
const decodedUrl = decodeURI(url);
if (!isExcludedUrl(decodedUrl)) {
const result = await processAssetUrl(url);
return result !== decodedUrl ? encodeURIPath(result) : url;
}
return url;
}
);
if (processedEncodedUrl !== p.value) {
overwriteAttrValue(
s,
getAttrSourceCodeLocation(node, attrKey),
processedEncodedUrl
);
}
})()
);
} else {
const url = decodeURI(p.value);
if (checkPublicFile(url, config)) {
overwriteAttrValue(
s,
getAttrSourceCodeLocation(node, attrKey),
partialEncodeURIPath(toOutputPublicFilePath(url))
);
} else if (!isExcludedUrl(url)) {
if (node.nodeName === "link" && isCSSRequest(url) && // should not be converted if following attributes are present (#6748)
!node.attrs.some(
(p2) => p2.prefix === void 0 && (p2.name === "media" || p2.name === "disabled")
)) {
const importExpression = `
import ${JSON.stringify(url)}`;
styleUrls.push({
url,
start: nodeStartWithLeadingWhitespace(node),
end: node.sourceCodeLocation.endOffset
});
js += importExpression;
} else {
const isNoInlineLink = node.nodeName === "link" && node.attrs.some(
(p2) => p2.name === "rel" && parseRelAttr(p2.value).some(
(v) => noInlineLinkRels.has(v)
)
);
const shouldInline = isNoInlineLink ? false : void 0;
assetUrlsPromises.push(
(async () => {
const processedUrl = await processAssetUrl(
url,
shouldInline
);
if (processedUrl !== url) {
overwriteAttrValue(
s,
getAttrSourceCodeLocation(node, attrKey),
partialEncodeURIPath(processedUrl)
);
}
})()
);
}
}
}
}
}
}
const inlineStyle = findNeedTransformStyleAttribute(node);
if (inlineStyle) {
inlineModuleIndex++;
const code = inlineStyle.attr.value;
const filePath = id.replace(normalizePath$3(config.root), "");
addToHTMLProxyCache(config, filePath, inlineModuleIndex, { code });
js += `
import "${id}?html-proxy&inline-css&style-attr&index=${inlineModuleIndex}.css"`;
const hash = getHash(cleanUrl(id));
overwriteAttrValue(
s,
inlineStyle.location,
`__VITE_INLINE_CSS__${hash}_${inlineModuleIndex}__`
);
}
if (node.nodeName === "style" && node.childNodes.length) {
const styleNode = node.childNodes.pop();
const filePath = id.replace(normalizePath$3(config.root), "");
inlineModuleIndex++;
addToHTMLProxyCache(config, filePath, inlineModuleIndex, {
code: styleNode.value
});
js += `
import "${id}?html-proxy&inline-css&index=${inlineModuleIndex}.css"`;
const hash = getHash(cleanUrl(id));
s.update(
styleNode.sourceCodeLocation.startOffset,
styleNode.sourceCodeLocation.endOffset,
`__VITE_INLINE_CSS__${hash}_${inlineModuleIndex}__`
);
}
if (shouldRemove) {
s.remove(
nodeStartWithLeadingWhitespace(node),
node.sourceCodeLocation.endOffset
);
}
});
isAsyncScriptMap.get(config).set(id, everyScriptIsAsync);
if (someScriptsAreAsync && someScriptsAreDefer) {
config.logger.warn(
`
Mixed async and defer script modules in ${id}; the output script will fall back to defer. Every script, including inline ones, needs to be marked as async for the output script to be async.`
);
}
await Promise.all(assetUrlsPromises);
for (const { start, end, url } of scriptUrls) {
if (checkPublicFile(url, config)) {
s.update(
start,
end,
partialEncodeURIPath(toOutputPublicFilePath(url))
);
} else if (!isExcludedUrl(url)) {
s.update(
start,
end,
partialEncodeURIPath(await urlToBuiltUrl(url, id, config, this))
);
}
}
const resolvedStyleUrls = await Promise.all(
styleUrls.map(async (styleUrl) => ({
...styleUrl,
resolved: await this.resolve(styleUrl.url, id)
}))
);
for (const { start, end, url, resolved } of resolvedStyleUrls) {
if (resolved == null) {
config.logger.warnOnce(
`
${url} doesn't exist at build time; it will remain unchanged and be resolved at runtime`
);
const importExpression = `
import ${JSON.stringify(url)}`;
js = js.replace(importExpression, "");
} else {
s.remove(start, end);
}
}
processedHtml.set(id, s.toString());
const { modulePreload } = config.build;
if (modulePreload !== false && modulePreload.polyfill && (someScriptsAreAsync || someScriptsAreDefer)) {
js = `import "${modulePreloadPolyfillId}";
${js}`;
}
return { code: js, moduleSideEffects: "no-treeshake" };
}
},
async generateBundle(options, bundle) {
const analyzedChunk = /* @__PURE__ */ new Map();
const inlineEntryChunk = /* @__PURE__ */ new Set();
const getImportedChunks = (chunk, seen = /* @__PURE__ */ new Set()) => {
const chunks = [];
chunk.imports.forEach((file) => {
const importee = bundle[file];
if (importee?.type === "chunk" && !seen.has(file)) {
seen.add(file);
chunks.push(...getImportedChunks(importee, seen));
chunks.push(importee);
}
});
return chunks;
};
const toScriptTag = (chunk, toOutputPath, isAsync) => ({
tag: "script",
attrs: {
...isAsync ? { async: true } : {},
type: "module",
// crossorigin must be set not only for serving assets in a different origin
// but also to make it possible to preload the script using `<link rel="preload">`.
// `<script type="module">` used to fetch the script with credential mode `omit`,
// however `crossorigin` attribute cannot specify that value.
// https://developer.chrome.com/blog/modulepreload/#ok-so-why-doesnt-link-relpreload-work-for-modules:~:text=For%20%3Cscript%3E,of%20other%20modules.
// Now `<script type="module">` uses `same origin`: https://github.com/whatwg/html/pull/3656#:~:text=Module%20scripts%20are%20always%20fetched%20with%20credentials%20mode%20%22same%2Dorigin%22%20by%20default%20and%20can%20no%20longer%0Ause%20%22omit%22
crossorigin: true,
src: toOutputPath(chunk.fileName)
}
});
const toPreloadTag = (filename, toOutputPath) => ({
tag: "link",
attrs: {
rel: "modulepreload",
crossorigin: true,
href: toOutputPath(filename)
}
});
const getCssTagsForChunk = (chunk, toOutputPath, seen = /* @__PURE__ */ new Set()) => {
const tags = [];
if (!analyzedChunk.has(chunk)) {
analyzedChunk.set(chunk, 1);
chunk.imports.forEach((file) => {
const importee = bundle[file];
if (importee?.type === "chunk") {
tags.push(...getCssTagsForChunk(importee, toOutputPath, seen));
}
});
}
chunk.viteMetadata.importedCss.forEach((file) => {
if (!seen.has(file)) {
seen.add(file);
tags.push({
tag: "link",
attrs: {
rel: "stylesheet",
crossorigin: true,
href: toOutputPath(file)
}
});
}
});
return tags;
};
for (const [normalizedId, html] of processedHtml) {
const relativeUrlPath = normalizePath$3(
path$n.relative(config.root, normalizedId)
);
const assetsBase = getBaseInHTML(relativeUrlPath, config);
const toOutputFilePath = (filename, type) => {
if (isExternalUrl(filename)) {
return filename;
} else {
return toOutputFilePathInHtml(
filename,
type,
relativeUrlPath,
"html",
config,
(filename2, importer) => assetsBase + filename2
);
}
};
const toOutputAssetFilePath = (filename) => toOutputFilePath(filename, "asset");
const toOutputPublicAssetFilePath = (filename) => toOutputFilePath(filename, "public");
const isAsync = isAsyncScriptMap.get(config).get(normalizedId);
let result = html;
const chunk = Object.values(bundle).find(
(chunk2) => chunk2.type === "chunk" && chunk2.isEntry && chunk2.facadeModuleId && normalizePath$3(chunk2.facadeModuleId) === normalizedId
);
let canInlineEntry = false;
if (chunk) {
if (options.format === "es" && isEntirelyImport(chunk.code)) {
canInlineEntry = true;
}
const imports = getImportedChunks(chunk);
let assetTags;
if (canInlineEntry) {
assetTags = imports.map(
(chunk2) => toScriptTag(chunk2, toOutputAssetFilePath, isAsync)
);
} else {
assetTags = [toScriptTag(chunk, toOutputAssetFilePath, isAsync)];
const { modulePreload } = config.build;
if (modulePreload !== false) {
const resolveDependencies = typeof modulePreload === "object" && modulePreload.resolveDependencies;
const importsFileNames = imports.map((chunk2) => chunk2.fileName);
const resolvedDeps = resolveDependencies ? resolveDependencies(chunk.fileName, importsFileNames, {
hostId: relativeUrlPath,
hostType: "html"
}) : importsFileNames;
assetTags.push(
...resolvedDeps.map(
(i) => toPreloadTag(i, toOutputAssetFilePath)
)
);
}
}
assetTags.push(...getCssTagsForChunk(chunk, toOutputAssetFilePath));
result = injectToHead(result, assetTags);
}
if (!config.build.cssCodeSplit) {
const cssChunk = Object.values(bundle).find(
(chunk2) => chunk2.type === "asset" && chunk2.name === "style.css"
);
if (cssChunk) {
result = injectToHead(result, [
{
tag: "link",
attrs: {
rel: "stylesheet",
crossorigin: true,
href: toOutputAssetFilePath(cssChunk.fileName)
}
}
]);
}
}
let match;
let s;
inlineCSSRE$1.lastIndex = 0;
while (match = inlineCSSRE$1.exec(result)) {
s ||= new MagicString(result);
const { 0: full, 1: scopedName } = match;
const cssTransformedCode = htmlProxyResult.get(scopedName);
s.update(match.index, match.index + full.length, cssTransformedCode);
}
if (s) {
result = s.toString();
}
result = await applyHtmlTransforms(
result,
[...normalHooks, ...postHooks],
{
path: "/" + relativeUrlPath,
filename: normalizedId,
bundle,
chunk
}
);
result = result.replace(assetUrlRE, (_, fileHash, postfix = "") => {
const file = this.getFileName(fileHash);
if (chunk) {
chunk.viteMetadata.importedAssets.add(cleanUrl(file));
}
return encodeURIPath(toOutputAssetFilePath(file)) + postfix;
});
result = result.replace(publicAssetUrlRE, (_, fileHash) => {
const publicAssetPath = toOutputPublicAssetFilePath(
getPublicAssetFilename(fileHash, config)
);
return encodeURIPath(
urlCanParse(publicAssetPath) ? publicAssetPath : normalizePath$3(publicAssetPath)
);
});
if (chunk && canInlineEntry) {
inlineEntryChunk.add(chunk.fileName);
}
const shortEmitName = normalizePath$3(
path$n.relative(config.root, normalizedId)
);
this.emitFile({
type: "asset",
originalFileName: normalizedId,
fileName: shortEmitName,
source: result
});
}
for (const fileName of inlineEntryChunk) {
delete bundle[fileName];
}
}
};
}
function parseRelAttr(attr) {
return attr.split(spaceRe).map((v) => v.toLowerCase());
}
function findNeedTransformStyleAttribute(node) {
const attr = node.attrs.find(
(prop) => prop.prefix === void 0 && prop.name === "style" && // only url(...) or image-set(...) in css need to emit file
(prop.value.includes("url(") || prop.value.includes("image-set("))
);
if (!attr) return void 0;
const location = node.sourceCodeLocation?.attrs?.["style"];
return { attr, location };
}
function extractImportExpressionFromClassicScript(scriptTextNode) {
const startOffset = scriptTextNode.sourceCodeLocation.startOffset;
const cleanCode = stripLiteral(scriptTextNode.value);
const scriptUrls = [];
let match;
inlineImportRE.lastIndex = 0;
while (match = inlineImportRE.exec(cleanCode)) {
const [, [urlStart, urlEnd]] = match.indices;
const start = urlStart + 1;
const end = urlEnd - 1;
scriptUrls.push({
start: start + startOffset,
end: end + startOffset,
url: scriptTextNode.value.slice(start, end)
});
}
return scriptUrls;
}
function preImportMapHook(config) {
return (html, ctx) => {
const importMapIndex = html.search(importMapRE);
if (importMapIndex < 0) return;
const importMapAppendIndex = html.search(importMapAppendRE);
if (importMapAppendIndex < 0) return;
if (importMapAppendIndex < importMapIndex) {
const relativeHtml = normalizePath$3(
path$n.relative(config.root, ctx.filename)
);
config.logger.warnOnce(
colors$1.yellow(
colors$1.bold(
`(!) <script type="importmap"> should come before <script type="module"> and <link rel="modulepreload"> in /${relativeHtml}`
)
)
);
}
};
}
function postImportMapHook() {
return (html) => {
if (!importMapAppendRE.test(html)) return;
let importMap;
html = html.replace(importMapRE, (match) => {
importMap = match;
return "";
});
if (importMap) {
html = html.replace(
importMapAppendRE,
(match) => `${importMap}
${match}`
);
}
return html;
};
}
function injectCspNonceMetaTagHook(config) {
return () => {
if (!config.html?.cspNonce) return;
return [
{
tag: "meta",
injectTo: "head",
// use nonce attribute so that it's hidden
// https://developer.mozilla.org/en-US/docs/Web/HTML/Global_attributes/nonce#accessing_nonces_and_nonce_hiding
attrs: { property: "csp-nonce", nonce: config.html.cspNonce }
}
];
};
}
function htmlEnvHook(config) {
const pattern = /%(\S+?)%/g;
const envPrefix = resolveEnvPrefix({ envPrefix: config.envPrefix });
const env = { ...config.env };
for (const key in config.define) {
if (key.startsWith(`import.meta.env.`)) {
const val = config.define[key];
if (typeof val === "string") {
try {
const parsed = JSON.parse(val);
env[key.slice(16)] = typeof parsed === "string" ? parsed : val;
} catch {
env[key.slice(16)] = val;
}
} else {
env[key.slice(16)] = JSON.stringify(val);
}
}
}
return (html, ctx) => {
return html.replace(pattern, (text, key) => {
if (key in env) {
return env[key];
} else {
if (envPrefix.some((prefix) => key.startsWith(prefix))) {
const relativeHtml = normalizePath$3(
path$n.relative(config.root, ctx.filename)
);
config.logger.warn(
colors$1.yellow(
colors$1.bold(
`(!) ${text} is not defined in env variables found in /${relativeHtml}. Is the variable mistyped?`
)
)
);
}
return text;
}
});
};
}
function injectNonceAttributeTagHook(config) {
const processRelType = /* @__PURE__ */ new Set(["stylesheet", "modulepreload", "preload"]);
return async (html, { filename }) => {
const nonce = config.html?.cspNonce;
if (!nonce) return;
const s = new MagicString(html);
await traverseHtml(html, filename, (node) => {
if (!nodeIsElement(node)) {
return;
}
const { nodeName, attrs, sourceCodeLocation } = node;
if (nodeName === "script" || nodeName === "style" || nodeName === "link" && attrs.some(
(attr) => attr.name === "rel" && parseRelAttr(attr.value).some((a) => processRelType.has(a))
)) {
if (attrs.some(({ name }) => name === "nonce")) {
return;
}
const startTagEndOffset = sourceCodeLocation.startTag.endOffset;
const appendOffset = html[startTagEndOffset - 2] === "/" ? 2 : 1;
s.appendRight(startTagEndOffset - appendOffset, ` nonce="${nonce}"`);
}
});
return s.toString();
};
}
function resolveHtmlTransforms(plugins, logger) {
const preHooks = [];
const normalHooks = [];
const postHooks = [];
for (const plugin of plugins) {
const hook = plugin.transformIndexHtml;
if (!hook) continue;
if (typeof hook === "function") {
normalHooks.push(hook);
} else {
if (!("order" in hook) && "enforce" in hook) {
logger.warnOnce(
colors$1.yellow(
`plugin '${plugin.name}' uses deprecated 'enforce' option. Use 'order' option instead.`
)
);
}
if (!("handler" in hook) && "transform" in hook) {
logger.warnOnce(
colors$1.yellow(
`plugin '${plugin.name}' uses deprecated 'transform' option. Use 'handler' option instead.`
)
);
}
const order = hook.order ?? (hook.enforce === "pre" ? "pre" : void 0);
const handler = hook.handler ?? hook.transform;
if (order === "pre") {
preHooks.push(handler);
} else if (order === "post") {
postHooks.push(handler);
} else {
normalHooks.push(handler);
}
}
}
return [preHooks, normalHooks, postHooks];
}
const elementsAllowedInHead = /* @__PURE__ */ new Set([
"title",
"base",
"link",
"style",
"meta",
"script",
"noscript",
"template"
]);
function headTagInsertCheck(tags, ctx) {
if (!tags.length) return;
const { logger } = ctx.server?.config || {};
const disallowedTags = tags.filter(
(tagDescriptor) => !elementsAllowedInHead.has(tagDescriptor.tag)
);
if (disallowedTags.length) {
const dedupedTags = unique(
disallowedTags.map((tagDescriptor) => `<${tagDescriptor.tag}>`)
);
logger?.warn(
colors$1.yellow(
colors$1.bold(
`[${dedupedTags.join(",")}] cannot be used inside the <head> element; please check the 'injectTo' value`
)
)
);
}
}
async function applyHtmlTransforms(html, hooks, ctx) {
for (const hook of hooks) {
const res = await hook(html, ctx);
if (!res) {
continue;
}
if (typeof res === "string") {
html = res;
} else {
let tags;
if (Array.isArray(res)) {
tags = res;
} else {
html = res.html || html;
tags = res.tags;
}
let headTags;
let headPrependTags;
let bodyTags;
let bodyPrependTags;
for (const tag of tags) {
switch (tag.injectTo) {
case "body":
(bodyTags ??= []).push(tag);
break;
case "body-prepend":
(bodyPrependTags ??= []).push(tag);
break;
case "head":
(headTags ??= []).push(tag);
break;
default:
(headPrependTags ??= []).push(tag);
}
}
headTagInsertCheck([...headTags || [], ...headPrependTags || []], ctx);
if (headPrependTags) html = injectToHead(html, headPrependTags, true);
if (headTags) html = injectToHead(html, headTags);
if (bodyPrependTags) html = injectToBody(html, bodyPrependTags, true);
if (bodyTags) html = injectToBody(html, bodyTags);
}
}
return html;
}
const importRE = /\bimport\s*(?:"[^"]*[^\\]"|'[^']*[^\\]');*/g;
const commentRE$1 = /\/\*[\s\S]*?\*\/|\/\/.*$/gm;
function isEntirelyImport(code) {
return !code.replace(importRE, "").replace(commentRE$1, "").trim().length;
}
function getBaseInHTML(urlRelativePath, config) {
return config.base === "./" || config.base === "" ? path$n.posix.join(
path$n.posix.relative(urlRelativePath, "").slice(0, -2),
"./"
) : config.base;
}
const headInjectRE = /([ \t]*)<\/head>/i;
const headPrependInjectRE = /([ \t]*)<head[^>]*>/i;
const htmlInjectRE = /<\/html>/i;
const htmlPrependInjectRE = /([ \t]*)<html[^>]*>/i;
const bodyInjectRE = /([ \t]*)<\/body>/i;
const bodyPrependInjectRE = /([ \t]*)<body[^>]*>/i;
const doctypePrependInjectRE = /<!doctype html>/i;
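// Tag injection helpers: injectToHead appends serialized tags just before
// </head> (or right after <head> when prepend is true), reusing the existing
// indentation; injectToBody does the same against <body>, and both fall back
// to prependInjectFallback when no anchor tag exists in the document.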
function injectToHead(html, tags, prepend = false) {
if (tags.length === 0) return html;
if (prepend) {
if (headPrependInjectRE.test(html)) {
return html.replace(
headPrependInjectRE,
(match, p1) => `${match}
${serializeTags(tags, incrementIndent(p1))}`
);
}
} else {
if (headInjectRE.test(html)) {
return html.replace(
headInjectRE,
(match, p1) => `${serializeTags(tags, incrementIndent(p1))}${match}`
);
}
if (bodyPrependInjectRE.test(html)) {
return html.replace(
bodyPrependInjectRE,
(match, p1) => `${serializeTags(tags, p1)}
${match}`
);
}
}
return prependInjectFallback(html, tags);
}
function injectToBody(html, tags, prepend = false) {
if (tags.length === 0) return html;
if (prepend) {
if (bodyPrependInjectRE.test(html)) {
return html.replace(
bodyPrependInjectRE,
(match, p1) => `${match}
${serializeTags(tags, incrementIndent(p1))}`
);
}
if (headInjectRE.test(html)) {
return html.replace(
headInjectRE,
(match, p1) => `${match}
${serializeTags(tags, p1)}`
);
}
return prependInjectFallback(html, tags);
} else {
if (bodyInjectRE.test(html)) {
return html.replace(
bodyInjectRE,
(match, p1) => `${serializeTags(tags, incrementIndent(p1))}${match}`
);
}
if (htmlInjectRE.test(html)) {
return html.replace(htmlInjectRE, `${serializeTags(tags)}
$&`);
}
return html + `
` + serializeTags(tags);
}
}
function prependInjectFallback(html, tags) {
if (htmlPrependInjectRE.test(html)) {
return html.replace(htmlPrependInjectRE, `$&
${serializeTags(tags)}`);
}
if (doctypePrependInjectRE.test(html)) {
return html.replace(doctypePrependInjectRE, `$&
${serializeTags(tags)}`);
}
return serializeTags(tags) + html;
}
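// Serialization of injected tag descriptors: void elements (link/meta/base) get
// no closing tag, boolean attrs render as bare names, and other attr values are
// JSON.stringify'd so they come out double-quoted and escaped. For example
// (made-up attrs): serializeTag({ tag: "script", attrs: { async: true, src: "/a.js" } })
// yields '<script async src="/a.js"></script>'.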
const unaryTags = /* @__PURE__ */ new Set(["link", "meta", "base"]);
function serializeTag({ tag, attrs, children }, indent = "") {
if (unaryTags.has(tag)) {
return `<${tag}${serializeAttrs(attrs)}>`;
} else {
return `<${tag}${serializeAttrs(attrs)}>${serializeTags(
children,
incrementIndent(indent)
)}</${tag}>`;
}
}
function serializeTags(tags, indent = "") {
if (typeof tags === "string") {
return tags;
} else if (tags && tags.length) {
return tags.map((tag) => `${indent}${serializeTag(tag, indent)}
`).join("");
}
return "";
}
function serializeAttrs(attrs) {
let res = "";
for (const key in attrs) {
if (typeof attrs[key] === "boolean") {
res += attrs[key] ? ` ${key}` : ``;
} else {
res += ` ${key}=${JSON.stringify(attrs[key])}`;
}
}
return res;
}
function incrementIndent(indent = "") {
return `${indent}${indent[0] === "\t" ? "\t" : "  "}`;
}
function getAttrKey(attr) {
return attr.prefix === void 0 ? attr.name : `${attr.prefix}:${attr.name}`;
}
function getAttrSourceCodeLocation(node, attrKey) {
return node.sourceCodeLocation.attrs[attrKey];
}
const decoder = new TextDecoder();
function resolveCSSOptions(options) {
if (options?.transformer === "lightningcss") {
return {
...options,
lightningcss: {
...options.lightningcss,
targets: options.lightningcss?.targets ?? convertTargets(ESBUILD_MODULES_TARGET)
}
};
}
return { ...options, lightningcss: void 0 };
}
const cssModuleRE = new RegExp(`\\.module${CSS_LANGS_RE.source}`);
const directRequestRE = /[?&]direct\b/;
const htmlProxyRE = /[?&]html-proxy\b/;
const htmlProxyIndexRE = /&index=(\d+)/;
const commonjsProxyRE = /\?commonjs-proxy/;
const inlineRE$1 = /[?&]inline\b/;
const inlineCSSRE = /[?&]inline-css\b/;
const styleAttrRE = /[?&]style-attr\b/;
const functionCallRE = /^[A-Z_][\w-]*\(/i;
const transformOnlyRE = /[?&]transform-only\b/;
const nonEscapedDoubleQuoteRe = /(?<!\\)"/g;
const cssBundleName = "style.css";
const isCSSRequest = (request) => CSS_LANGS_RE.test(request);
const isModuleCSSRequest = (request) => cssModuleRE.test(request);
const isDirectCSSRequest = (request) => CSS_LANGS_RE.test(request) && directRequestRE.test(request);
const isDirectRequest = (request) => directRequestRE.test(request);
const cssModulesCache = /* @__PURE__ */ new WeakMap();
const removedPureCssFilesCache = /* @__PURE__ */ new WeakMap();
const postcssConfigCache = /* @__PURE__ */ new WeakMap();
function encodePublicUrlsInCSS(config) {
return config.command === "build";
}
const cssUrlAssetRE = /__VITE_CSS_URL__([\da-f]+)__/g;
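// Main CSS plugin: loads ?url CSS requests as asset placeholders, compiles
// each stylesheet through compileCSS() with the shared pre-processor worker
// controller, caches CSS-module exports, and registers extra watch files for
// every dependency reported by the compiler.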
function cssPlugin(config) {
const isBuild = config.command === "build";
let moduleCache;
const resolveUrl = config.createResolver({
preferRelative: true,
tryIndex: false,
extensions: []
});
let preprocessorWorkerController;
if (config.css?.transformer !== "lightningcss") {
resolvePostcssConfig(config);
}
return {
name: "vite:css",
buildStart() {
moduleCache = /* @__PURE__ */ new Map();
cssModulesCache.set(config, moduleCache);
removedPureCssFilesCache.set(config, /* @__PURE__ */ new Map());
preprocessorWorkerController = createPreprocessorWorkerController(
normalizeMaxWorkers(config.css.preprocessorMaxWorkers)
);
preprocessorWorkerControllerCache.set(
config,
preprocessorWorkerController
);
},
buildEnd() {
preprocessorWorkerController?.close();
},
async load(id) {
if (!isCSSRequest(id)) return;
if (urlRE.test(id)) {
if (isModuleCSSRequest(id)) {
throw new Error(
`?url is not supported with CSS modules. (tried to import ${JSON.stringify(
id
)})`
);
}
if (isBuild) {
id = injectQuery(removeUrlQuery(id), "transform-only");
return `import ${JSON.stringify(id)};export default "__VITE_CSS_URL__${Buffer.from(id).toString(
"hex"
)}__"`;
}
}
},
async transform(raw, id) {
if (!isCSSRequest(id) || commonjsProxyRE.test(id) || SPECIAL_QUERY_RE.test(id)) {
return;
}
const urlReplacer = async (url, importer) => {
const decodedUrl = decodeURI(url);
if (checkPublicFile(decodedUrl, config)) {
if (encodePublicUrlsInCSS(config)) {
return publicFileToBuiltUrl(decodedUrl, config);
} else {
return joinUrlSegments(config.base, decodedUrl);
}
}
const [id2, fragment] = decodedUrl.split("#");
let resolved = await resolveUrl(id2, importer);
if (resolved) {
if (fragment) resolved += "#" + fragment;
return fileToUrl$1(resolved, config, this);
}
if (config.command === "build") {
const isExternal = config.build.rollupOptions.external ? resolveUserExternal(
config.build.rollupOptions.external,
decodedUrl,
// use URL as id since id could not be resolved
id2,
false
) : false;
if (!isExternal) {
config.logger.warnOnce(
`
${decodedUrl} referenced in ${id2} didn't resolve at build time; it will remain unchanged and be resolved at runtime`
);
}
}
return url;
};
const {
code: css,
modules,
deps,
map: map2
} = await compileCSS(
id,
raw,
config,
preprocessorWorkerController,
urlReplacer
);
if (modules) {
moduleCache.set(id, modules);
}
if (deps) {
for (const file of deps) {
this.addWatchFile(file);
}
}
return {
code: css,
map: map2
};
}
};
}
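// Post plugin used during build: it buffers the CSS produced for each module,
// then in renderChunk()/generateBundle() either emits per-chunk code-split CSS
// assets or a single style.css bundle, and removes chunks that became empty
// once their CSS was extracted.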
function cssPostPlugin(config) {
const styles = /* @__PURE__ */ new Map();
let codeSplitEmitQueue = createSerialPromiseQueue();
const urlEmitQueue = createSerialPromiseQueue();
let pureCssChunks;
let hasEmitted = false;
let chunkCSSMap;
const rollupOptionsOutput = config.build.rollupOptions.output;
const assetFileNames = (Array.isArray(rollupOptionsOutput) ? rollupOptionsOutput[0] : rollupOptionsOutput)?.assetFileNames;
const getCssAssetDirname = (cssAssetName) => {
const cssAssetNameDir = path$n.dirname(cssAssetName);
if (!assetFileNames) {
return path$n.join(config.build.assetsDir, cssAssetNameDir);
} else if (typeof assetFileNames === "string") {
return path$n.join(path$n.dirname(assetFileNames), cssAssetNameDir);
} else {
return path$n.dirname(
assetFileNames({
type: "asset",
name: cssAssetName,
originalFileName: null,
source: "/* vite internal call, ignore */"
})
);
}
};
return {
name: "vite:css-post",
renderStart() {
pureCssChunks = /* @__PURE__ */ new Set();
hasEmitted = false;
chunkCSSMap = /* @__PURE__ */ new Map();
codeSplitEmitQueue = createSerialPromiseQueue();
},
async transform(css, id, options) {
if (!isCSSRequest(id) || commonjsProxyRE.test(id) || SPECIAL_QUERY_RE.test(id)) {
return;
}
css = stripBomTag(css);
const inlineCSS = inlineCSSRE.test(id);
const isHTMLProxy = htmlProxyRE.test(id);
if (inlineCSS && isHTMLProxy) {
if (styleAttrRE.test(id)) {
css = css.replace(/"/g, "&quot;");
}
const index = htmlProxyIndexRE.exec(id)?.[1];
if (index == null) {
throw new Error(`HTML proxy index in "${id}" not found`);
}
addToHTMLProxyTransformResult(
`${getHash(cleanUrl(id))}_${Number.parseInt(index)}`,
css
);
return `export default ''`;
}
const inlined = inlineRE$1.test(id);
const modules = cssModulesCache.get(config).get(id);
const modulesCode = modules && !inlined && dataToEsm(modules, { namedExports: true, preferConst: true });
if (config.command === "serve") {
const getContentWithSourcemap = async (content) => {
if (config.css?.devSourcemap) {
const sourcemap = this.getCombinedSourcemap();
if (sourcemap.mappings) {
await injectSourcesContent(sourcemap, cleanUrl(id), config.logger);
}
return getCodeWithSourcemap("css", content, sourcemap);
}
return content;
};
if (isDirectCSSRequest(id)) {
return null;
}
if (options?.ssr) {
return modulesCode || `export default ${JSON.stringify(css)}`;
}
if (inlined) {
return `export default ${JSON.stringify(css)}`;
}
const cssContent = await getContentWithSourcemap(css);
const code2 = [
`import { updateStyle as __vite__updateStyle, removeStyle as __vite__removeStyle } from ${JSON.stringify(
path$n.posix.join(config.base, CLIENT_PUBLIC_PATH)
)}`,
`const __vite__id = ${JSON.stringify(id)}`,
`const __vite__css = ${JSON.stringify(cssContent)}`,
`__vite__updateStyle(__vite__id, __vite__css)`,
// css modules exports change on edit so it can't self accept
`${modulesCode || "import.meta.hot.accept()"}`,
`import.meta.hot.prune(() => __vite__removeStyle(__vite__id))`
].join("\n");
return { code: code2, map: { mappings: "" } };
}
if (!inlined) {
styles.set(id, css);
}
let code;
if (modulesCode) {
code = modulesCode;
} else if (inlined) {
let content = css;
if (config.build.cssMinify) {
content = await minifyCSS(content, config, true);
}
code = `export default ${JSON.stringify(content)}`;
} else {
code = "";
}
return {
code,
map: { mappings: "" },
// prevent the css module from being tree-shaken so that we can retrieve
// it in renderChunk()
moduleSideEffects: modulesCode || inlined ? false : "no-treeshake"
};
},
async renderChunk(code, chunk, opts) {
let chunkCSS = "";
const isJsChunkEmpty = code === "" && !chunk.isEntry;
let isPureCssChunk = true;
const ids = Object.keys(chunk.modules);
for (const id of ids) {
if (styles.has(id)) {
if (!transformOnlyRE.test(id)) {
chunkCSS += styles.get(id);
if (cssModuleRE.test(id)) {
isPureCssChunk = false;
}
}
} else if (!isJsChunkEmpty) {
isPureCssChunk = false;
}
}
const publicAssetUrlMap = publicAssetUrlCache.get(config);
const resolveAssetUrlsInCss = (chunkCSS2, cssAssetName) => {
const encodedPublicUrls = encodePublicUrlsInCSS(config);
const relative = config.base === "./" || config.base === "";
const cssAssetDirname = encodedPublicUrls || relative ? slash$1(getCssAssetDirname(cssAssetName)) : void 0;
const toRelative = (filename) => {
const relativePath = normalizePath$3(
path$n.relative(cssAssetDirname, filename)
);
return relativePath[0] === "." ? relativePath : "./" + relativePath;
};
chunkCSS2 = chunkCSS2.replace(assetUrlRE, (_, fileHash, postfix = "") => {
const filename = this.getFileName(fileHash) + postfix;
chunk.viteMetadata.importedAssets.add(cleanUrl(filename));
return encodeURIPath(
toOutputFilePathInCss(
filename,
"asset",
cssAssetName,
"css",
config,
toRelative
)
);
});
if (encodedPublicUrls) {
const relativePathToPublicFromCSS = normalizePath$3(
path$n.relative(cssAssetDirname, "")
);
chunkCSS2 = chunkCSS2.replace(publicAssetUrlRE, (_, hash) => {
const publicUrl = publicAssetUrlMap.get(hash).slice(1);
return encodeURIPath(
toOutputFilePathInCss(
publicUrl,
"public",
cssAssetName,
"css",
config,
() => `${relativePathToPublicFromCSS}/${publicUrl}`
)
);
});
}
return chunkCSS2;
};
function ensureFileExt(name, ext) {
return normalizePath$3(
path$n.format({ ...path$n.parse(name), base: void 0, ext })
);
}
let s;
const urlEmitTasks = [];
if (code.includes("__VITE_CSS_URL__")) {
let match;
cssUrlAssetRE.lastIndex = 0;
while (match = cssUrlAssetRE.exec(code)) {
const [full, idHex] = match;
const id = Buffer.from(idHex, "hex").toString();
const originalFileName = cleanUrl(id);
const cssAssetName = ensureFileExt(
path$n.basename(originalFileName),
".css"
);
if (!styles.has(id)) {
throw new Error(
`css content for ${JSON.stringify(id)} was not found`
);
}
let cssContent = styles.get(id);
cssContent = resolveAssetUrlsInCss(cssContent, cssAssetName);
urlEmitTasks.push({
cssAssetName,
originalFileName,
content: cssContent,
start: match.index,
end: match.index + full.length
});
}
}
await urlEmitQueue.run(
async () => Promise.all(
urlEmitTasks.map(async (info) => {
info.content = await finalizeCss(info.content, true, config);
})
)
);
if (urlEmitTasks.length > 0) {
const toRelativeRuntime = createToImportMetaURLBasedRelativeRuntime(
opts.format,
config.isWorker
);
s ||= new MagicString(code);
for (const {
cssAssetName,
originalFileName,
content,
start,
end
} of urlEmitTasks) {
const referenceId = this.emitFile({
type: "asset",
name: cssAssetName,
originalFileName,
source: content
});
generatedAssets.get(config).set(referenceId, { originalFileName });
const filename = this.getFileName(referenceId);
chunk.viteMetadata.importedAssets.add(cleanUrl(filename));
const replacement = toOutputFilePathInJS(
filename,
"asset",
chunk.fileName,
"js",
config,
toRelativeRuntime
);
const replacementString = typeof replacement === "string" ? JSON.stringify(encodeURIPath(replacement)).slice(1, -1) : `"+${replacement.runtime}+"`;
s.update(start, end, replacementString);
}
}
if (chunkCSS) {
if (isPureCssChunk && (opts.format === "es" || opts.format === "cjs")) {
pureCssChunks.add(chunk);
}
if (config.build.cssCodeSplit) {
if (opts.format === "es" || opts.format === "cjs") {
const isEntry = chunk.isEntry && isPureCssChunk;
const cssFullAssetName = ensureFileExt(chunk.name, ".css");
const cssAssetName = chunk.isEntry && (!chunk.facadeModuleId || !isCSSRequest(chunk.facadeModuleId)) ? path$n.basename(cssFullAssetName) : cssFullAssetName;
const originalFileName = getChunkOriginalFileName(
chunk,
config.root,
opts.format
);
chunkCSS = resolveAssetUrlsInCss(chunkCSS, cssAssetName);
chunkCSS = await codeSplitEmitQueue.run(async () => {
return finalizeCss(chunkCSS, true, config);
});
const referenceId = this.emitFile({
type: "asset",
name: cssAssetName,
originalFileName,
source: chunkCSS
});
generatedAssets.get(config).set(referenceId, { originalFileName, isEntry });
chunk.viteMetadata.importedCss.add(this.getFileName(referenceId));
} else if (!config.build.ssr) {
chunkCSS = await finalizeCss(chunkCSS, true, config);
let cssString = JSON.stringify(chunkCSS);
cssString = renderAssetUrlInJS(
this,
config,
chunk,
opts,
cssString
)?.toString() || cssString;
const style = `__vite_style__`;
const injectCode = `var ${style} = document.createElement('style');${style}.textContent = ${cssString};document.head.appendChild(${style});`;
let injectionPoint;
const wrapIdx = code.indexOf("System.register");
if (wrapIdx >= 0) {
const executeFnStart = code.indexOf("execute:", wrapIdx);
injectionPoint = code.indexOf("{", executeFnStart) + 1;
} else {
const insertMark = "'use strict';";
injectionPoint = code.indexOf(insertMark) + insertMark.length;
}
s ||= new MagicString(code);
s.appendRight(injectionPoint, injectCode);
}
} else {
chunkCSS = resolveAssetUrlsInCss(chunkCSS, cssBundleName);
chunkCSSMap.set(chunk.fileName, chunkCSS);
}
}
if (s) {
if (config.build.sourcemap) {
return {
code: s.toString(),
map: s.generateMap({ hires: "boundary" })
};
} else {
return { code: s.toString() };
}
}
return null;
},
augmentChunkHash(chunk) {
if (chunk.viteMetadata?.importedCss.size) {
let hash = "";
for (const id of chunk.viteMetadata.importedCss) {
hash += id;
}
return hash;
}
},
async generateBundle(opts, bundle) {
if (opts.__vite_skip_asset_emit__) {
return;
}
function extractCss() {
let css = "";
const collected = /* @__PURE__ */ new Set();
const dynamicImports = /* @__PURE__ */ new Set();
function collect(chunk) {
if (!chunk || chunk.type !== "chunk" || collected.has(chunk)) return;
collected.add(chunk);
chunk.imports.forEach((importName) => collect(bundle[importName]));
chunk.dynamicImports.forEach(
(importName) => dynamicImports.add(importName)
);
css += chunkCSSMap.get(chunk.preliminaryFileName) ?? "";
}
for (const chunk of Object.values(bundle)) {
if (chunk.type === "chunk" && chunk.isEntry) {
collect(chunk);
}
}
for (const chunkName of dynamicImports) {
collect(bundle[chunkName]);
}
return css;
}
let extractedCss = !hasEmitted && extractCss();
if (extractedCss) {
hasEmitted = true;
extractedCss = await finalizeCss(extractedCss, true, config);
this.emitFile({
name: cssBundleName,
type: "asset",
source: extractedCss
});
}
if (pureCssChunks.size) {
const preliminaryNameToChunkMap = Object.fromEntries(
Object.values(bundle).filter((chunk) => chunk.type === "chunk").map((chunk) => [chunk.preliminaryFileName, chunk.fileName])
);
const pureCssChunkNames = [...pureCssChunks].map((pureCssChunk) => preliminaryNameToChunkMap[pureCssChunk.fileName]).filter(Boolean);
const replaceEmptyChunk = getEmptyChunkReplacer(
pureCssChunkNames,
opts.format
);
for (const file in bundle) {
const chunk = bundle[file];
if (chunk.type === "chunk") {
let chunkImportsPureCssChunk = false;
chunk.imports = chunk.imports.filter((file2) => {
if (pureCssChunkNames.includes(file2)) {
const { importedCss, importedAssets } = bundle[file2].viteMetadata;
importedCss.forEach(
(file3) => chunk.viteMetadata.importedCss.add(file3)
);
importedAssets.forEach(
(file3) => chunk.viteMetadata.importedAssets.add(file3)
);
chunkImportsPureCssChunk = true;
return false;
}
return true;
});
if (chunkImportsPureCssChunk) {
chunk.code = replaceEmptyChunk(chunk.code);
}
}
}
const removedPureCssFiles = removedPureCssFilesCache.get(config);
pureCssChunkNames.forEach((fileName) => {
removedPureCssFiles.set(fileName, bundle[fileName]);
delete bundle[fileName];
delete bundle[`${fileName}.map`];
});
}
}
};
}
function cssAnalysisPlugin(config) {
let server;
return {
name: "vite:css-analysis",
configureServer(_server) {
server = _server;
},
async transform(_, id, options) {
if (!isCSSRequest(id) || commonjsProxyRE.test(id) || SPECIAL_QUERY_RE.test(id)) {
return;
}
const ssr = options?.ssr === true;
const { moduleGraph } = server;
const thisModule = moduleGraph.getModuleById(id);
if (thisModule) {
const isSelfAccepting = !cssModulesCache.get(config)?.get(id) && !inlineRE$1.test(id) && !htmlProxyRE.test(id);
const pluginImports = this._addedImports;
if (pluginImports) {
const depModules = /* @__PURE__ */ new Set();
for (const file of pluginImports) {
depModules.add(
isCSSRequest(file) ? moduleGraph.createFileOnlyEntry(file) : await moduleGraph.ensureEntryFromUrl(
fileToDevUrl(
file,
config,
/* skipBase */
true
),
ssr
)
);
}
moduleGraph.updateModuleInfo(
thisModule,
depModules,
null,
// The root CSS proxy module is self-accepting and should not
// have an explicit accept list
/* @__PURE__ */ new Set(),
null,
isSelfAccepting,
ssr
);
} else {
thisModule.isSelfAccepting = isSelfAccepting;
}
}
}
};
}
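// Builds a replacer that blanks out imports of pure-CSS chunks while keeping
// the replacement exactly as long as the original text, so source map offsets
// stay valid. Hedged example of the intended effect (file name is made up):
//   getEmptyChunkReplacer(["style-abc123.js"], "es")('import "./style-abc123.js";')
//   // => '/* empty css ... */' padded to the original statement's length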
function getEmptyChunkReplacer(pureCssChunkNames, outputFormat) {
const emptyChunkFiles = pureCssChunkNames.map((file) => path$n.basename(file)).join("|").replace(/\./g, "\\.");
const emptyChunkRE = new RegExp(
outputFormat === "es" ? `\\bimport\\s*["'][^"']*(?:${emptyChunkFiles})["'];` : `(\\b|,\\s*)require\\(\\s*["'][^"']*(?:${emptyChunkFiles})["']\\)(;|,)`,
"g"
);
return (code) => code.replace(
emptyChunkRE,
// remove css import while preserving source map location
(m) => outputFormat === "es" ? `/* empty css ${"".padEnd(m.length - 15)}*/` : `${m.at(-1)}/* empty css ${"".padEnd(m.length - 16)}*/`
);
}
function createCSSResolvers(config) {
let cssResolve;
let sassResolve;
let lessResolve;
return {
get css() {
return cssResolve || (cssResolve = config.createResolver({
extensions: [".css"],
mainFields: ["style"],
conditions: ["style"],
tryIndex: false,
preferRelative: true
}));
},
get sass() {
return sassResolve || (sassResolve = config.createResolver({
extensions: [".scss", ".sass", ".css"],
mainFields: ["sass", "style"],
conditions: ["sass", "style"],
tryIndex: true,
tryPrefix: "_",
preferRelative: true
}));
},
get less() {
return lessResolve || (lessResolve = config.createResolver({
extensions: [".less", ".css"],
mainFields: ["less", "style"],
conditions: ["less", "style"],
tryIndex: false,
preferRelative: true
}));
}
};
}
function getCssResolversKeys(resolvers) {
return Object.keys(resolvers);
}
async function compileCSSPreprocessors(id, lang, code, config, workerController) {
const { preprocessorOptions, devSourcemap } = config.css ?? {};
const atImportResolvers = getAtImportResolvers(config);
const preProcessor = workerController[lang];
let opts = preprocessorOptions && preprocessorOptions[lang] || {};
switch (lang) {
case "scss" /* scss */:
case "sass" /* sass */:
opts = {
includePaths: ["node_modules"],
alias: config.resolve.alias,
...opts
};
break;
case "less" /* less */:
case "styl" /* styl */:
case "stylus" /* stylus */:
opts = {
paths: ["node_modules"],
alias: config.resolve.alias,
...opts
};
}
opts.filename = cleanUrl(id);
opts.enableSourcemap = devSourcemap ?? false;
const preprocessResult = await preProcessor(
code,
config.root,
opts,
atImportResolvers
);
if (preprocessResult.error) {
throw preprocessResult.error;
}
let deps;
if (preprocessResult.deps) {
const normalizedFilename = normalizePath$3(opts.filename);
deps = new Set(
[...preprocessResult.deps].filter(
(dep) => normalizePath$3(dep) !== normalizedFilename
)
);
}
return {
code: preprocessResult.code,
map: combineSourcemapsIfExists(
opts.filename,
preprocessResult.map,
preprocessResult.additionalMap
),
deps
};
}
const configToAtImportResolvers = /* @__PURE__ */ new WeakMap();
function getAtImportResolvers(config) {
let atImportResolvers = configToAtImportResolvers.get(config);
if (!atImportResolvers) {
atImportResolvers = createCSSResolvers(config);
configToAtImportResolvers.set(config, atImportResolvers);
}
return atImportResolvers;
}
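// Full CSS compilation pipeline: delegates to Lightning CSS when configured,
// otherwise runs pre-processors, @import inlining, CSS modules and the URL
// rewriter through PostCSS, collecting dependencies, warnings and (in dev)
// combined source maps along the way.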
async function compileCSS(id, code, config, workerController, urlReplacer) {
if (config.css?.transformer === "lightningcss") {
return compileLightningCSS(id, code, config, urlReplacer);
}
const { modules: modulesOptions, devSourcemap } = config.css || {};
const isModule = modulesOptions !== false && cssModuleRE.test(id);
const needInlineImport = code.includes("@import");
const hasUrl = cssUrlRE.test(code) || cssImageSetRE.test(code);
const lang = CSS_LANGS_RE.exec(id)?.[1];
const postcssConfig = await resolvePostcssConfig(config);
if (lang === "css" && !postcssConfig && !isModule && !needInlineImport && !hasUrl) {
return { code, map: null };
}
let modules;
const deps = /* @__PURE__ */ new Set();
let preprocessorMap;
if (isPreProcessor(lang)) {
const preprocessorResult = await compileCSSPreprocessors(
id,
lang,
code,
config,
workerController
);
code = preprocessorResult.code;
preprocessorMap = preprocessorResult.map;
preprocessorResult.deps?.forEach((dep) => deps.add(dep));
}
const atImportResolvers = getAtImportResolvers(config);
const postcssOptions = postcssConfig && postcssConfig.options || {};
const postcssPlugins = postcssConfig && postcssConfig.plugins ? postcssConfig.plugins.slice() : [];
if (needInlineImport) {
postcssPlugins.unshift(
(await importPostcssImport()).default({
async resolve(id2, basedir) {
const publicFile = checkPublicFile(id2, config);
if (publicFile) {
return publicFile;
}
const resolved = await atImportResolvers.css(
id2,
path$n.join(basedir, "*")
);
if (resolved) {
return path$n.resolve(resolved);
}
if (!path$n.isAbsolute(id2)) {
config.logger.error(
colors$1.red(
`Unable to resolve \`@import "${id2}"\` from ${basedir}`
)
);
}
return id2;
},
async load(id2) {
const code2 = await fs__default.promises.readFile(id2, "utf-8");
const lang2 = CSS_LANGS_RE.exec(id2)?.[1];
if (isPreProcessor(lang2)) {
const result = await compileCSSPreprocessors(
id2,
lang2,
code2,
config,
workerController
);
result.deps?.forEach((dep) => deps.add(dep));
return result.code;
}
return code2;
},
nameLayer(index) {
return `vite--anon-layer-${getHash(id)}-${index}`;
}
})
);
}
if (urlReplacer) {
postcssPlugins.push(
UrlRewritePostcssPlugin({
replacer: urlReplacer,
logger: config.logger
})
);
}
if (isModule) {
postcssPlugins.unshift(
(await importPostcssModules()).default({
...modulesOptions,
localsConvention: modulesOptions?.localsConvention,
getJSON(cssFileName, _modules, outputFileName) {
modules = _modules;
if (modulesOptions && typeof modulesOptions.getJSON === "function") {
modulesOptions.getJSON(cssFileName, _modules, outputFileName);
}
},
async resolve(id2, importer) {
for (const key of getCssResolversKeys(atImportResolvers)) {
const resolved = await atImportResolvers[key](id2, importer);
if (resolved) {
return path$n.resolve(resolved);
}
}
return id2;
}
})
);
}
if (!postcssPlugins.length) {
return {
code,
map: preprocessorMap,
deps
};
}
let postcssResult;
try {
const source = removeDirectQuery(id);
const postcss = await importPostcss();
postcssResult = await postcss.default(postcssPlugins).process(code, {
...postcssOptions,
parser: lang === "sss" ? loadSss(config.root) : postcssOptions.parser,
to: source,
from: source,
...devSourcemap ? {
map: {
inline: false,
annotation: false,
// postcss may return virtual files
// we cannot obtain content of them, so this needs to be enabled
sourcesContent: true
// when "prev: preprocessorMap", the result map may include duplicate filename in `postcssResult.map.sources`
// prev: preprocessorMap,
}
} : {}
});
for (const message of postcssResult.messages) {
if (message.type === "dependency") {
deps.add(normalizePath$3(message.file));
} else if (message.type === "dir-dependency") {
const { dir, glob: globPattern = "**" } = message;
const pattern = glob.escapePath(normalizePath$3(path$n.resolve(path$n.dirname(id), dir))) + `/` + globPattern;
const files = glob.sync(pattern, {
ignore: ["**/node_modules/**"]
});
for (let i = 0; i < files.length; i++) {
deps.add(files[i]);
}
} else if (message.type === "warning") {
const warning = message;
let msg = `[vite:css] ${warning.text}`;
msg += `
${generateCodeFrame(
code,
{
line: warning.line,
column: warning.column - 1
// 1-based
},
warning.endLine !== void 0 && warning.endColumn !== void 0 ? {
line: warning.endLine,
column: warning.endColumn - 1
// 1-based
} : void 0
)}`;
config.logger.warn(colors$1.yellow(msg));
}
}
} catch (e) {
e.message = `[postcss] ${e.message}`;
e.code = code;
e.loc = {
file: e.file,
line: e.line,
column: e.column - 1
// 1-based
};
throw e;
}
if (!devSourcemap) {
return {
ast: postcssResult,
code: postcssResult.css,
map: { mappings: "" },
modules,
deps
};
}
const rawPostcssMap = postcssResult.map.toJSON();
const postcssMap = await formatPostcssSourceMap(
// version property of rawPostcssMap is declared as string
// but actually it is a number
rawPostcssMap,
cleanUrl(id)
);
return {
ast: postcssResult,
code: postcssResult.css,
map: combineSourcemapsIfExists(cleanUrl(id), postcssMap, preprocessorMap),
modules,
deps
};
}
function createCachedImport(imp) {
let cached;
return () => {
if (!cached) {
cached = imp().then((module) => {
cached = module;
return module;
});
}
return cached;
};
}
const importPostcssImport = createCachedImport(() => import('./dep-CHbHjMYU.js').then(function (n) { return n.i; }));
const importPostcssModules = createCachedImport(() => import('./dep-B8yndt7W.js').then(function (n) { return n.i; }));
const importPostcss = createCachedImport(() => import('postcss'));
const preprocessorWorkerControllerCache = /* @__PURE__ */ new WeakMap();
let alwaysFakeWorkerWorkerControllerCache;
async function preprocessCSS(code, filename, config) {
let workerController = preprocessorWorkerControllerCache.get(config);
if (!workerController) {
alwaysFakeWorkerWorkerControllerCache ||= createPreprocessorWorkerController(0);
workerController = alwaysFakeWorkerWorkerControllerCache;
}
return await compileCSS(filename, code, config, workerController);
}
async function formatPostcssSourceMap(rawMap, file) {
const inputFileDir = path$n.dirname(file);
const sources = rawMap.sources.map((source) => {
const cleanSource = cleanUrl(decodeURIComponent(source));
if (cleanSource[0] === "<" && cleanSource[cleanSource.length - 1] === ">") {
return `\0${cleanSource}`;
}
return normalizePath$3(path$n.resolve(inputFileDir, cleanSource));
});
return {
file,
mappings: rawMap.mappings,
names: rawMap.names,
sources,
sourcesContent: rawMap.sourcesContent,
version: rawMap.version
};
}
function combineSourcemapsIfExists(filename, map1, map2) {
return map1 && map2 ? combineSourcemaps(filename, [
// type of version property of ExistingRawSourceMap is number
// but it is always 3
map1,
map2
]) : map1;
}
async function finalizeCss(css, minify, config) {
if (css.includes("@import") || css.includes("@charset")) {
css = await hoistAtRules(css);
}
if (config.build.cssMinify) {
css = await minifyCSS(css, config, false);
}
return css;
}
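// Resolves the PostCSS config once per Vite config object: inline css.postcss
// options win; otherwise postcss-load-config searches from the project root
// (or the configured path), and the possibly-pending result is cached in
// postcssConfigCache so later calls reuse it.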
async function resolvePostcssConfig(config) {
let result = postcssConfigCache.get(config);
if (result !== void 0) {
return await result;
}
const inlineOptions = config.css?.postcss;
if (isObject$1(inlineOptions)) {
const options = { ...inlineOptions };
delete options.plugins;
result = {
options,
plugins: inlineOptions.plugins || []
};
} else {
const searchPath = typeof inlineOptions === "string" ? inlineOptions : config.root;
result = postcssrc({}, searchPath).catch((e) => {
if (!e.message.includes("No PostCSS Config found")) {
if (e instanceof Error) {
const { name, message, stack } = e;
e.name = "Failed to load PostCSS config";
e.message = `Failed to load PostCSS config (searchPath: ${searchPath}): [${name}] ${message}
${stack}`;
e.stack = "";
throw e;
} else {
throw new Error(`Failed to load PostCSS config: ${e}`);
}
}
return null;
});
result.then((resolved) => {
postcssConfigCache.set(config, resolved);
});
}
postcssConfigCache.set(config, result);
return result;
}
const cssUrlRE = /(?<=^|[^\w\-\u0080-\uffff])url\((\s*('[^']+'|"[^"]+")\s*|[^'")]+)\)/;
const cssDataUriRE = /(?<=^|[^\w\-\u0080-\uffff])data-uri\((\s*('[^']+'|"[^"]+")\s*|[^'")]+)\)/;
const importCssRE = /@import ('[^']+\.css'|"[^"]+\.css"|[^'")]+\.css)/;
const cssImageSetRE = /(?<=image-set\()((?:[\w\-]{1,256}\([^)]*\)|[^)])*)(?=\))/;
const UrlRewritePostcssPlugin = (opts) => {
if (!opts) {
throw new Error("base or replace is required");
}
return {
postcssPlugin: "vite-url-rewrite",
Once(root) {
const promises = [];
root.walkDecls((declaration) => {
const importer = declaration.source?.input.file;
if (!importer) {
opts.logger.warnOnce(
"\nA PostCSS plugin did not pass the `from` option to `postcss.parse`. This may cause imported assets to be incorrectly transformed. If you've recently added a PostCSS plugin that raised this warning, please contact the package author to fix the issue."
);
}
const isCssUrl = cssUrlRE.test(declaration.value);
const isCssImageSet = cssImageSetRE.test(declaration.value);
if (isCssUrl || isCssImageSet) {
const replacerForDeclaration = (rawUrl) => {
return opts.replacer(rawUrl, importer);
};
const rewriterToUse = isCssImageSet ? rewriteCssImageSet : rewriteCssUrls;
promises.push(
rewriterToUse(declaration.value, replacerForDeclaration).then(
(url) => {
declaration.value = url;
}
)
);
}
});
if (promises.length) {
return Promise.all(promises);
}
}
};
};
UrlRewritePostcssPlugin.postcss = true;
function rewriteCssUrls(css, replacer) {
return asyncReplace(css, cssUrlRE, async (match) => {
const [matched, rawUrl] = match;
return await doUrlReplace(rawUrl.trim(), matched, replacer);
});
}
function rewriteCssDataUris(css, replacer) {
return asyncReplace(css, cssDataUriRE, async (match) => {
const [matched, rawUrl] = match;
return await doUrlReplace(rawUrl.trim(), matched, replacer, "data-uri");
});
}
function rewriteImportCss(css, replacer) {
return asyncReplace(css, importCssRE, async (match) => {
const [matched, rawUrl] = match;
return await doImportCSSReplace(rawUrl, matched, replacer);
});
}
const cssNotProcessedRE = /(?:gradient|element|cross-fade|image)\(/;
async function rewriteCssImageSet(css, replacer) {
return await asyncReplace(css, cssImageSetRE, async (match) => {
const [, rawUrl] = match;
const url = await processSrcSet(rawUrl, async ({ url: url2 }) => {
if (cssUrlRE.test(url2)) {
return await rewriteCssUrls(url2, replacer);
}
if (!cssNotProcessedRE.test(url2)) {
return await doUrlReplace(url2, url2, replacer);
}
return url2;
});
return url;
});
}
function skipUrlReplacer(rawUrl) {
return isExternalUrl(rawUrl) || isDataUrl(rawUrl) || rawUrl[0] === "#" || functionCallRE.test(rawUrl);
}
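// Rewrites a single url()/data-uri() reference. External URLs, data URLs,
// hash fragments and function-call values are left untouched. The original
// quote style is preserved where possible; if the replaced URL would need
// escaping (it differs from encodeURI(url)) or conflicts with the chosen
// quote, the value is re-quoted with double quotes and inner double quotes
// are escaped.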
async function doUrlReplace(rawUrl, matched, replacer, funcName = "url") {
let wrap = "";
const first = rawUrl[0];
if (first === `"` || first === `'`) {
wrap = first;
rawUrl = rawUrl.slice(1, -1);
}
if (skipUrlReplacer(rawUrl)) {
return matched;
}
let newUrl = await replacer(rawUrl);
if (wrap === "" && newUrl !== encodeURI(newUrl)) {
wrap = '"';
}
if (wrap === "'" && newUrl.includes("'")) {
wrap = '"';
}
if (wrap === '"' && newUrl.includes('"')) {
newUrl = newUrl.replace(nonEscapedDoubleQuoteRe, '\\"');
}
return `${funcName}(${wrap}${newUrl}${wrap})`;
}
async function doImportCSSReplace(rawUrl, matched, replacer) {
let wrap = "";
const first = rawUrl[0];
if (first === `"` || first === `'`) {
wrap = first;
rawUrl = rawUrl.slice(1, -1);
}
if (isExternalUrl(rawUrl) || isDataUrl(rawUrl) || rawUrl[0] === "#") {
return matched;
}
return `@import ${wrap}${await replacer(rawUrl)}${wrap}`;
}
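// Minifies the final CSS with either Lightning CSS or esbuild, depending on
// build.cssMinify, logging any minifier warnings. For inlined CSS the
// trailing newline is dropped; otherwise it is kept.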
async function minifyCSS(css, config, inlined) {
if (config.build.cssMinify === "lightningcss") {
const { code, warnings } = (await importLightningCSS()).transform({
...config.css?.lightningcss,
targets: convertTargets(config.build.cssTarget),
cssModules: void 0,
filename: cssBundleName,
code: Buffer.from(css),
minify: true
});
if (warnings.length) {
config.logger.warn(
colors$1.yellow(
`warnings when minifying css:
${warnings.map((w) => w.message).join("\n")}`
)
);
}
return decoder.decode(code) + (inlined ? "" : "\n");
}
try {
const { code, warnings } = await transform$1(css, {
loader: "css",
target: config.build.cssTarget || void 0,
...resolveMinifyCssEsbuildOptions(config.esbuild || {})
});
if (warnings.length) {
const msgs = await formatMessages(warnings, { kind: "warning" });
config.logger.warn(
colors$1.yellow(`warnings when minifying css:
${msgs.join("\n")}`)
);
}
return inlined ? code.trimEnd() : code;
} catch (e) {
if (e.errors) {
e.message = "[esbuild css minify] " + e.message;
const msgs = await formatMessages(e.errors, { kind: "error" });
e.frame = "\n" + msgs.join("\n");
e.loc = e.errors[0].location;
}
throw e;
}
}
function resolveMinifyCssEsbuildOptions(options) {
const base = {
charset: options.charset ?? "utf8",
logLevel: options.logLevel,
logLimit: options.logLimit,
logOverride: options.logOverride,
legalComments: options.legalComments
};
if (options.minifyIdentifiers != null || options.minifySyntax != null || options.minifyWhitespace != null) {
return {
...base,
minifyIdentifiers: options.minifyIdentifiers ?? true,
minifySyntax: options.minifySyntax ?? true,
minifyWhitespace: options.minifyWhitespace ?? true
};
} else {
return { ...base, minify: true };
}
}
const atImportRE = /@import(?:\s*(?:url\([^)]*\)|"(?:[^"]|(?<=\\)")*"|'(?:[^']|(?<=\\)')*').*?|[^;]*);/g;
const atCharsetRE = /@charset(?:\s*(?:"(?:[^"]|(?<=\\)")*"|'(?:[^']|(?<=\\)')*').*?|[^;]*);/g;
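// Per the CSS spec, @import must precede all other statements (except
// @charset), and @charset must come first. This pass moves every @import in
// the concatenated output to the top and keeps only the first @charset,
// scanning a comment-stripped copy of the CSS so commented-out at-rules are
// not picked up.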
async function hoistAtRules(css) {
const s = new MagicString(css);
const cleanCss = emptyCssComments(css);
let match;
atImportRE.lastIndex = 0;
while (match = atImportRE.exec(cleanCss)) {
s.remove(match.index, match.index + match[0].length);
s.appendLeft(0, match[0]);
}
atCharsetRE.lastIndex = 0;
let foundCharset = false;
while (match = atCharsetRE.exec(cleanCss)) {
s.remove(match.index, match.index + match[0].length);
if (!foundCharset) {
s.prepend(match[0]);
foundCharset = true;
}
}
return s.toString();
}
const loadedPreprocessorPath = {};
function loadPreprocessorPath(lang, root) {
const cached = loadedPreprocessorPath[lang];
if (cached) {
return cached;
}
try {
const resolved = requireResolveFromRootWithFallback(root, lang);
return loadedPreprocessorPath[lang] = resolved;
} catch (e) {
if (e.code === "MODULE_NOT_FOUND") {
const installCommand = getPackageManagerCommand("install");
throw new Error(
`Preprocessor dependency "${lang}" not found. Did you install it? Try \`${installCommand} -D ${lang}\`.`
);
} else {
const message = new Error(
`Preprocessor dependency "${lang}" failed to load:
${e.message}`
);
message.stack = e.stack + "\n" + message.stack;
throw message;
}
}
}
function loadSassPackage(root) {
try {
const path2 = loadPreprocessorPath("sass-embedded", root);
return { name: "sass-embedded", path: path2 };
} catch (e1) {
try {
const path2 = loadPreprocessorPath("sass" /* sass */, root);
return { name: "sass", path: path2 };
} catch (e2) {
throw e1;
}
}
}
let cachedSss;
function loadSss(root) {
if (cachedSss) return cachedSss;
const sssPath = loadPreprocessorPath("sugarss" /* sss */, root);
cachedSss = createRequire$1(import.meta.url)(sssPath);
return cachedSss;
}
function cleanScssBugUrl(url) {
if (
// check bug via `window` and `location` global
typeof window !== "undefined" && typeof location !== "undefined" && typeof location?.href === "string"
) {
const prefix = location.href.replace(/\/$/, "");
return url.replace(prefix, "");
} else {
return url;
}
}
function fixScssBugImportValue(data) {
if (
// check bug via `window` and `location` global
typeof window !== "undefined" && typeof location !== "undefined" && data && "file" in data && (!("contents" in data) || data.contents == null)
) {
data.contents = fs__default.readFileSync(data.file, "utf-8");
}
return data;
}
const makeScssWorker = (resolvers, alias, maxWorkers) => {
const internalImporter = async (url, importer, filename) => {
importer = cleanScssBugUrl(importer);
const resolved = await resolvers.sass(url, importer);
if (resolved) {
try {
const data = await rebaseUrls(
resolved,
filename,
alias,
"$",
resolvers.sass
);
return fixScssBugImportValue(data);
} catch (data) {
return data;
}
} else {
return null;
}
};
const worker = new WorkerWithFallback(
() => async (sassPath, data, options) => {
const sass = require(sassPath);
const path2 = require("node:path");
const _internalImporter = (url, importer2, done) => {
internalImporter(url, importer2, options.filename).then(
(data2) => done?.(data2)
);
};
const importer = [_internalImporter];
if (options.importer) {
Array.isArray(options.importer) ? importer.unshift(...options.importer) : importer.unshift(options.importer);
}
const finalOptions = {
...options,
data,
file: options.filename,
outFile: options.filename,
importer,
...options.enableSourcemap ? {
sourceMap: true,
omitSourceMapUrl: true,
sourceMapRoot: path2.dirname(options.filename)
} : {}
};
return new Promise((resolve, reject) => {
sass.render(finalOptions, (err, res) => {
if (err) {
reject(err);
} else {
resolve({
css: res.css.toString(),
map: res.map?.toString(),
stats: res.stats
});
}
});
});
},
{
parentFunctions: { internalImporter },
shouldUseFake(_sassPath, _data, options) {
return !!(options.functions && Object.keys(options.functions).length > 0 || options.importer && (!Array.isArray(options.importer) || options.importer.length > 0));
},
max: maxWorkers
}
);
return worker;
};
const makeModernScssWorker = (resolvers, alias, maxWorkers) => {
const internalCanonicalize = async (url, importer) => {
importer = cleanScssBugUrl(importer);
const resolved = await resolvers.sass(url, importer);
return resolved ?? null;
};
const internalLoad = async (file, rootFile) => {
const result = await rebaseUrls(file, rootFile, alias, "$", resolvers.sass);
if (result.contents) {
return result.contents;
}
return await fsp.readFile(result.file, "utf-8");
};
const worker = new WorkerWithFallback(
() => async (sassPath, data, options) => {
const sass = require(sassPath);
const path2 = require("node:path");
const { fileURLToPath: fileURLToPath2, pathToFileURL: pathToFileURL2 } = (
// eslint-disable-next-line no-restricted-globals
require("node:url")
);
const sassOptions = { ...options };
sassOptions.url = pathToFileURL2(options.filename);
sassOptions.sourceMap = options.enableSourcemap;
const internalImporter = {
async canonicalize(url, context) {
const importer = context.containingUrl ? fileURLToPath2(context.containingUrl) : options.filename;
const resolved = await internalCanonicalize(url, importer);
return resolved ? pathToFileURL2(resolved) : null;
},
async load(canonicalUrl) {
const ext = path2.extname(canonicalUrl.pathname);
let syntax = "scss";
if (ext === ".sass") {
syntax = "indented";
} else if (ext === ".css") {
syntax = "css";
}
const contents = await internalLoad(
fileURLToPath2(canonicalUrl),
options.filename
);
return { contents, syntax };
}
};
sassOptions.importers = [
...sassOptions.importers ?? [],
internalImporter
];
const result = await sass.compileStringAsync(data, sassOptions);
return {
css: result.css,
map: result.sourceMap ? JSON.stringify(result.sourceMap) : void 0,
stats: {
includedFiles: result.loadedUrls.filter((url) => url.protocol === "file:").map((url) => fileURLToPath2(url))
}
};
},
{
parentFunctions: {
internalCanonicalize,
internalLoad
},
shouldUseFake(_sassPath, _data, options) {
return !!(options.functions && Object.keys(options.functions).length > 0 || options.importers && (!Array.isArray(options.importers) || options.importers.length > 0));
},
max: maxWorkers
}
);
return worker;
};
const makeModernCompilerScssWorker = (resolvers, alias, _maxWorkers) => {
let compiler;
const worker = {
async run(sassPath, data, options) {
const sass = (await import(pathToFileURL(sassPath).href)).default;
compiler ??= await sass.initAsyncCompiler();
const sassOptions = { ...options };
sassOptions.url = pathToFileURL(options.filename);
sassOptions.sourceMap = options.enableSourcemap;
const internalImporter = {
async canonicalize(url, context) {
const importer = context.containingUrl ? fileURLToPath(context.containingUrl) : options.filename;
const resolved = await resolvers.sass(url, cleanScssBugUrl(importer));
return resolved ? pathToFileURL(resolved) : null;
},
async load(canonicalUrl) {
const ext = path$n.extname(canonicalUrl.pathname);
let syntax = "scss";
if (ext === ".sass") {
syntax = "indented";
} else if (ext === ".css") {
syntax = "css";
}
const result2 = await rebaseUrls(
fileURLToPath(canonicalUrl),
options.filename,
alias,
"$",
resolvers.sass
);
const contents = result2.contents ?? await fsp.readFile(result2.file, "utf-8");
return { contents, syntax };
}
};
sassOptions.importers = [
...sassOptions.importers ?? [],
internalImporter
];
const result = await compiler.compileStringAsync(data, sassOptions);
return {
css: result.css,
map: result.sourceMap ? JSON.stringify(result.sourceMap) : void 0,
stats: {
includedFiles: result.loadedUrls.filter((url) => url.protocol === "file:").map((url) => fileURLToPath(url))
}
};
},
async stop() {
compiler?.dispose();
compiler = void 0;
}
};
return worker;
};
const scssProcessor = (maxWorkers) => {
const workerMap = /* @__PURE__ */ new Map();
return {
close() {
for (const worker of workerMap.values()) {
worker.stop();
}
},
async process(source, root, options, resolvers) {
const sassPackage = loadSassPackage(root);
const api = options.api ?? "legacy";
if (!workerMap.has(options.alias)) {
workerMap.set(
options.alias,
api === "modern-compiler" ? makeModernCompilerScssWorker(resolvers, options.alias) : api === "modern" ? makeModernScssWorker(resolvers, options.alias, maxWorkers) : makeScssWorker(resolvers, options.alias, maxWorkers)
);
}
const worker = workerMap.get(options.alias);
const { content: data, map: additionalMap } = await getSource(
source,
options.filename,
options.additionalData,
options.enableSourcemap
);
const optionsWithoutAdditionalData = {
...options,
additionalData: void 0
};
try {
const result = await worker.run(
sassPackage.path,
data,
optionsWithoutAdditionalData
);
const deps = result.stats.includedFiles.map((f) => cleanScssBugUrl(f));
const map2 = result.map ? JSON.parse(result.map.toString()) : void 0;
return {
code: result.css.toString(),
map: map2,
additionalMap,
deps
};
} catch (e) {
e.message = `[sass] ${e.message}`;
e.id = e.file;
e.frame = e.formatted;
return { code: "", error: e, deps: [] };
}
}
};
};
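// Rewrites relative url(), data-uri() and @import references inside a file
// pulled in by a Sass/Less import so they stay correct relative to the root
// file being compiled. Absolute paths, aliased specifiers and values starting
// with the preprocessor variable prefix ("$" for Sass, "@" for Less, per the
// call sites below) are left as-is; when the imported file lives in the same
// directory as the root file, no rewriting is needed and only { file } is
// returned.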
async function rebaseUrls(file, rootFile, alias, variablePrefix, resolver) {
file = path$n.resolve(file);
const fileDir = path$n.dirname(file);
const rootDir = path$n.dirname(rootFile);
if (fileDir === rootDir) {
return { file };
}
const content = await fsp.readFile(file, "utf-8");
const hasUrls = cssUrlRE.test(content);
const hasDataUris = cssDataUriRE.test(content);
const hasImportCss = importCssRE.test(content);
if (!hasUrls && !hasDataUris && !hasImportCss) {
return { file };
}
let rebased;
const rebaseFn = async (url) => {
if (url[0] === "/") return url;
if (url.startsWith(variablePrefix)) return url;
for (const { find } of alias) {
const matches = typeof find === "string" ? url.startsWith(find) : find.test(url);
if (matches) {
return url;
}
}
const absolute = await resolver(url, file) || path$n.resolve(fileDir, url);
const relative = path$n.relative(rootDir, absolute);
return normalizePath$3(relative);
};
if (hasImportCss) {
rebased = await rewriteImportCss(content, rebaseFn);
}
if (hasUrls) {
rebased = await rewriteCssUrls(rebased || content, rebaseFn);
}
if (hasDataUris) {
rebased = await rewriteCssDataUris(rebased || content, rebaseFn);
}
return {
file,
contents: rebased
};
}
const makeLessWorker = (resolvers, alias, maxWorkers) => {
const viteLessResolve = async (filename, dir, rootFile) => {
const resolved = await resolvers.less(filename, path$n.join(dir, "*"));
if (!resolved) return void 0;
const result = await rebaseUrls(
resolved,
rootFile,
alias,
"@",
resolvers.less
);
if (result) {
return {
resolved,
contents: "contents" in result ? result.contents : void 0
};
}
return result;
};
const worker = new WorkerWithFallback(
() => {
const fsp2 = require("node:fs/promises");
const path2 = require("node:path");
let ViteLessManager;
const createViteLessPlugin = (less, rootFile) => {
const { FileManager } = less;
ViteLessManager ??= class ViteManager extends FileManager {
rootFile;
constructor(rootFile2) {
super();
this.rootFile = rootFile2;
}
supports(filename) {
return !/^(?:https?:)?\/\//.test(filename);
}
supportsSync() {
return false;
}
async loadFile(filename, dir, opts, env) {
const result = await viteLessResolve(filename, dir, this.rootFile);
if (result) {
return {
filename: path2.resolve(result.resolved),
contents: result.contents ?? await fsp2.readFile(result.resolved, "utf-8")
};
} else {
return super.loadFile(filename, dir, opts, env);
}
}
};
return {
install(_, pluginManager) {
pluginManager.addFileManager(new ViteLessManager(rootFile));
},
minVersion: [3, 0, 0]
};
};
return async (lessPath, content, options) => {
const nodeLess = require(lessPath);
const viteResolverPlugin = createViteLessPlugin(
nodeLess,
options.filename
);
const result = await nodeLess.render(content, {
...options,
plugins: [viteResolverPlugin, ...options.plugins || []],
...options.enableSourcemap ? {
sourceMap: {
outputSourceFiles: true,
sourceMapFileInline: false
}
} : {}
});
return result;
};
},
{
parentFunctions: { viteLessResolve },
shouldUseFake(_lessPath, _content, options) {
return options.plugins?.length > 0;
},
max: maxWorkers
}
);
return worker;
};
const lessProcessor = (maxWorkers) => {
const workerMap = /* @__PURE__ */ new Map();
return {
close() {
for (const worker of workerMap.values()) {
worker.stop();
}
},
async process(source, root, options, resolvers) {
const lessPath = loadPreprocessorPath("less" /* less */, root);
if (!workerMap.has(options.alias)) {
workerMap.set(
options.alias,
makeLessWorker(resolvers, options.alias, maxWorkers)
);
}
const worker = workerMap.get(options.alias);
const { content, map: additionalMap } = await getSource(
source,
options.filename,
options.additionalData,
options.enableSourcemap
);
let result;
const optionsWithoutAdditionalData = {
...options,
additionalData: void 0
};
try {
result = await worker.run(
lessPath,
content,
optionsWithoutAdditionalData
);
} catch (e) {
const error = e;
const normalizedError = new Error(
`[less] ${error.message || error.type}`
);
normalizedError.loc = {
file: error.filename || options.filename,
line: error.line,
column: error.column
};
return { code: "", error: normalizedError, deps: [] };
}
const map2 = result.map && JSON.parse(result.map);
if (map2) {
delete map2.sourcesContent;
}
return {
code: result.css.toString(),
map: map2,
additionalMap,
deps: result.imports
};
}
};
};
const makeStylWorker = (maxWorkers) => {
const worker = new WorkerWithFallback(
() => {
return async (stylusPath, content, root, options) => {
const nodeStylus = require(stylusPath);
const ref = nodeStylus(content, options);
if (options.define) {
for (const key in options.define) {
ref.define(key, options.define[key]);
}
}
if (options.enableSourcemap) {
ref.set("sourcemap", {
comment: false,
inline: false,
basePath: root
});
}
return {
code: ref.render(),
// @ts-expect-error sourcemap exists
map: ref.sourcemap,
deps: ref.deps()
};
};
},
{
shouldUseFake(_stylusPath, _content, _root, options) {
return !!(options.define && Object.values(options.define).some((d) => typeof d === "function"));
},
max: maxWorkers
}
);
return worker;
};
const stylProcessor = (maxWorkers) => {
const workerMap = /* @__PURE__ */ new Map();
return {
close() {
for (const worker of workerMap.values()) {
worker.stop();
}
},
async process(source, root, options, resolvers) {
const stylusPath = loadPreprocessorPath("stylus" /* stylus */, root);
if (!workerMap.has(options.alias)) {
workerMap.set(options.alias, makeStylWorker(maxWorkers));
}
const worker = workerMap.get(options.alias);
const { content, map: additionalMap } = await getSource(
source,
options.filename,
options.additionalData,
options.enableSourcemap,
"\n"
);
const importsDeps = (options.imports ?? []).map(
(dep) => path$n.resolve(dep)
);
const optionsWithoutAdditionalData = {
...options,
additionalData: void 0
};
try {
const { code, map: map2, deps } = await worker.run(
stylusPath,
content,
root,
optionsWithoutAdditionalData
);
return {
code,
map: formatStylusSourceMap(map2, root),
additionalMap,
// Concat imports deps with computed deps
deps: [...deps, ...importsDeps]
};
} catch (e) {
const wrapped = new Error(`[stylus] ${e.message}`);
wrapped.name = e.name;
wrapped.stack = e.stack;
return { code: "", error: wrapped, deps: [] };
}
}
};
};
function formatStylusSourceMap(mapBefore, root) {
if (!mapBefore) return void 0;
const map2 = { ...mapBefore };
const resolveFromRoot = (p) => normalizePath$3(path$n.resolve(root, p));
if (map2.file) {
map2.file = resolveFromRoot(map2.file);
}
map2.sources = map2.sources.map(resolveFromRoot);
return map2;
}
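// Applies the preprocessor `additionalData` option to the source. A plain
// string is prepended (optionally separated by `sep`); a function may return
// either a new string or a { content, map } pair. When source maps are
// enabled, the prepend goes through MagicString so a boundary-resolution map
// can be generated with `file` and `sources` pointing at the target filename.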
async function getSource(source, filename, additionalData, enableSourcemap, sep = "") {
if (!additionalData) return { content: source };
if (typeof additionalData === "function") {
const newContent = await additionalData(source, filename);
if (typeof newContent === "string") {
return { content: newContent };
}
return newContent;
}
if (!enableSourcemap) {
return { content: additionalData + sep + source };
}
const ms = new MagicString(source);
ms.appendLeft(0, sep);
ms.appendLeft(0, additionalData);
const map2 = ms.generateMap({ hires: "boundary" });
map2.file = filename;
map2.sources = [filename];
return {
content: ms.toString(),
map: map2
};
}
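// Bundles one processor per preprocessor language behind a single controller.
// `sass` reuses the scss processor with indented syntax enabled, and `stylus`
// is an alias for `styl`; close() shuts down all underlying workers.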
const createPreprocessorWorkerController = (maxWorkers) => {
const scss = scssProcessor(maxWorkers);
const less = lessProcessor(maxWorkers);
const styl = stylProcessor(maxWorkers);
const sassProcess = (source, root, options, resolvers) => {
return scss.process(
source,
root,
{ ...options, indentedSyntax: true, syntax: "indented" },
resolvers
);
};
const close = () => {
less.close();
scss.close();
styl.close();
};
return {
["less" /* less */]: less.process,
["scss" /* scss */]: scss.process,
["sass" /* sass */]: sassProcess,
["styl" /* styl */]: styl.process,
["stylus" /* stylus */]: styl.process,
close
};
};
const normalizeMaxWorkers = (maxWorker) => {
if (maxWorker === void 0) return 0;
if (maxWorker === true) return void 0;
return maxWorker;
};
const preprocessorSet = /* @__PURE__ */ new Set([
"less" /* less */,
"sass" /* sass */,
"scss" /* scss */,
"styl" /* styl */,
"stylus" /* stylus */
]);
function isPreProcessor(lang) {
return lang && preprocessorSet.has(lang);
}
const importLightningCSS = createCachedImport(() => import('lightningcss'));
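// Compiles CSS with Lightning CSS. Inline style attributes (matched by
// styleAttrRE) go through transformStyleAttribute; everything else goes
// through bundleAsync with a custom resolver that tracks @import dependencies
// and resolves public-directory files via checkPublicFile. URL dependencies
// reported by Lightning CSS are rewritten through urlReplacer when one is
// provided, and CSS-module exports are flattened into a name -> class-string
// record (including composed class names).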
async function compileLightningCSS(id, src, config, urlReplacer) {
const deps = /* @__PURE__ */ new Set();
const filename = cleanUrl(path$n.relative(config.root, id));
const toAbsolute = (filePath) => path$n.isAbsolute(filePath) ? filePath : path$n.join(config.root, filePath);
const res = styleAttrRE.test(id) ? (await importLightningCSS()).transformStyleAttribute({
filename,
code: Buffer.from(src),
targets: config.css?.lightningcss?.targets,
minify: config.isProduction && !!config.build.cssMinify,
analyzeDependencies: true
}) : await (await importLightningCSS()).bundleAsync({
...config.css?.lightningcss,
filename,
resolver: {
read(filePath) {
if (filePath === filename) {
return src;
}
if (!filePath.endsWith(".css")) {
return src;
}
return fs__default.readFileSync(toAbsolute(filePath), "utf-8");
},
async resolve(id2, from) {
const publicFile = checkPublicFile(id2, config);
if (publicFile) {
return publicFile;
}
const resolved = await getAtImportResolvers(config).css(
id2,
toAbsolute(from)
);
if (resolved) {
deps.add(resolved);
return resolved;
}
return id2;
}
},
minify: config.isProduction && !!config.build.cssMinify,
sourceMap: config.command === "build" ? !!config.build.sourcemap : config.css?.devSourcemap,
analyzeDependencies: true,
cssModules: cssModuleRE.test(id) ? config.css?.lightningcss?.cssModules ?? true : void 0
});
let css = decoder.decode(res.code);
for (const dep of res.dependencies) {
switch (dep.type) {
case "url":
if (skipUrlReplacer(dep.url)) {
css = css.replace(dep.placeholder, () => dep.url);
break;
}
deps.add(dep.url);
if (urlReplacer) {
const replaceUrl = await urlReplacer(dep.url, dep.loc.filePath);
css = css.replace(dep.placeholder, () => replaceUrl);
} else {
css = css.replace(dep.placeholder, () => dep.url);
}
break;
default:
throw new Error(`Unsupported dependency type: ${dep.type}`);
}
}
let modules;
if ("exports" in res && res.exports) {
modules = {};
const sortedEntries = Object.entries(res.exports).sort(
(a, b) => a[0].localeCompare(b[0])
);
for (const [key, value] of sortedEntries) {
modules[key] = value.name;
for (const c of value.composes) {
modules[key] += " " + c.name;
}
}
}
return {
code: css,
map: "map" in res ? res.map?.toString() : void 0,
deps,
modules
};
}
const map = {
chrome: "chrome",
edge: "edge",
firefox: "firefox",
hermes: false,
ie: "ie",
ios: "ios_saf",
node: false,
opera: "opera",
rhino: false,
safari: "safari"
};
const esMap = {
// https://caniuse.com/?search=es2015
2015: ["chrome49", "edge13", "safari10", "firefox44", "opera36"],
// https://caniuse.com/?search=es2016
2016: ["chrome50", "edge13", "safari10", "firefox43", "opera37"],
// https://caniuse.com/?search=es2017
2017: ["chrome58", "edge15", "safari11", "firefox52", "opera45"],
// https://caniuse.com/?search=es2018
2018: ["chrome63", "edge79", "safari12", "firefox58", "opera50"],
// https://caniuse.com/?search=es2019
2019: ["chrome73", "edge79", "safari12.1", "firefox64", "opera60"],
// https://caniuse.com/?search=es2020
2020: ["chrome80", "edge80", "safari14.1", "firefox80", "opera67"],
// https://caniuse.com/?search=es2021
2021: ["chrome85", "edge85", "safari14.1", "firefox80", "opera71"],
// https://caniuse.com/?search=es2022
2022: ["chrome94", "edge94", "safari16.4", "firefox93", "opera80"]
};
const esRE = /es(\d{4})/;
const versionRE = /\d/;
const convertTargetsCache = /* @__PURE__ */ new Map();
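// Converts esbuild-style targets (e.g. "es2020", "chrome80", "safari14.1")
// into the numeric browser-version map Lightning CSS expects. "es20xx"
// entries expand to the browser list in esMap above; each browser version is
// packed as (major << 16) | (minor << 8), so for example "chrome80" becomes
// { chrome: 80 << 16 } and "safari14.1" becomes { safari: (14 << 16) | (1 << 8) }.
// When the same browser appears more than once, the lowest version wins.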
const convertTargets = (esbuildTarget) => {
if (!esbuildTarget) return {};
const cached = convertTargetsCache.get(esbuildTarget);
if (cached) return cached;
const targets = {};
const entriesWithoutES = arraify(esbuildTarget).flatMap((e) => {
const match = esRE.exec(e);
if (!match) return e;
const year = Number(match[1]);
if (!esMap[year]) throw new Error(`Unsupported target "${e}"`);
return esMap[year];
});
for (const entry of entriesWithoutES) {
if (entry === "esnext") continue;
const index = entry.search(versionRE);
if (index >= 0) {
const browser = map[entry.slice(0, index)];
if (browser === false) continue;
if (browser) {
const [major, minor = 0] = entry.slice(index).split(".").map((v) => parseInt(v, 10));
if (!isNaN(major) && !isNaN(minor)) {
const version = major << 16 | minor << 8;
if (!targets[browser] || version < targets[browser]) {
targets[browser] = version;
}
continue;
}
}
}
throw new Error(`Unsupported target "${entry}"`);
}
convertTargetsCache.set(esbuildTarget, targets);
return targets;
};
const HASH_RE = /#/g;
const AMPERSAND_RE = /&/g;
const SLASH_RE = /\//g;
const EQUAL_RE = /=/g;
const PLUS_RE = /\+/g;
const ENC_CARET_RE = /%5e/gi;
const ENC_BACKTICK_RE = /%60/gi;
const ENC_PIPE_RE = /%7c/gi;
const ENC_SPACE_RE = /%20/gi;
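// Minimal query-string encoders used for the glob-import `query` option.
// Values are passed through encodeURI and then adjusted: "|", "`" and "^"
// stay literal, spaces become "+", and "#", "&", "/" and "+" are
// percent-encoded; keys additionally escape "=". Array values expand to
// repeated key=value pairs, and stringifyQuery drops undefined values.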
function encode(text) {
return encodeURI("" + text).replace(ENC_PIPE_RE, "|");
}
function encodeQueryValue(input) {
return encode(typeof input === "string" ? input : JSON.stringify(input)).replace(PLUS_RE, "%2B").replace(ENC_SPACE_RE, "+").replace(HASH_RE, "%23").replace(AMPERSAND_RE, "%26").replace(ENC_BACKTICK_RE, "`").replace(ENC_CARET_RE, "^").replace(SLASH_RE, "%2F");
}
function encodeQueryKey(text) {
return encodeQueryValue(text).replace(EQUAL_RE, "%3D");
}
function encodeQueryItem(key, value) {
if (typeof value === "number" || typeof value === "boolean") {
value = String(value);
}
if (!value) {
return encodeQueryKey(key);
}
if (Array.isArray(value)) {
return value.map((_value) => `${encodeQueryKey(key)}=${encodeQueryValue(_value)}`).join("&");
}
return `${encodeQueryKey(key)}=${encodeQueryValue(value)}`;
}
function stringifyQuery(query) {
return Object.keys(query).filter((k) => query[k] !== void 0).map((k) => encodeQueryItem(k, query[k])).filter(Boolean).join("&");
}
new Set(builtinModules);
function clearImports(imports) {
return (imports || "").replace(/(\/\/[^\n]*\n|\/\*.*\*\/)/g, "").replace(/\s+/g, " ");
}
function getImportNames(cleanedImports) {
const topLevelImports = cleanedImports.replace(/{([^}]*)}/, "");
const namespacedImport = topLevelImports.match(/\* as \s*(\S*)/)?.[1];
const defaultImport = topLevelImports.split(",").find((index) => !/[*{}]/.test(index))?.trim() || void 0;
return {
namespacedImport,
defaultImport
};
}
/**
* @typedef ErrnoExceptionFields
 * @property {number | undefined} [errno]
* @property {string | undefined} [code]
* @property {string | undefined} [path]
* @property {string | undefined} [syscall]
* @property {string | undefined} [url]
*
* @typedef {Error & ErrnoExceptionFields} ErrnoException
*/
const own$1 = {}.hasOwnProperty;
const classRegExp = /^([A-Z][a-z\d]*)+$/;
// Sorted by a rough estimate on most frequently used entries.
const kTypes = new Set([
'string',
'function',
'number',
'object',
// Accept 'Function' and 'Object' as alternative to the lower cased version.
'Function',
'Object',
'boolean',
'bigint',
'symbol'
]);
/**
* Create a list string in the form like 'A and B' or 'A, B, ..., and Z'.
* We cannot use Intl.ListFormat because it's not available in
* --without-intl builds.
*
* @param {Array<string>} array
* An array of strings.
* @param {string} [type]
* The list type to be inserted before the last element.
* @returns {string}
*/
function formatList(array, type = 'and') {
return array.length < 3
? array.join(` ${type} `)
: `${array.slice(0, -1).join(', ')}, ${type} ${array[array.length - 1]}`
}
/** @type {Map<string, MessageFunction | string>} */
const messages = new Map();
const nodeInternalPrefix = '__node_internal_';
/** @type {number} */
let userStackTraceLimit;
createError(
'ERR_INVALID_ARG_TYPE',
/**
* @param {string} name
* @param {Array<string> | string} expected
* @param {unknown} actual
*/
(name, expected, actual) => {
assert$1(typeof name === 'string', "'name' must be a string");
if (!Array.isArray(expected)) {
expected = [expected];
}
let message = 'The ';
if (name.endsWith(' argument')) {
// For cases like 'first argument'
message += `${name} `;
} else {
const type = name.includes('.') ? 'property' : 'argument';
message += `"${name}" ${type} `;
}
message += 'must be ';
/** @type {Array<string>} */
const types = [];
/** @type {Array<string>} */
const instances = [];
/** @type {Array<string>} */
const other = [];
for (const value of expected) {
assert$1(
typeof value === 'string',
'All expected entries have to be of type string'
);
if (kTypes.has(value)) {
types.push(value.toLowerCase());
} else if (classRegExp.exec(value) === null) {
assert$1(
value !== 'object',
'The value "object" should be written as "Object"'
);
other.push(value);
} else {
instances.push(value);
}
}
// Handle `object` specially when class instances are also expected: it is
// reported as `an instance of Object` so the difference from the other
// instance types stays visible.
if (instances.length > 0) {
const pos = types.indexOf('object');
if (pos !== -1) {
types.splice(pos, 1);
instances.push('Object');
}
}
if (types.length > 0) {
message += `${types.length > 1 ? 'one of type' : 'of type'} ${formatList(
types,
'or'
)}`;
if (instances.length > 0 || other.length > 0) message += ' or ';
}
if (instances.length > 0) {
message += `an instance of ${formatList(instances, 'or')}`;
if (other.length > 0) message += ' or ';
}
if (other.length > 0) {
if (other.length > 1) {
message += `one of ${formatList(other, 'or')}`;
} else {
if (other[0].toLowerCase() !== other[0]) message += 'an ';
message += `${other[0]}`;
}
}
message += `. Received ${determineSpecificType(actual)}`;
return message
},
TypeError
);
createError(
'ERR_INVALID_MODULE_SPECIFIER',
/**
* @param {string} request
* @param {string} reason
* @param {string} [base]
*/
(request, reason, base = undefined) => {
return `Invalid module "${request}" ${reason}${
base ? ` imported from ${base}` : ''
}`
},
TypeError
);
createError(
'ERR_INVALID_PACKAGE_CONFIG',
/**
* @param {string} path
* @param {string} [base]
* @param {string} [message]
*/
(path, base, message) => {
return `Invalid package config ${path}${
base ? ` while importing ${base}` : ''
}${message ? `. ${message}` : ''}`
},
Error
);
createError(
'ERR_INVALID_PACKAGE_TARGET',
/**
* @param {string} packagePath
* @param {string} key
* @param {unknown} target
* @param {boolean} [isImport=false]
* @param {string} [base]
*/
(packagePath, key, target, isImport = false, base = undefined) => {
const relatedError =
typeof target === 'string' &&
!isImport &&
target.length > 0 &&
!target.startsWith('./');
if (key === '.') {
assert$1(isImport === false);
return (
`Invalid "exports" main target ${JSON.stringify(target)} defined ` +
`in the package config ${packagePath}package.json${
base ? ` imported from ${base}` : ''
}${relatedError ? '; targets must start with "./"' : ''}`
)
}
return `Invalid "${
isImport ? 'imports' : 'exports'
}" target ${JSON.stringify(
target
)} defined for '${key}' in the package config ${packagePath}package.json${
base ? ` imported from ${base}` : ''
}${relatedError ? '; targets must start with "./"' : ''}`
},
Error
);
createError(
'ERR_MODULE_NOT_FOUND',
/**
* @param {string} path
* @param {string} base
* @param {boolean} [exactUrl]
*/
(path, base, exactUrl = false) => {
return `Cannot find ${
exactUrl ? 'module' : 'package'
} '${path}' imported from ${base}`
},
Error
);
createError(
'ERR_NETWORK_IMPORT_DISALLOWED',
"import of '%s' by %s is not supported: %s",
Error
);
createError(
'ERR_PACKAGE_IMPORT_NOT_DEFINED',
/**
* @param {string} specifier
* @param {string} packagePath
* @param {string} base
*/
(specifier, packagePath, base) => {
return `Package import specifier "${specifier}" is not defined${
packagePath ? ` in package ${packagePath}package.json` : ''
} imported from ${base}`
},
TypeError
);
createError(
'ERR_PACKAGE_PATH_NOT_EXPORTED',
/**
* @param {string} packagePath
* @param {string} subpath
* @param {string} [base]
*/
(packagePath, subpath, base = undefined) => {
if (subpath === '.')
return `No "exports" main defined in ${packagePath}package.json${
base ? ` imported from ${base}` : ''
}`
return `Package subpath '${subpath}' is not defined by "exports" in ${packagePath}package.json${
base ? ` imported from ${base}` : ''
}`
},
Error
);
createError(
'ERR_UNSUPPORTED_DIR_IMPORT',
"Directory import '%s' is not supported " +
'resolving ES modules imported from %s',
Error
);
createError(
'ERR_UNSUPPORTED_RESOLVE_REQUEST',
'Failed to resolve module specifier "%s" from "%s": Invalid relative URL or base scheme is not hierarchical.',
TypeError
);
createError(
'ERR_UNKNOWN_FILE_EXTENSION',
/**
* @param {string} extension
* @param {string} path
*/
(extension, path) => {
return `Unknown file extension "${extension}" for ${path}`
},
TypeError
);
createError(
'ERR_INVALID_ARG_VALUE',
/**
* @param {string} name
* @param {unknown} value
* @param {string} [reason='is invalid']
*/
(name, value, reason = 'is invalid') => {
let inspected = inspect(value);
if (inspected.length > 128) {
inspected = `${inspected.slice(0, 128)}...`;
}
const type = name.includes('.') ? 'property' : 'argument';
return `The ${type} '${name}' ${reason}. Received ${inspected}`
},
TypeError
// Note: extra classes have been shaken out.
// , RangeError
);
/**
* Utility function for registering the error codes. Only used here. Exported
* *only* to allow for testing.
* @param {string} sym
* @param {MessageFunction | string} value
* @param {ErrorConstructor} constructor
* @returns {new (...parameters: Array<any>) => Error}
*/
function createError(sym, value, constructor) {
// Special case for SystemError that formats the error message differently
// The SystemErrors only have SystemError as their base classes.
messages.set(sym, value);
return makeNodeErrorWithCode(constructor, sym)
}
/**
* @param {ErrorConstructor} Base
* @param {string} key
* @returns {ErrorConstructor}
*/
function makeNodeErrorWithCode(Base, key) {
// @ts-expect-error It’s a Node error.
return NodeError
/**
* @param {Array<unknown>} parameters
*/
function NodeError(...parameters) {
const limit = Error.stackTraceLimit;
if (isErrorStackTraceLimitWritable()) Error.stackTraceLimit = 0;
const error = new Base();
// Reset the limit and set the name property.
if (isErrorStackTraceLimitWritable()) Error.stackTraceLimit = limit;
const message = getMessage(key, parameters, error);
Object.defineProperties(error, {
// Note: no need to implement `kIsNodeError` symbol, would be hard,
// probably.
message: {
value: message,
enumerable: false,
writable: true,
configurable: true
},
toString: {
/** @this {Error} */
value() {
return `${this.name} [${key}]: ${this.message}`
},
enumerable: false,
writable: true,
configurable: true
}
});
captureLargerStackTrace(error);
// @ts-expect-error It’s a Node error.
error.code = key;
return error
}
}
/**
* @returns {boolean}
*/
function isErrorStackTraceLimitWritable() {
// Do not touch Error.stackTraceLimit as V8 would attempt to install
// it again during deserialization.
try {
if (v8.startupSnapshot.isBuildingSnapshot()) {
return false
}
} catch {}
const desc = Object.getOwnPropertyDescriptor(Error, 'stackTraceLimit');
if (desc === undefined) {
return Object.isExtensible(Error)
}
return own$1.call(desc, 'writable') && desc.writable !== undefined
? desc.writable
: desc.set !== undefined
}
/**
* This function removes unnecessary frames from Node.js core errors.
* @template {(...parameters: unknown[]) => unknown} T
* @param {T} wrappedFunction
* @returns {T}
*/
function hideStackFrames(wrappedFunction) {
// We rename the functions that will be hidden to cut off the stacktrace
// at the outermost one
const hidden = nodeInternalPrefix + wrappedFunction.name;
Object.defineProperty(wrappedFunction, 'name', {value: hidden});
return wrappedFunction
}
const captureLargerStackTrace = hideStackFrames(
/**
* @param {Error} error
* @returns {Error}
*/
// @ts-expect-error: fine
function (error) {
const stackTraceLimitIsWritable = isErrorStackTraceLimitWritable();
if (stackTraceLimitIsWritable) {
userStackTraceLimit = Error.stackTraceLimit;
Error.stackTraceLimit = Number.POSITIVE_INFINITY;
}
Error.captureStackTrace(error);
// Reset the limit
if (stackTraceLimitIsWritable) Error.stackTraceLimit = userStackTraceLimit;
return error
}
);
/**
* @param {string} key
* @param {Array<unknown>} parameters
* @param {Error} self
* @returns {string}
*/
function getMessage(key, parameters, self) {
const message = messages.get(key);
assert$1(message !== undefined, 'expected `message` to be found');
if (typeof message === 'function') {
assert$1(
message.length <= parameters.length, // Default options do not count.
`Code: ${key}; The provided arguments length (${parameters.length}) does not ` +
`match the required ones (${message.length}).`
);
return Reflect.apply(message, self, parameters)
}
const regex = /%[dfijoOs]/g;
let expectedLength = 0;
while (regex.exec(message) !== null) expectedLength++;
assert$1(
expectedLength === parameters.length,
`Code: ${key}; The provided arguments length (${parameters.length}) does not ` +
`match the required ones (${expectedLength}).`
);
if (parameters.length === 0) return message
parameters.unshift(message);
return Reflect.apply(format$2, null, parameters)
}
/**
* Determine the specific type of a value for type-mismatch errors.
* @param {unknown} value
* @returns {string}
*/
function determineSpecificType(value) {
if (value === null || value === undefined) {
return String(value)
}
if (typeof value === 'function' && value.name) {
return `function ${value.name}`
}
if (typeof value === 'object') {
if (value.constructor && value.constructor.name) {
return `an instance of ${value.constructor.name}`
}
return `${inspect(value, {depth: -1})}`
}
let inspected = inspect(value, {colors: false});
if (inspected.length > 28) {
inspected = `${inspected.slice(0, 25)}...`;
}
return `type ${typeof value} (${inspected})`
}
const ESM_STATIC_IMPORT_RE = /(?<=\s|^|;|\})import\s*([\s"']*(?<imports>[\p{L}\p{M}\w\t\n\r $*,/{}@.]+)from\s*)?["']\s*(?<specifier>(?<="\s*)[^"]*[^\s"](?=\s*")|(?<='\s*)[^']*[^\s'](?=\s*'))\s*["'][\s;]*/gmu;
const TYPE_RE = /^\s*?type\s/;
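// Expands a static-import match into its parts: named imports (honouring
// `as` renames and skipping type-only entries), the namespace import
// (`* as ns`) and the default import. For example,
//   import foo, { bar as baz } from 'mod'
// yields { defaultImport: 'foo', namedImports: { bar: 'baz' }, ... }.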
function parseStaticImport(matched) {
const cleanedImports = clearImports(matched.imports);
const namedImports = {};
const _matches = cleanedImports.match(/{([^}]*)}/)?.[1]?.split(",") || [];
for (const namedImport of _matches) {
const _match = namedImport.match(/^\s*(\S*) as (\S*)\s*$/);
const source = _match?.[1] || namedImport.trim();
const importName = _match?.[2] || source;
if (source && !TYPE_RE.test(source)) {
namedImports[source] = importName;
}
}
const { namespacedImport, defaultImport } = getImportNames(cleanedImports);
return {
...matched,
defaultImport,
namespacedImport,
namedImports
};
}
const ESM_RE = /([\s;]|^)(import[\s\w*,{}]*from|import\s*["'*{]|export\b\s*(?:[*{]|default|class|type|function|const|var|let|async function)|import\.meta\b)/m;
const COMMENT_RE = /\/\*.+?\*\/|\/\/.*(?=[\n\r])/g;
function hasESMSyntax(code, opts = {}) {
if (opts.stripComments) {
code = code.replace(COMMENT_RE, "");
}
return ESM_RE.test(code);
}
const { isMatch: isMatch$1, scan } = micromatch$2;
function getAffectedGlobModules(file, server) {
const modules = [];
for (const [id, allGlobs] of server._importGlobMap) {
if (allGlobs.some(
({ affirmed, negated }) => (!affirmed.length || affirmed.some((glob) => isMatch$1(file, glob))) && (!negated.length || negated.every((glob) => isMatch$1(file, glob)))
)) {
const mod = server.moduleGraph.getModuleById(id);
if (mod) modules.push(mod);
}
}
modules.forEach((i) => {
if (i?.file) server.moduleGraph.onFileChange(i.file);
});
return modules;
}
function importGlobPlugin(config) {
let server;
return {
name: "vite:import-glob",
configureServer(_server) {
server = _server;
server._importGlobMap.clear();
},
async transform(code, id) {
if (!code.includes("import.meta.glob")) return;
const result = await transformGlobImport(
code,
id,
config.root,
(im, _, options) => this.resolve(im, id, options).then((i) => i?.id || im),
config.experimental.importGlobRestoreExtension,
config.logger
);
if (result) {
if (server) {
const allGlobs = result.matches.map((i) => i.globsResolved);
server._importGlobMap.set(
id,
allGlobs.map((globs) => {
const affirmed = [];
const negated = [];
for (const glob of globs) {
(glob[0] === "!" ? negated : affirmed).push(glob);
}
return { affirmed, negated };
})
);
}
return transformStableResult(result.s, id, config);
}
}
};
}
const importGlobRE = /\bimport\.meta\.glob(?:<\w+>)?\s*\(/g;
const knownOptions = {
as: ["string"],
eager: ["boolean"],
import: ["string"],
exhaustive: ["boolean"],
query: ["object", "string"]
};
const forceDefaultAs = ["raw", "url"];
function err$1(e, pos) {
const error = new Error(e);
error.pos = pos;
return error;
}
function parseGlobOptions(rawOpts, optsStartIndex, logger) {
let opts = {};
try {
opts = evalValue(rawOpts);
} catch {
throw err$1(
"Vite is unable to parse the glob options as the value is not static",
optsStartIndex
);
}
if (opts == null) {
return {};
}
for (const key in opts) {
if (!(key in knownOptions)) {
throw err$1(`Unknown glob option "${key}"`, optsStartIndex);
}
const allowedTypes = knownOptions[key];
const valueType = typeof opts[key];
if (!allowedTypes.includes(valueType)) {
throw err$1(
`Expected glob option "${key}" to be of type ${allowedTypes.join(
" or "
)}, but got ${valueType}`,
optsStartIndex
);
}
}
if (typeof opts.query === "object") {
for (const key in opts.query) {
const value = opts.query[key];
if (!["string", "number", "boolean"].includes(typeof value)) {
throw err$1(
`Expected glob option "query.${key}" to be of type string, number, or boolean, but got ${typeof value}`,
optsStartIndex
);
}
}
opts.query = stringifyQuery(opts.query);
}
if (opts.as && logger) {
const importSuggestion = forceDefaultAs.includes(opts.as) ? `, import: 'default'` : "";
logger.warn(
colors$1.yellow(
`The glob option "as" has been deprecated in favour of "query". Please update \`as: '${opts.as}'\` to \`query: '?${opts.as}'${importSuggestion}\`.`
)
);
}
if (opts.as && forceDefaultAs.includes(opts.as)) {
if (opts.import && opts.import !== "default" && opts.import !== "*")
throw err$1(
`Option "import" can only be "default" or "*" when "as" is "${opts.as}", but got "${opts.import}"`,
optsStartIndex
);
opts.import = opts.import || "default";
}
if (opts.as && opts.query)
throw err$1(
'Options "as" and "query" cannot be used together',
optsStartIndex
);
if (opts.as) opts.query = opts.as;
if (opts.query && opts.query[0] !== "?") opts.query = `?${opts.query}`;
return opts;
}
async function parseImportGlob(code, importer, root, resolveId, logger) {
let cleanCode;
try {
cleanCode = stripLiteral(code);
} catch (e) {
return [];
}
const matches = Array.from(cleanCode.matchAll(importGlobRE));
const tasks = matches.map(async (match, index) => {
const start = match.index;
const err2 = (msg) => {
const e = new Error(`Invalid glob import syntax: ${msg}`);
e.pos = start;
return e;
};
const end = findCorrespondingCloseParenthesisPosition(
cleanCode,
start + match[0].length
) + 1;
if (end <= 0) {
throw err2("Close parenthesis not found");
}
const statementCode = code.slice(start, end);
const rootAst = (await parseAstAsync(statementCode)).body[0];
if (rootAst.type !== "ExpressionStatement") {
throw err2(`Expect CallExpression, got ${rootAst.type}`);
}
const ast = rootAst.expression;
if (ast.type !== "CallExpression") {
throw err2(`Expect CallExpression, got ${ast.type}`);
}
if (ast.arguments.length < 1 || ast.arguments.length > 2)
throw err2(`Expected 1-2 arguments, but got ${ast.arguments.length}`);
const arg1 = ast.arguments[0];
const arg2 = ast.arguments[1];
const globs = [];
const validateLiteral = (element) => {
if (!element) return;
if (element.type === "Literal") {
if (typeof element.value !== "string")
throw err2(
`Expected glob to be a string, but got "${typeof element.value}"`
);
globs.push(element.value);
} else if (element.type === "TemplateLiteral") {
if (element.expressions.length !== 0) {
throw err2(
`Expected glob to be a string, but got dynamic template literal`
);
}
globs.push(element.quasis[0].value.raw);
} else {
throw err2("Could only use literals");
}
};
if (arg1.type === "ArrayExpression") {
for (const element of arg1.elements) {
validateLiteral(element);
}
} else {
validateLiteral(arg1);
}
let options = {};
if (arg2) {
if (arg2.type !== "ObjectExpression")
throw err2(
`Expected the second argument to be an object literal, but got "${arg2.type}"`
);
options = parseGlobOptions(
code.slice(start + arg2.start, start + arg2.end),
start + arg2.start,
logger
);
}
const globsResolved = await Promise.all(
globs.map((glob) => toAbsoluteGlob(glob, root, importer, resolveId))
);
const isRelative = globs.every((i) => ".!".includes(i[0]));
return {
index,
globs,
globsResolved,
isRelative,
options,
start,
end
};
});
return (await Promise.all(tasks)).filter(Boolean);
}
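// Finds the ")" that closes the import.meta.glob(...) call. A fast path
// handles the common case of no nested parentheses; otherwise nesting is
// tracked from just past the opening parenthesis and the position of the
// first unmatched ")" is returned. Returns -1 when no matching close
// parenthesis exists.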
function findCorrespondingCloseParenthesisPosition(cleanCode, openPos) {
const closePos = cleanCode.indexOf(")", openPos);
if (closePos < 0) return -1;
if (!cleanCode.slice(openPos, closePos).includes("(")) return closePos;
let remainingParenthesisCount = 0;
const cleanCodeLen = cleanCode.length;
for (let pos = openPos; pos < cleanCodeLen; pos++) {
switch (cleanCode[pos]) {
case "(": {
remainingParenthesisCount++;
break;
}
case ")": {
remainingParenthesisCount--;
if (remainingParenthesisCount <= 0) {
return pos;
}
}
}
}
return -1;
}
const importPrefix = "__vite_glob_";
const { basename, dirname, relative, join } = posix$1;
async function transformGlobImport(code, id, root, resolveId, restoreQueryExtension = false, logger) {
id = slash$1(id);
root = slash$1(root);
const isVirtual = isVirtualModule(id);
const dir = isVirtual ? void 0 : dirname(id);
const matches = await parseImportGlob(
code,
isVirtual ? void 0 : id,
root,
resolveId,
logger
);
const matchedFiles = /* @__PURE__ */ new Set();
if (!matches.length) return null;
const s = new MagicString(code);
const staticImports = (await Promise.all(
matches.map(
async ({ globsResolved, isRelative, options, index, start, end }) => {
const cwd = getCommonBase(globsResolved) ?? root;
const files = (await glob(globsResolved, {
cwd,
absolute: true,
dot: !!options.exhaustive,
ignore: options.exhaustive ? [] : [join(cwd, "**/node_modules/**")]
})).filter((file) => file !== id).sort();
const objectProps = [];
const staticImports2 = [];
const resolvePaths = (file) => {
if (!dir) {
if (isRelative)
throw new Error(
"In virtual modules, all globs must start with '/'"
);
const filePath2 = `/${relative(root, file)}`;
return { filePath: filePath2, importPath: filePath2 };
}
let importPath = relative(dir, file);
if (importPath[0] !== ".") importPath = `./${importPath}`;
let filePath;
if (isRelative) {
filePath = importPath;
} else {
filePath = relative(root, file);
if (filePath[0] !== ".") filePath = `/${filePath}`;
}
return { filePath, importPath };
};
files.forEach((file, i) => {
const paths = resolvePaths(file);
const filePath = paths.filePath;
let importPath = paths.importPath;
let importQuery = options.query ?? "";
if (importQuery && importQuery !== "?raw") {
const fileExtension = basename(file).split(".").slice(-1)[0];
if (fileExtension && restoreQueryExtension)
importQuery = `${importQuery}&lang.${fileExtension}`;
}
importPath = `${importPath}${importQuery}`;
const importKey = options.import && options.import !== "*" ? options.import : void 0;
if (options.eager) {
const variableName = `${importPrefix}${index}_${i}`;
const expression = importKey ? `{ ${importKey} as ${variableName} }` : `* as ${variableName}`;
staticImports2.push(
`import ${expression} from ${JSON.stringify(importPath)}`
);
objectProps.push(`${JSON.stringify(filePath)}: ${variableName}`);
} else {
let importStatement = `import(${JSON.stringify(importPath)})`;
if (importKey)
importStatement += `.then(m => m[${JSON.stringify(importKey)}])`;
objectProps.push(
`${JSON.stringify(filePath)}: () => ${importStatement}`
);
}
});
files.forEach((i) => matchedFiles.add(i));
const originalLineBreakCount = code.slice(start, end).match(/\n/g)?.length ?? 0;
const lineBreaks = originalLineBreakCount > 0 ? "\n".repeat(originalLineBreakCount) : "";
const replacement = `/* #__PURE__ */ Object.assign({${objectProps.join(
","
)}${lineBreaks}})`;
s.overwrite(start, end, replacement);
return staticImports2;
}
)
)).flat();
if (staticImports.length) s.prepend(`${staticImports.join(";")};`);
return {
s,
matches,
files: matchedFiles
};
}
function globSafePath(path) {
return glob.escapePath(normalizePath$3(path));
}
function lastNthChar(str, n) {
return str.charAt(str.length - 1 - n);
}
function globSafeResolvedPath(resolved, glob) {
let numEqual = 0;
const maxEqual = Math.min(resolved.length, glob.length);
while (numEqual < maxEqual && lastNthChar(resolved, numEqual) === lastNthChar(glob, numEqual)) {
numEqual += 1;
}
const staticPartEnd = resolved.length - numEqual;
const staticPart = resolved.slice(0, staticPartEnd);
const dynamicPart = resolved.slice(staticPartEnd);
return globSafePath(staticPart) + dynamicPart;
}
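// Resolves a user-written glob to an absolute one, preserving a leading "!"
// negation. "/" patterns are rooted at the project root, "./" and "../"
// patterns at the importer's directory, and bare "**" patterns pass through.
// Anything else (e.g. an aliased or "#" subpath-imports pattern) is resolved
// through resolveId; non-absolute results are rejected with an error asking
// for a "/" or "./" prefix.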
async function toAbsoluteGlob(glob, root, importer, resolveId) {
let pre = "";
if (glob[0] === "!") {
pre = "!";
glob = glob.slice(1);
}
root = globSafePath(root);
const dir = importer ? globSafePath(dirname(importer)) : root;
if (glob[0] === "/") return pre + posix$1.join(root, glob.slice(1));
if (glob.startsWith("./")) return pre + posix$1.join(dir, glob.slice(2));
if (glob.startsWith("../")) return pre + posix$1.join(dir, glob);
if (glob.startsWith("**")) return pre + glob;
const isSubImportsPattern = glob[0] === "#" && glob.includes("*");
const resolved = normalizePath$3(
await resolveId(glob, importer, {
custom: { "vite:import-glob": { isSubImportsPattern } }
}) || glob
);
if (isAbsolute$1(resolved)) {
return pre + globSafeResolvedPath(resolved, glob);
}
throw new Error(
`Invalid glob: "${glob}" (resolved: "${resolved}"). It must start with '/' or './'`
);
}
function getCommonBase(globsResolved) {
const bases = globsResolved.filter((g) => g[0] !== "!").map((glob) => {
let { base } = scan(glob);
if (posix$1.basename(base).includes(".")) base = posix$1.dirname(base);
return base;
});
if (!bases.length) return null;
let commonAncestor = "";
const dirS = bases[0].split("/");
for (let i = 0; i < dirS.length; i++) {
const candidate = dirS.slice(0, i + 1).join("/");
if (bases.every((base) => base.startsWith(candidate)))
commonAncestor = candidate;
else break;
}
if (!commonAncestor) commonAncestor = "/";
return commonAncestor;
}
function isVirtualModule(id) {
return id.startsWith("virtual:") || id[0] === "\0" || !id.includes("/");
}
var src = {exports: {}};
var browser = {exports: {}};
var debug$f = {exports: {}};
/**
* Helpers.
*/
var ms;
var hasRequiredMs;
function requireMs () {
if (hasRequiredMs) return ms;
hasRequiredMs = 1;
var s = 1000;
var m = s * 60;
var h = m * 60;
var d = h * 24;
var y = d * 365.25;
/**
* Parse or format the given `val`.
*
* Options:
*
* - `long` verbose formatting [false]
*
* @param {String|Number} val
* @param {Object} [options]
* @throws {Error} throw an error if val is not a non-empty string or a number
* @return {String|Number}
* @api public
*/
ms = function(val, options) {
options = options || {};
var type = typeof val;
if (type === 'string' && val.length > 0) {
return parse(val);
} else if (type === 'number' && isNaN(val) === false) {
return options.long ? fmtLong(val) : fmtShort(val);
}
throw new Error(
'val is not a non-empty string or a valid number. val=' +
JSON.stringify(val)
);
};
/**
* Parse the given `str` and return milliseconds.
*
* @param {String} str
* @return {Number}
* @api private
*/
function parse(str) {
str = String(str);
if (str.length > 100) {
return;
}
var match = /^((?:\d+)?\.?\d+) *(milliseconds?|msecs?|ms|seconds?|secs?|s|minutes?|mins?|m|hours?|hrs?|h|days?|d|years?|yrs?|y)?$/i.exec(
str
);
if (!match) {
return;
}
var n = parseFloat(match[1]);
var type = (match[2] || 'ms').toLowerCase();
switch (type) {
case 'years':
case 'year':
case 'yrs':
case 'yr':
case 'y':
return n * y;
case 'days':
case 'day':
case 'd':
return n * d;
case 'hours':
case 'hour':
case 'hrs':
case 'hr':
case 'h':
return n * h;
case 'minutes':
case 'minute':
case 'mins':
case 'min':
case 'm':
return n * m;
case 'seconds':
case 'second':
case 'secs':
case 'sec':
case 's':
return n * s;
case 'milliseconds':
case 'millisecond':
case 'msecs':
case 'msec':
case 'ms':
return n;
default:
return undefined;
}
}
/**
* Short format for `ms`.
*
* @param {Number} ms
* @return {String}
* @api private
*/
function fmtShort(ms) {
if (ms >= d) {
return Math.round(ms / d) + 'd';
}
if (ms >= h) {
return Math.round(ms / h) + 'h';
}
if (ms >= m) {
return Math.round(ms / m) + 'm';
}
if (ms >= s) {
return Math.round(ms / s) + 's';
}
return ms + 'ms';
}
/**
* Long format for `ms`.
*
* @param {Number} ms
* @return {String}
* @api private
*/
function fmtLong(ms) {
return plural(ms, d, 'day') ||
plural(ms, h, 'hour') ||
plural(ms, m, 'minute') ||
plural(ms, s, 'second') ||
ms + ' ms';
}
/**
* Pluralization helper.
*/
function plural(ms, n, name) {
if (ms < n) {
return;
}
if (ms < n * 1.5) {
return Math.floor(ms / n) + ' ' + name;
}
return Math.ceil(ms / n) + ' ' + name + 's';
}
return ms;
}
var hasRequiredDebug;
function requireDebug () {
if (hasRequiredDebug) return debug$f.exports;
hasRequiredDebug = 1;
(function (module, exports) {
/**
* This is the common logic for both the Node.js and web browser
* implementations of `debug()`.
*
* Expose `debug()` as the module.
*/
exports = module.exports = createDebug.debug = createDebug['default'] = createDebug;
exports.coerce = coerce;
exports.disable = disable;
exports.enable = enable;
exports.enabled = enabled;
exports.humanize = requireMs();
/**
* The currently active debug mode names, and names to skip.
*/
exports.names = [];
exports.skips = [];
/**
* Map of special "%n" handling functions, for the debug "format" argument.
*
* Valid key names are a single, lower or upper-case letter, i.e. "n" and "N".
*/
exports.formatters = {};
/**
* Previous log timestamp.
*/
var prevTime;
/**
* Select a color.
* @param {String} namespace
* @return {Number}
* @api private
*/
function selectColor(namespace) {
var hash = 0, i;
for (i in namespace) {
hash = ((hash << 5) - hash) + namespace.charCodeAt(i);
hash |= 0; // Convert to 32bit integer
}
return exports.colors[Math.abs(hash) % exports.colors.length];
}
/**
* Create a debugger with the given `namespace`.
*
* @param {String} namespace
* @return {Function}
* @api public
*/
function createDebug(namespace) {
function debug() {
// disabled?
if (!debug.enabled) return;
var self = debug;
// set `diff` timestamp
var curr = +new Date();
var ms = curr - (prevTime || curr);
self.diff = ms;
self.prev = prevTime;
self.curr = curr;
prevTime = curr;
// turn the `arguments` into a proper Array
var args = new Array(arguments.length);
for (var i = 0; i < args.length; i++) {
args[i] = arguments[i];
}
args[0] = exports.coerce(args[0]);
if ('string' !== typeof args[0]) {
// anything else let's inspect with %O
args.unshift('%O');
}
// apply any `formatters` transformations
var index = 0;
args[0] = args[0].replace(/%([a-zA-Z%])/g, function(match, format) {
// if we encounter an escaped % then don't increase the array index
if (match === '%%') return match;
index++;
var formatter = exports.formatters[format];
if ('function' === typeof formatter) {
var val = args[index];
match = formatter.call(self, val);
// now we need to remove `args[index]` since it's inlined in the `format`
args.splice(index, 1);
index--;
}
return match;
});
// apply env-specific formatting (colors, etc.)
exports.formatArgs.call(self, args);
var logFn = debug.log || exports.log || console.log.bind(console);
logFn.apply(self, args);
}
debug.namespace = namespace;
debug.enabled = exports.enabled(namespace);
debug.useColors = exports.useColors();
debug.color = selectColor(namespace);
// env-specific initialization logic for debug instances
if ('function' === typeof exports.init) {
exports.init(debug);
}
return debug;
}
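/*
 * Illustrative sketch (assumption, not upstream documentation): registering a custom
 * single-letter formatter and creating a namespaced logger with createDebug(). Output
 * is produced only while the 'app:server' namespace is enabled; the formatter name
 * and port are made up for the example.
 *
 *   exports.formatters.h = function (v) { return v.toString(16); };
 *   var log = createDebug('app:server');
 *   log('listening on port %h', 8080);   // "listening on port 1f90" (plus diff suffix)
 */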
/**
* Enables a debug mode by namespaces. This can include modes
* separated by a colon and wildcards.
*
* @param {String} namespaces
* @api public
*/
function enable(namespaces) {
exports.save(namespaces);
exports.names = [];
exports.skips = [];
var split = (typeof namespaces === 'string' ? namespaces : '').split(/[\s,]+/);
var len = split.length;
for (var i = 0; i < len; i++) {
if (!split[i]) continue; // ignore empty strings
namespaces = split[i].replace(/\*/g, '.*?');
if (namespaces[0] === '-') {
exports.skips.push(new RegExp('^' + namespaces.substr(1) + '$'));
} else {
exports.names.push(new RegExp('^' + namespaces + '$'));
}
}
}
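/*
 * Illustrative sketch (not part of the original module): what enable() stores for a
 * typical DEBUG-style pattern. '*' becomes '.*?' and a leading '-' marks a skip.
 *
 *   enable('app:*,-app:db');
 *   // exports.names => [ /^app:.*?$/ ]
 *   // exports.skips => [ /^app:db$/ ]
 *   enabled('app:server');   // => true
 *   enabled('app:db');       // => false
 */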
/**
* Disable debug output.
*
* @api public
*/
function disable() {
exports.enable('');
}
/**
* Returns true if the given mode name is enabled, false otherwise.
*
* @param {String} name
* @return {Boolean}
* @api public
*/
function enabled(name) {
var i, len;
for (i = 0, len = exports.skips.length; i < len; i++) {
if (exports.skips[i].test(name)) {
return false;
}
}
for (i = 0, len = exports.names.length; i < len; i++) {
if (exports.names[i].test(name)) {
return true;
}
}
return false;
}
/**
* Coerce `val`.
*
* @param {Mixed} val
* @return {Mixed}
* @api private
*/
function coerce(val) {
if (val instanceof Error) return val.stack || val.message;
return val;
}
} (debug$f, debug$f.exports));
return debug$f.exports;
}
/**
* This is the web browser implementation of `debug()`.
*
* Expose `debug()` as the module.
*/
var hasRequiredBrowser;
function requireBrowser () {
if (hasRequiredBrowser) return browser.exports;
hasRequiredBrowser = 1;
(function (module, exports) {
exports = module.exports = requireDebug();
exports.log = log;
exports.formatArgs = formatArgs;
exports.save = save;
exports.load = load;
exports.useColors = useColors;
exports.storage = 'undefined' != typeof chrome
&& 'undefined' != typeof chrome.storage
? chrome.storage.local
: localstorage();
/**
* Colors.
*/
exports.colors = [
'lightseagreen',
'forestgreen',
'goldenrod',
'dodgerblue',
'darkorchid',
'crimson'
];
/**
* Currently only WebKit-based Web Inspectors, Firefox >= v31,
* and the Firebug extension (any Firefox version) are known
* to support "%c" CSS customizations.
*
* TODO: add a `localStorage` variable to explicitly enable/disable colors
*/
function useColors() {
// NB: In an Electron preload script, document will be defined but not fully
// initialized. Since we know we're in Chrome, we'll just detect this case
// explicitly
if (typeof window !== 'undefined' && window.process && window.process.type === 'renderer') {
return true;
}
// is webkit? http://stackoverflow.com/a/16459606/376773
// document is undefined in react-native: https://github.com/facebook/react-native/pull/1632
return (typeof document !== 'undefined' && document.documentElement && document.documentElement.style && document.documentElement.style.WebkitAppearance) ||
// is firebug? http://stackoverflow.com/a/398120/376773
(typeof window !== 'undefined' && window.console && (window.console.firebug || (window.console.exception && window.console.table))) ||
// is firefox >= v31?
// https://developer.mozilla.org/en-US/docs/Tools/Web_Console#Styling_messages
(typeof navigator !== 'undefined' && navigator.userAgent && navigator.userAgent.toLowerCase().match(/firefox\/(\d+)/) && parseInt(RegExp.$1, 10) >= 31) ||
// double check webkit in userAgent just in case we are in a worker
(typeof navigator !== 'undefined' && navigator.userAgent && navigator.userAgent.toLowerCase().match(/applewebkit\/(\d+)/));
}
/**
* Map %j to `JSON.stringify()`, since no Web Inspectors do that by default.
*/
exports.formatters.j = function(v) {
try {
return JSON.stringify(v);
} catch (err) {
return '[UnexpectedJSONParseError]: ' + err.message;
}
};
/**
* Colorize log arguments if enabled.
*
* @api public
*/
function formatArgs(args) {
var useColors = this.useColors;
args[0] = (useColors ? '%c' : '')
+ this.namespace
+ (useColors ? ' %c' : ' ')
+ args[0]
+ (useColors ? '%c ' : ' ')
+ '+' + exports.humanize(this.diff);
if (!useColors) return;
var c = 'color: ' + this.color;
args.splice(1, 0, c, 'color: inherit');
// the final "%c" is somewhat tricky, because there could be other
// arguments passed either before or after the %c, so we need to
// figure out the correct index to insert the CSS into
var index = 0;
var lastC = 0;
args[0].replace(/%[a-zA-Z%]/g, function(match) {
if ('%%' === match) return;
index++;
if ('%c' === match) {
// we only are interested in the *last* %c
// (the user may have provided their own)
lastC = index;
}
});
args.splice(lastC, 0, c);
}
/**
* Invokes `console.log()` when available.
* No-op when `console.log` is not a "function".
*
* @api public
*/
function log() {
// this hackery is required for IE8/9, where
// the `console.log` function doesn't have 'apply'
return 'object' === typeof console
&& console.log
&& Function.prototype.apply.call(console.log, console, arguments);
}
/**
* Save `namespaces`.
*
* @param {String} namespaces
* @api private
*/
function save(namespaces) {
try {
if (null == namespaces) {
exports.storage.removeItem('debug');
} else {
exports.storage.debug = namespaces;
}
} catch(e) {}
}
/**
* Load `namespaces`.
*
* @return {String} returns the previously persisted debug modes
* @api private
*/
function load() {
var r;
try {
r = exports.storage.debug;
} catch(e) {}
// If debug isn't set in LS, and we're in Electron, try to load $DEBUG
if (!r && typeof process !== 'undefined' && 'env' in process) {
r = process.env.DEBUG;
}
return r;
}
/**
* Enable namespaces listed in `localStorage.debug` initially.
*/
exports.enable(load());
/**
* Attempts to return the window.localStorage object.
*
* This is necessary because Safari throws
* when a user disables cookies/localStorage
* and you attempt to access it.
*
* @return {LocalStorage}
* @api private
*/
function localstorage() {
try {
return window.localStorage;
} catch (e) {}
}
} (browser, browser.exports));
return browser.exports;
}
var node = {exports: {}};
/**
* Module dependencies.
*/
var hasRequiredNode;
function requireNode () {
if (hasRequiredNode) return node.exports;
hasRequiredNode = 1;
(function (module, exports) {
var tty = require$$0$3;
var util = require$$0$5;
/**
* This is the Node.js implementation of `debug()`.
*
* Expose `debug()` as the module.
*/
exports = module.exports = requireDebug();
exports.init = init;
exports.log = log;
exports.formatArgs = formatArgs;
exports.save = save;
exports.load = load;
exports.useColors = useColors;
/**
* Colors.
*/
exports.colors = [6, 2, 3, 4, 5, 1];
/**
* Build up the default `inspectOpts` object from the environment variables.
*
* $ DEBUG_COLORS=no DEBUG_DEPTH=10 DEBUG_SHOW_HIDDEN=enabled node script.js
*/
exports.inspectOpts = Object.keys(process.env).filter(function (key) {
return /^debug_/i.test(key);
}).reduce(function (obj, key) {
// camel-case
var prop = key
.substring(6)
.toLowerCase()
.replace(/_([a-z])/g, function (_, k) { return k.toUpperCase() });
// coerce string value into JS value
var val = process.env[key];
if (/^(yes|on|true|enabled)$/i.test(val)) val = true;
else if (/^(no|off|false|disabled)$/i.test(val)) val = false;
else if (val === 'null') val = null;
else val = Number(val);
obj[prop] = val;
return obj;
}, {});
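/*
 * Illustrative sketch (not part of the original module): environment variables and
 * the inspectOpts object the reduce() above produces from them.
 *
 *   // DEBUG_COLORS=no DEBUG_DEPTH=10 DEBUG_SHOW_HIDDEN=enabled node script.js
 *   // exports.inspectOpts => { colors: false, depth: 10, showHidden: true }
 */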
/**
* The file descriptor to write the `debug()` calls to.
* Set the `DEBUG_FD` env variable to override with another value. i.e.:
*
* $ DEBUG_FD=3 node script.js 3>debug.log
*/
var fd = parseInt(process.env.DEBUG_FD, 10) || 2;
if (1 !== fd && 2 !== fd) {
util.deprecate(function(){}, 'except for stderr(2) and stdout(1), any other usage of DEBUG_FD is deprecated. Override debug.log if you want to use a different log function (https://git.io/debug_fd)')();
}
var stream = 1 === fd ? process.stdout :
2 === fd ? process.stderr :
createWritableStdioStream(fd);
/**
* Is stdout a TTY? Colored output is enabled when `true`.
*/
function useColors() {
return 'colors' in exports.inspectOpts
? Boolean(exports.inspectOpts.colors)
: tty.isatty(fd);
}
/**
* Map %o to `util.inspect()`, all on a single line.
*/
exports.formatters.o = function(v) {
this.inspectOpts.colors = this.useColors;
return util.inspect(v, this.inspectOpts)
.split('\n').map(function(str) {
return str.trim()
}).join(' ');
};
/**
* Map %o to `util.inspect()`, allowing multiple lines if needed.
*/
exports.formatters.O = function(v) {
this.inspectOpts.colors = this.useColors;
return util.inspect(v, this.inspectOpts);
};
/**
* Adds ANSI color escape codes if enabled.
*
* @api public
*/
function formatArgs(args) {
var name = this.namespace;
var useColors = this.useColors;
if (useColors) {
var c = this.color;
var prefix = ' \u001b[3' + c + ';1m' + name + ' ' + '\u001b[0m';
args[0] = prefix + args[0].split('\n').join('\n' + prefix);
args.push('\u001b[3' + c + 'm+' + exports.humanize(this.diff) + '\u001b[0m');
} else {
args[0] = new Date().toUTCString()
+ ' ' + name + ' ' + args[0];
}
}
/**
* Invokes `util.format()` with the specified arguments and writes to `stream`.
*/
function log() {
return stream.write(util.format.apply(util, arguments) + '\n');
}
/**
* Save `namespaces`.
*
* @param {String} namespaces
* @api private
*/
function save(namespaces) {
if (null == namespaces) {
// If you set a process.env field to null or undefined, it gets cast to the
// string 'null' or 'undefined'. Just delete instead.
delete process.env.DEBUG;
} else {
process.env.DEBUG = namespaces;
}
}
/**
* Load `namespaces`.
*
* @return {String} returns the previously persisted debug modes
* @api private
*/
function load() {
return process.env.DEBUG;
}
/**
* Copied from `node/src/node.js`.
*
* XXX: It's lame that node doesn't expose this API out-of-the-box. It also
* relies on the undocumented `tty_wrap.guessHandleType()` which is also lame.
*/
function createWritableStdioStream (fd) {
var stream;
var tty_wrap = process.binding('tty_wrap');
// Note stream._type is used for test-module-load-list.js
switch (tty_wrap.guessHandleType(fd)) {
case 'TTY':
stream = new tty.WriteStream(fd);
stream._type = 'tty';
// Hack to have stream not keep the event loop alive.
// See https://github.com/joyent/node/issues/1726
if (stream._handle && stream._handle.unref) {
stream._handle.unref();
}
break;
case 'FILE':
var fs = require$$0__default;
stream = new fs.SyncWriteStream(fd, { autoClose: false });
stream._type = 'fs';
break;
case 'PIPE':
case 'TCP':
var net = require$$4$1;
stream = new net.Socket({
fd: fd,
readable: false,
writable: true
});
// FIXME Should probably have an option in net.Socket to create a
// stream from an existing fd which is writable only. But for now
// we'll just add this hack and set the `readable` member to false.
// Test: ./node test/fixtures/echo.js < /etc/passwd
stream.readable = false;
stream.read = null;
stream._type = 'pipe';
// FIXME Hack to have stream not keep the event loop alive.
// See https://github.com/joyent/node/issues/1726
if (stream._handle && stream._handle.unref) {
stream._handle.unref();
}
break;
default:
// Probably an error in uv_guess_handle()
throw new Error('Implement me. Unknown stream file type!');
}
// For supporting legacy API we put the FD here.
stream.fd = fd;
stream._isStdio = true;
return stream;
}
/**
* Init logic for `debug` instances.
*
* Create a new `inspectOpts` object in case `useColors` is set
* differently for a particular `debug` instance.
*/
function init (debug) {
debug.inspectOpts = {};
var keys = Object.keys(exports.inspectOpts);
for (var i = 0; i < keys.length; i++) {
debug.inspectOpts[keys[i]] = exports.inspectOpts[keys[i]];
}
}
/**
* Enable namespaces listed in `process.env.DEBUG` initially.
*/
exports.enable(load());
} (node, node.exports));
return node.exports;
}
/**
* Detect Electron renderer process, which is node, but we should
* treat as a browser.
*/
if (typeof process !== 'undefined' && process.type === 'renderer') {
src.exports = requireBrowser();
} else {
src.exports = requireNode();
}
var srcExports = src.exports;
/*!
* encodeurl
* Copyright(c) 2016 Douglas Christopher Wilson
* MIT Licensed
*/
/**
* Module exports.
* @public
*/
var encodeurl = encodeUrl$1;
/**
* RegExp to match non-URL code points, *after* encoding (i.e. not including "%")
* and including invalid escape sequences.
* @private
*/
var ENCODE_CHARS_REGEXP = /(?:[^\x21\x25\x26-\x3B\x3D\x3F-\x5B\x5D\x5F\x61-\x7A\x7E]|%(?:[^0-9A-Fa-f]|[0-9A-Fa-f][^0-9A-Fa-f]|$))+/g;
/**
* RegExp to match unmatched surrogate pair.
* @private
*/
var UNMATCHED_SURROGATE_PAIR_REGEXP = /(^|[^\uD800-\uDBFF])[\uDC00-\uDFFF]|[\uD800-\uDBFF]([^\uDC00-\uDFFF]|$)/g;
/**
* String to replace unmatched surrogate pair with.
* @private
*/
var UNMATCHED_SURROGATE_PAIR_REPLACE = '$1\uFFFD$2';
/**
* Encode a URL to a percent-encoded form, excluding already-encoded sequences.
*
* This function will take an already-encoded URL and encode all the non-URL
* code points. This function will not encode the "%" character unless it is
* not part of a valid sequence (`%20` will be left as-is, but `%foo` will
* be encoded as `%25foo`).
*
* This encode is meant to be "safe" and does not throw errors. It will try as
* hard as it can to properly encode the given URL, including replacing any raw,
* unpaired surrogate pairs with the Unicode replacement character prior to
* encoding.
*
* @param {string} url
* @return {string}
* @public
*/
function encodeUrl$1 (url) {
return String(url)
.replace(UNMATCHED_SURROGATE_PAIR_REGEXP, UNMATCHED_SURROGATE_PAIR_REPLACE)
.replace(ENCODE_CHARS_REGEXP, encodeURI)
}
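/*
 * Illustrative sketch (not part of the original module): encodeUrl$1() leaves valid
 * percent-escapes alone, encodes stray characters, and escapes broken "%" sequences.
 *
 *   encodeUrl$1('http://example.com/a b%20c/%zz')
 *   // => 'http://example.com/a%20b%20c/%25zz'
 */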
/*!
* escape-html
* Copyright(c) 2012-2013 TJ Holowaychuk
* Copyright(c) 2015 Andreas Lubbe
* Copyright(c) 2015 Tiancheng "Timothy" Gu
* MIT Licensed
*/
/**
* Module variables.
* @private
*/
var matchHtmlRegExp = /["'&<>]/;
/**
* Module exports.
* @public
*/
var escapeHtml_1 = escapeHtml$1;
/**
* Escape special characters in the given string of html.
*
* @param {string} string The string to escape for inserting into HTML
* @return {string}
* @public
*/
function escapeHtml$1(string) {
var str = '' + string;
var match = matchHtmlRegExp.exec(str);
if (!match) {
return str;
}
var escape;
var html = '';
var index = 0;
var lastIndex = 0;
for (index = match.index; index < str.length; index++) {
switch (str.charCodeAt(index)) {
case 34: // "
escape = '&quot;';
break;
case 38: // &
escape = '&amp;';
break;
case 39: // '
escape = '&#39;';
break;
case 60: // <
escape = '&lt;';
break;
case 62: // >
escape = '&gt;';
break;
default:
continue;
}
if (lastIndex !== index) {
html += str.substring(lastIndex, index);
}
lastIndex = index + 1;
html += escape;
}
return lastIndex !== index
? html + str.substring(lastIndex, index)
: html;
}
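/*
 * Illustrative sketch (not part of the original module): escapeHtml$1() replaces the
 * five characters matched by matchHtmlRegExp with their HTML entities.
 *
 *   escapeHtml$1('<a href="x">Tom & "Jerry"</a>')
 *   // => '&lt;a href=&quot;x&quot;&gt;Tom &amp; &quot;Jerry&quot;&lt;/a&gt;'
 */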
var escapeHtml$2 = /*@__PURE__*/getDefaultExportFromCjs(escapeHtml_1);
var onFinished$2 = {exports: {}};
/*!
* ee-first
* Copyright(c) 2014 Jonathan Ong
* MIT Licensed
*/
/**
* Module exports.
* @public
*/
var eeFirst = first$1;
/**
* Get the first event in a set of event emitters and event pairs.
*
* @param {array} stuff
* @param {function} done
* @public
*/
function first$1(stuff, done) {
if (!Array.isArray(stuff))
throw new TypeError('arg must be an array of [ee, events...] arrays')
var cleanups = [];
for (var i = 0; i < stuff.length; i++) {
var arr = stuff[i];
if (!Array.isArray(arr) || arr.length < 2)
throw new TypeError('each array member must be [ee, events...]')
var ee = arr[0];
for (var j = 1; j < arr.length; j++) {
var event = arr[j];
var fn = listener(event, callback);
// listen to the event
ee.on(event, fn);
// push this listener to the list of cleanups
cleanups.push({
ee: ee,
event: event,
fn: fn,
});
}
}
function callback() {
cleanup();
done.apply(null, arguments);
}
function cleanup() {
var x;
for (var i = 0; i < cleanups.length; i++) {
x = cleanups[i];
x.ee.removeListener(x.event, x.fn);
}
}
function thunk(fn) {
done = fn;
}
thunk.cancel = cleanup;
return thunk
}
/**
* Create the event listener.
* @private
*/
function listener(event, done) {
return function onevent(arg1) {
var args = new Array(arguments.length);
var ee = this;
var err = event === 'error'
? arg1
: null;
// copy args to prevent arguments escaping scope
for (var i = 0; i < args.length; i++) {
args[i] = arguments[i];
}
done(err, ee, event, args);
}
}
/*!
* on-finished
* Copyright(c) 2013 Jonathan Ong
* Copyright(c) 2014 Douglas Christopher Wilson
* MIT Licensed
*/
/**
* Module exports.
* @public
*/
onFinished$2.exports = onFinished$1;
onFinished$2.exports.isFinished = isFinished$1;
/**
* Module dependencies.
* @private
*/
var first = eeFirst;
/**
* Variables.
* @private
*/
/* istanbul ignore next */
var defer$2 = typeof setImmediate === 'function'
? setImmediate
: function(fn){ process.nextTick(fn.bind.apply(fn, arguments)); };
/**
* Invoke callback when the response has finished, useful for
* cleaning up resources afterwards.
*
* @param {object} msg
* @param {function} listener
* @return {object}
* @public
*/
function onFinished$1(msg, listener) {
if (isFinished$1(msg) !== false) {
defer$2(listener, null, msg);
return msg
}
// attach the listener to the message
attachListener(msg, listener);
return msg
}
/**
* Determine if message is already finished.
*
* @param {object} msg
* @return {boolean}
* @public
*/
function isFinished$1(msg) {
var socket = msg.socket;
if (typeof msg.finished === 'boolean') {
// OutgoingMessage
return Boolean(msg.finished || (socket && !socket.writable))
}
if (typeof msg.complete === 'boolean') {
// IncomingMessage
return Boolean(msg.upgrade || !socket || !socket.readable || (msg.complete && !msg.readable))
}
// don't know
return undefined
}
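/*
 * Illustrative sketch (not part of the original module): typical use inside a request
 * handler. The callback fires once, either immediately (if the message is already
 * finished) or on the first relevant message/socket event.
 *
 *   onFinished$1(res, function (err) {
 *     // release per-request resources here
 *   });
 *   isFinished$1(res);   // => false until the response has ended
 */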
/**
* Attach a finished listener to the message.
*
* @param {object} msg
* @param {function} callback
* @private
*/
function attachFinishedListener(msg, callback) {
var eeMsg;
var eeSocket;
var finished = false;
function onFinish(error) {
eeMsg.cancel();
eeSocket.cancel();
finished = true;
callback(error);
}
// finished on first message event
eeMsg = eeSocket = first([[msg, 'end', 'finish']], onFinish);
function onSocket(socket) {
// remove listener
msg.removeListener('socket', onSocket);
if (finished) return
if (eeMsg !== eeSocket) return
// finished on first socket event
eeSocket = first([[socket, 'error', 'close']], onFinish);
}
if (msg.socket) {
// socket already assigned
onSocket(msg.socket);
return
}
// wait for socket to be assigned
msg.on('socket', onSocket);
if (msg.socket === undefined) {
// node.js 0.8 patch
patchAssignSocket(msg, onSocket);
}
}
/**
* Attach the listener to the message.
*
* @param {object} msg
* @return {function}
* @private
*/
function attachListener(msg, listener) {
var attached = msg.__onFinished;
// create a private single listener with queue
if (!attached || !attached.queue) {
attached = msg.__onFinished = createListener(msg);
attachFinishedListener(msg, attached);
}
attached.queue.push(listener);
}
/**
* Create listener on message.
*
* @param {object} msg
* @return {function}
* @private
*/
function createListener(msg) {
function listener(err) {
if (msg.__onFinished === listener) msg.__onFinished = null;
if (!listener.queue) return
var queue = listener.queue;
listener.queue = null;
for (var i = 0; i < queue.length; i++) {
queue[i](err, msg);
}
}
listener.queue = [];
return listener
}
/**
* Patch ServerResponse.prototype.assignSocket for node.js 0.8.
*
* @param {ServerResponse} res
* @param {function} callback
* @private
*/
function patchAssignSocket(res, callback) {
var assignSocket = res.assignSocket;
if (typeof assignSocket !== 'function') return
// res.on('socket', callback) is broken in 0.8
res.assignSocket = function _assignSocket(socket) {
assignSocket.call(this, socket);
callback(socket);
};
}
var onFinishedExports = onFinished$2.exports;
var parseurl$1 = {exports: {}};
/*!
* parseurl
* Copyright(c) 2014 Jonathan Ong
* Copyright(c) 2014-2017 Douglas Christopher Wilson
* MIT Licensed
*/
/**
* Module dependencies.
* @private
*/
var url$3 = require$$0$9;
var parse$8 = url$3.parse;
var Url = url$3.Url;
/**
* Module exports.
* @public
*/
parseurl$1.exports = parseurl;
parseurl$1.exports.original = originalurl;
/**
* Parse the `req` url with memoization.
*
* @param {ServerRequest} req
* @return {Object}
* @public
*/
function parseurl (req) {
var url = req.url;
if (url === undefined) {
// URL is undefined
return undefined
}
var parsed = req._parsedUrl;
if (fresh(url, parsed)) {
// Return cached URL parse
return parsed
}
// Parse the URL
parsed = fastparse(url);
parsed._raw = url;
return (req._parsedUrl = parsed)
}
/**
* Parse the `req` original url with fallback and memoization.
*
* @param {ServerRequest} req
* @return {Object}
* @public
*/
function originalurl (req) {
var url = req.originalUrl;
if (typeof url !== 'string') {
// Fallback
return parseurl(req)
}
var parsed = req._parsedOriginalUrl;
if (fresh(url, parsed)) {
// Return cached URL parse
return parsed
}
// Parse the URL
parsed = fastparse(url);
parsed._raw = url;
return (req._parsedOriginalUrl = parsed)
}
/**
* Parse the `str` url with fast-path short-cut.
*
* @param {string} str
* @return {Object}
* @private
*/
function fastparse (str) {
if (typeof str !== 'string' || str.charCodeAt(0) !== 0x2f /* / */) {
return parse$8(str)
}
var pathname = str;
var query = null;
var search = null;
// This takes the regexp from https://github.com/joyent/node/pull/7878
// Which is /^(\/[^?#\s]*)(\?[^#\s]*)?$/
// And unrolls it into a for loop
for (var i = 1; i < str.length; i++) {
switch (str.charCodeAt(i)) {
case 0x3f: /* ? */
if (search === null) {
pathname = str.substring(0, i);
query = str.substring(i + 1);
search = str.substring(i);
}
break
case 0x09: /* \t */
case 0x0a: /* \n */
case 0x0c: /* \f */
case 0x0d: /* \r */
case 0x20: /* */
case 0x23: /* # */
case 0xa0:
case 0xfeff:
return parse$8(str)
}
}
var url = Url !== undefined
? new Url()
: {};
url.path = str;
url.href = str;
url.pathname = pathname;
if (search !== null) {
url.query = query;
url.search = search;
}
return url
}
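/*
 * Illustrative sketch (not part of the original module): the fast path handles plain
 * origin-form URLs without calling url.parse(); anything containing "#", whitespace,
 * or other special characters falls back to the native parser.
 *
 *   fastparse('/product/list?page=2')
 *   // => { path: '/product/list?page=2', href: '/product/list?page=2',
 *   //      pathname: '/product/list', query: 'page=2', search: '?page=2' }
 */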
/**
* Determine if parsed is still fresh for url.
*
* @param {string} url
* @param {object} parsedUrl
* @return {boolean}
* @private
*/
function fresh (url, parsedUrl) {
return typeof parsedUrl === 'object' &&
parsedUrl !== null &&
(Url === undefined || parsedUrl instanceof Url) &&
parsedUrl._raw === url
}
var parseurlExports = parseurl$1.exports;
var require$$0$1 = {
"100": "Continue",
"101": "Switching Protocols",
"102": "Processing",
"103": "Early Hints",
"200": "OK",
"201": "Created",
"202": "Accepted",
"203": "Non-Authoritative Information",
"204": "No Content",
"205": "Reset Content",
"206": "Partial Content",
"207": "Multi-Status",
"208": "Already Reported",
"226": "IM Used",
"300": "Multiple Choices",
"301": "Moved Permanently",
"302": "Found",
"303": "See Other",
"304": "Not Modified",
"305": "Use Proxy",
"306": "(Unused)",
"307": "Temporary Redirect",
"308": "Permanent Redirect",
"400": "Bad Request",
"401": "Unauthorized",
"402": "Payment Required",
"403": "Forbidden",
"404": "Not Found",
"405": "Method Not Allowed",
"406": "Not Acceptable",
"407": "Proxy Authentication Required",
"408": "Request Timeout",
"409": "Conflict",
"410": "Gone",
"411": "Length Required",
"412": "Precondition Failed",
"413": "Payload Too Large",
"414": "URI Too Long",
"415": "Unsupported Media Type",
"416": "Range Not Satisfiable",
"417": "Expectation Failed",
"418": "I'm a teapot",
"421": "Misdirected Request",
"422": "Unprocessable Entity",
"423": "Locked",
"424": "Failed Dependency",
"425": "Unordered Collection",
"426": "Upgrade Required",
"428": "Precondition Required",
"429": "Too Many Requests",
"431": "Request Header Fields Too Large",
"451": "Unavailable For Legal Reasons",
"500": "Internal Server Error",
"501": "Not Implemented",
"502": "Bad Gateway",
"503": "Service Unavailable",
"504": "Gateway Timeout",
"505": "HTTP Version Not Supported",
"506": "Variant Also Negotiates",
"507": "Insufficient Storage",
"508": "Loop Detected",
"509": "Bandwidth Limit Exceeded",
"510": "Not Extended",
"511": "Network Authentication Required"
};
/*!
* statuses
* Copyright(c) 2014 Jonathan Ong
* Copyright(c) 2016 Douglas Christopher Wilson
* MIT Licensed
*/
/**
* Module dependencies.
* @private
*/
var codes = require$$0$1;
/**
* Module exports.
* @public
*/
var statuses$1 = status;
// status code to message map
status.STATUS_CODES = codes;
// array of status codes
status.codes = populateStatusesMap(status, codes);
// status codes for redirects
status.redirect = {
300: true,
301: true,
302: true,
303: true,
305: true,
307: true,
308: true
};
// status codes for empty bodies
status.empty = {
204: true,
205: true,
304: true
};
// status codes for when you should retry the request
status.retry = {
502: true,
503: true,
504: true
};
/**
* Populate the statuses map for given codes.
* @private
*/
function populateStatusesMap (statuses, codes) {
var arr = [];
Object.keys(codes).forEach(function forEachCode (code) {
var message = codes[code];
var status = Number(code);
// Populate properties
statuses[status] = message;
statuses[message] = status;
statuses[message.toLowerCase()] = status;
// Add to array
arr.push(status);
});
return arr
}
/**
* Get the status code.
*
* Given a number, this will throw if it is not a known status
* code, otherwise the code will be returned. Given a string,
* the string will be parsed for a number and return the code
* if valid, otherwise will lookup the code assuming this is
* the status message.
*
* @param {string|number} code
* @returns {number}
* @public
*/
function status (code) {
if (typeof code === 'number') {
if (!status[code]) throw new Error('invalid status code: ' + code)
return code
}
if (typeof code !== 'string') {
throw new TypeError('code must be a number or string')
}
// '403'
var n = parseInt(code, 10);
if (!isNaN(n)) {
if (!status[n]) throw new Error('invalid status code: ' + n)
return n
}
n = status[code.toLowerCase()];
if (!n) throw new Error('invalid status message: "' + code + '"')
return n
}
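/*
 * Illustrative sketch (not part of the original module): the three lookup forms the
 * status() function above accepts.
 *
 *   status(403)          // => 403
 *   status('403')        // => 403
 *   status('forbidden')  // => 403
 *   status(699)          // throws Error('invalid status code: 699')
 */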
/*!
* unpipe
* Copyright(c) 2015 Douglas Christopher Wilson
* MIT Licensed
*/
/**
* Module exports.
* @public
*/
var unpipe_1 = unpipe$1;
/**
* Determine if there are Node.js pipe-like data listeners.
* @private
*/
function hasPipeDataListeners(stream) {
var listeners = stream.listeners('data');
for (var i = 0; i < listeners.length; i++) {
if (listeners[i].name === 'ondata') {
return true
}
}
return false
}
/**
* Unpipe a stream from all destinations.
*
* @param {object} stream
* @public
*/
function unpipe$1(stream) {
if (!stream) {
throw new TypeError('argument stream is required')
}
if (typeof stream.unpipe === 'function') {
// new-style
stream.unpipe();
return
}
// Node.js 0.8 hack
if (!hasPipeDataListeners(stream)) {
return
}
var listener;
var listeners = stream.listeners('close');
for (var i = 0; i < listeners.length; i++) {
listener = listeners[i];
if (listener.name !== 'cleanup' && listener.name !== 'onclose') {
continue
}
// invoke the listener
listener.call(stream);
}
}
/*!
* finalhandler
* Copyright(c) 2014-2017 Douglas Christopher Wilson
* MIT Licensed
*/
/**
* Module dependencies.
* @private
*/
var debug$e = srcExports('finalhandler');
var encodeUrl = encodeurl;
var escapeHtml = escapeHtml_1;
var onFinished = onFinishedExports;
var parseUrl$2 = parseurlExports;
var statuses = statuses$1;
var unpipe = unpipe_1;
/**
* Module variables.
* @private
*/
var DOUBLE_SPACE_REGEXP = /\x20{2}/g;
var NEWLINE_REGEXP = /\n/g;
/* istanbul ignore next */
var defer$1 = typeof setImmediate === 'function'
? setImmediate
: function (fn) { process.nextTick(fn.bind.apply(fn, arguments)); };
var isFinished = onFinished.isFinished;
/**
* Create a minimal HTML document.
*
* @param {string} message
* @private
*/
function createHtmlDocument (message) {
var body = escapeHtml(message)
.replace(NEWLINE_REGEXP, '<br>')
.replace(DOUBLE_SPACE_REGEXP, ' &nbsp;');
return '<!DOCTYPE html>\n' +
'<html lang="en">\n' +
'<head>\n' +
'<meta charset="utf-8">\n' +
'<title>Error</title>\n' +
'</head>\n' +
'<body>\n' +
'<pre>' + body + '</pre>\n' +
'</body>\n' +
'</html>\n'
}
/**
* Module exports.
* @public
*/
var finalhandler_1 = finalhandler$1;
/**
* Create a function to handle the final response.
*
* @param {Request} req
* @param {Response} res
* @param {Object} [options]
* @return {Function}
* @public
*/
function finalhandler$1 (req, res, options) {
var opts = options || {};
// get environment
var env = opts.env || process.env.NODE_ENV || 'development';
// get error callback
var onerror = opts.onerror;
return function (err) {
var headers;
var msg;
var status;
// ignore 404 on in-flight response
if (!err && headersSent(res)) {
debug$e('cannot 404 after headers sent');
return
}
// unhandled error
if (err) {
// respect status code from error
status = getErrorStatusCode(err);
if (status === undefined) {
// fallback to status code on response
status = getResponseStatusCode(res);
} else {
// respect headers from error
headers = getErrorHeaders(err);
}
// get error message
msg = getErrorMessage(err, status, env);
} else {
// not found
status = 404;
msg = 'Cannot ' + req.method + ' ' + encodeUrl(getResourceName(req));
}
debug$e('default %s', status);
// schedule onerror callback
if (err && onerror) {
defer$1(onerror, err, req, res);
}
// cannot actually respond
if (headersSent(res)) {
debug$e('cannot %d after headers sent', status);
req.socket.destroy();
return
}
// send response
send$2(req, res, status, headers, msg);
}
}
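/*
 * Illustrative sketch (not part of the original module): finalhandler$1() as the
 * terminal handler of a bare HTTP server. Calling done() with no argument sends a
 * 404; done(err) sends err.status / err.statusCode, falling back to 500.
 *
 *   var server = createServer$3(function (req, res) {
 *     var done = finalhandler$1(req, res, { env: 'development' });
 *     done();   // no router matched, respond with 404
 *   });
 */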
/**
* Get headers from Error object.
*
* @param {Error} err
* @return {object}
* @private
*/
function getErrorHeaders (err) {
if (!err.headers || typeof err.headers !== 'object') {
return undefined
}
var headers = Object.create(null);
var keys = Object.keys(err.headers);
for (var i = 0; i < keys.length; i++) {
var key = keys[i];
headers[key] = err.headers[key];
}
return headers
}
/**
* Get message from Error object, fallback to status message.
*
* @param {Error} err
* @param {number} status
* @param {string} env
* @return {string}
* @private
*/
function getErrorMessage (err, status, env) {
var msg;
if (env !== 'production') {
// use err.stack, which typically includes err.message
msg = err.stack;
// fallback to err.toString() when possible
if (!msg && typeof err.toString === 'function') {
msg = err.toString();
}
}
return msg || statuses[status]
}
/**
* Get status code from Error object.
*
* @param {Error} err
* @return {number}
* @private
*/
function getErrorStatusCode (err) {
// check err.status
if (typeof err.status === 'number' && err.status >= 400 && err.status < 600) {
return err.status
}
// check err.statusCode
if (typeof err.statusCode === 'number' && err.statusCode >= 400 && err.statusCode < 600) {
return err.statusCode
}
return undefined
}
/**
* Get resource name for the request.
*
* This is typically just the original pathname of the request
* but will fall back to "resource" if that cannot be determined.
*
* @param {IncomingMessage} req
* @return {string}
* @private
*/
function getResourceName (req) {
try {
return parseUrl$2.original(req).pathname
} catch (e) {
return 'resource'
}
}
/**
* Get status code from response.
*
* @param {OutgoingMessage} res
* @return {number}
* @private
*/
function getResponseStatusCode (res) {
var status = res.statusCode;
// default status code to 500 if outside valid range
if (typeof status !== 'number' || status < 400 || status > 599) {
status = 500;
}
return status
}
/**
* Determine if the response headers have been sent.
*
* @param {object} res
* @returns {boolean}
* @private
*/
function headersSent (res) {
return typeof res.headersSent !== 'boolean'
? Boolean(res._header)
: res.headersSent
}
/**
* Send response.
*
* @param {IncomingMessage} req
* @param {OutgoingMessage} res
* @param {number} status
* @param {object} headers
* @param {string} message
* @private
*/
function send$2 (req, res, status, headers, message) {
function write () {
// response body
var body = createHtmlDocument(message);
// response status
res.statusCode = status;
res.statusMessage = statuses[status];
// response headers
setHeaders(res, headers);
// security headers
res.setHeader('Content-Security-Policy', "default-src 'none'");
res.setHeader('X-Content-Type-Options', 'nosniff');
// standard headers
res.setHeader('Content-Type', 'text/html; charset=utf-8');
res.setHeader('Content-Length', Buffer.byteLength(body, 'utf8'));
if (req.method === 'HEAD') {
res.end();
return
}
res.end(body, 'utf8');
}
if (isFinished(req)) {
write();
return
}
// unpipe everything from the request
unpipe(req);
// flush the request
onFinished(req, write);
req.resume();
}
/**
* Set response headers from an object.
*
* @param {OutgoingMessage} res
* @param {object} headers
* @private
*/
function setHeaders (res, headers) {
if (!headers) {
return
}
var keys = Object.keys(headers);
for (var i = 0; i < keys.length; i++) {
var key = keys[i];
res.setHeader(key, headers[key]);
}
}
var utilsMerge = {exports: {}};
/**
* Merge object b with object a.
*
* var a = { foo: 'bar' }
* , b = { bar: 'baz' };
*
* merge(a, b);
* // => { foo: 'bar', bar: 'baz' }
*
* @param {Object} a
* @param {Object} b
* @return {Object}
* @api public
*/
(function (module, exports) {
module.exports = function(a, b){
if (a && b) {
for (var key in b) {
a[key] = b[key];
}
}
return a;
};
} (utilsMerge));
var utilsMergeExports = utilsMerge.exports;
/*!
* connect
* Copyright(c) 2010 Sencha Inc.
* Copyright(c) 2011 TJ Holowaychuk
* Copyright(c) 2015 Douglas Christopher Wilson
* MIT Licensed
*/
/**
* Module dependencies.
* @private
*/
var debug$d = srcExports('connect:dispatcher');
var EventEmitter$3 = require$$0$7.EventEmitter;
var finalhandler = finalhandler_1;
var http$4 = require$$1;
var merge = utilsMergeExports;
var parseUrl$1 = parseurlExports;
/**
* Module exports.
* @public
*/
var connect = createServer$1;
/**
* Module variables.
* @private
*/
var env = process.env.NODE_ENV || 'development';
var proto = {};
/* istanbul ignore next */
var defer = typeof setImmediate === 'function'
? setImmediate
: function(fn){ process.nextTick(fn.bind.apply(fn, arguments)); };
/**
* Create a new connect server.
*
* @return {function}
* @public
*/
function createServer$1() {
function app(req, res, next){ app.handle(req, res, next); }
merge(app, proto);
merge(app, EventEmitter$3.prototype);
app.route = '/';
app.stack = [];
return app;
}
/**
* Utilize the given middleware `handle` to the given `route`,
* defaulting to _/_. This "route" is the mount-point for the
* middleware, when given a value other than _/_ the middleware
* is only effective when that segment is present in the request's
* pathname.
*
* For example if we were to mount a function at _/admin_, it would
* be invoked on _/admin_, and _/admin/settings_, however it would
* not be invoked for _/_, or _/posts_.
*
* @param {String|Function|Server} route, callback or server
* @param {Function|Server} callback or server
* @return {Server} for chaining
* @public
*/
proto.use = function use(route, fn) {
var handle = fn;
var path = route;
// default route to '/'
if (typeof route !== 'string') {
handle = route;
path = '/';
}
// wrap sub-apps
if (typeof handle.handle === 'function') {
var server = handle;
server.route = path;
handle = function (req, res, next) {
server.handle(req, res, next);
};
}
// wrap vanilla http.Servers
if (handle instanceof http$4.Server) {
handle = handle.listeners('request')[0];
}
// strip trailing slash
if (path[path.length - 1] === '/') {
path = path.slice(0, -1);
}
// add the middleware
debug$d('use %s %s', path || '/', handle.name || 'anonymous');
this.stack.push({ route: path, handle: handle });
return this;
};
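/*
 * Illustrative sketch (not part of the original module): mounting middleware on a
 * connect app created by createServer$1(). The '/admin' handler runs for '/admin'
 * and '/admin/...', never for '/' or '/posts'.
 *
 *   var app = createServer$1();
 *   app.use(function (req, res, next) { next(); });             // runs for every request
 *   app.use('/admin', function (req, res, next) { next(); });   // mounted middleware
 */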
/**
* Handle server requests, punting them down
* the middleware stack.
*
* @private
*/
proto.handle = function handle(req, res, out) {
var index = 0;
var protohost = getProtohost(req.url) || '';
var removed = '';
var slashAdded = false;
var stack = this.stack;
// final function handler
var done = out || finalhandler(req, res, {
env: env,
onerror: logerror
});
// store the original URL
req.originalUrl = req.originalUrl || req.url;
function next(err) {
if (slashAdded) {
req.url = req.url.substr(1);
slashAdded = false;
}
if (removed.length !== 0) {
req.url = protohost + removed + req.url.substr(protohost.length);
removed = '';
}
// next callback
var layer = stack[index++];
// all done
if (!layer) {
defer(done, err);
return;
}
// route data
var path = parseUrl$1(req).pathname || '/';
var route = layer.route;
// skip this layer if the route doesn't match
if (path.toLowerCase().substr(0, route.length) !== route.toLowerCase()) {
return next(err);
}
// skip if route match does not border "/", ".", or end
var c = path.length > route.length && path[route.length];
if (c && c !== '/' && c !== '.') {
return next(err);
}
// trim off the part of the url that matches the route
if (route.length !== 0 && route !== '/') {
removed = route;
req.url = protohost + req.url.substr(protohost.length + removed.length);
// ensure leading slash
if (!protohost && req.url[0] !== '/') {
req.url = '/' + req.url;
slashAdded = true;
}
}
// call the layer handle
call(layer.handle, route, err, req, res, next);
}
next();
};
/**
* Listen for connections.
*
* This method takes the same arguments
* as node's `http.Server#listen()`.
*
* HTTP and HTTPS:
*
* If you run your application both as HTTP
* and HTTPS you may wrap them individually,
* since your Connect "server" is really just
* a JavaScript `Function`.
*
* var connect = require('connect')
* , http = require('http')
* , https = require('https');
*
* var app = connect();
*
* http.createServer(app).listen(80);
* https.createServer(options, app).listen(443);
*
* @return {http.Server}
* @api public
*/
proto.listen = function listen() {
var server = http$4.createServer(this);
return server.listen.apply(server, arguments);
};
/**
* Invoke a route handle.
* @private
*/
function call(handle, route, err, req, res, next) {
var arity = handle.length;
var error = err;
var hasError = Boolean(err);
debug$d('%s %s : %s', handle.name || '<anonymous>', route, req.originalUrl);
try {
if (hasError && arity === 4) {
// error-handling middleware
handle(err, req, res, next);
return;
} else if (!hasError && arity < 4) {
// request-handling middleware
handle(req, res, next);
return;
}
} catch (e) {
// replace the error
error = e;
}
// continue
next(error);
}
/**
* Log error using console.error.
*
* @param {Error} err
* @private
*/
function logerror(err) {
if (env !== 'test') console.error(err.stack || err.toString());
}
/**
* Get protocol + host for a URL.
*
* @param {string} url
* @private
*/
function getProtohost(url) {
if (url.length === 0 || url[0] === '/') {
return undefined;
}
var fqdnIndex = url.indexOf('://');
return fqdnIndex !== -1 && url.lastIndexOf('?', fqdnIndex) === -1
? url.substr(0, url.indexOf('/', 3 + fqdnIndex))
: undefined;
}
var connect$1 = /*@__PURE__*/getDefaultExportFromCjs(connect);
var lib = {exports: {}};
/*
object-assign
(c) Sindre Sorhus
@license MIT
*/
/* eslint-disable no-unused-vars */
var getOwnPropertySymbols = Object.getOwnPropertySymbols;
var hasOwnProperty = Object.prototype.hasOwnProperty;
var propIsEnumerable = Object.prototype.propertyIsEnumerable;
function toObject(val) {
if (val === null || val === undefined) {
throw new TypeError('Object.assign cannot be called with null or undefined');
}
return Object(val);
}
function shouldUseNative() {
try {
if (!Object.assign) {
return false;
}
// Detect buggy property enumeration order in older V8 versions.
// https://bugs.chromium.org/p/v8/issues/detail?id=4118
var test1 = new String('abc'); // eslint-disable-line no-new-wrappers
test1[5] = 'de';
if (Object.getOwnPropertyNames(test1)[0] === '5') {
return false;
}
// https://bugs.chromium.org/p/v8/issues/detail?id=3056
var test2 = {};
for (var i = 0; i < 10; i++) {
test2['_' + String.fromCharCode(i)] = i;
}
var order2 = Object.getOwnPropertyNames(test2).map(function (n) {
return test2[n];
});
if (order2.join('') !== '0123456789') {
return false;
}
// https://bugs.chromium.org/p/v8/issues/detail?id=3056
var test3 = {};
'abcdefghijklmnopqrst'.split('').forEach(function (letter) {
test3[letter] = letter;
});
if (Object.keys(Object.assign({}, test3)).join('') !==
'abcdefghijklmnopqrst') {
return false;
}
return true;
} catch (err) {
// We don't expect any of the above to throw, but better to be safe.
return false;
}
}
var objectAssign = shouldUseNative() ? Object.assign : function (target, source) {
var from;
var to = toObject(target);
var symbols;
for (var s = 1; s < arguments.length; s++) {
from = Object(arguments[s]);
for (var key in from) {
if (hasOwnProperty.call(from, key)) {
to[key] = from[key];
}
}
if (getOwnPropertySymbols) {
symbols = getOwnPropertySymbols(from);
for (var i = 0; i < symbols.length; i++) {
if (propIsEnumerable.call(from, symbols[i])) {
to[symbols[i]] = from[symbols[i]];
}
}
}
}
return to;
};
var vary$1 = {exports: {}};
/*!
* vary
* Copyright(c) 2014-2017 Douglas Christopher Wilson
* MIT Licensed
*/
/**
* Module exports.
*/
vary$1.exports = vary;
vary$1.exports.append = append;
/**
* RegExp to match field-name in RFC 7230 sec 3.2
*
* field-name = token
* token = 1*tchar
* tchar = "!" / "#" / "$" / "%" / "&" / "'" / "*"
* / "+" / "-" / "." / "^" / "_" / "`" / "|" / "~"
* / DIGIT / ALPHA
* ; any VCHAR, except delimiters
*/
var FIELD_NAME_REGEXP = /^[!#$%&'*+\-.^_`|~0-9A-Za-z]+$/;
/**
* Append a field to a vary header.
*
* @param {String} header
* @param {String|Array} field
* @return {String}
* @public
*/
function append (header, field) {
if (typeof header !== 'string') {
throw new TypeError('header argument is required')
}
if (!field) {
throw new TypeError('field argument is required')
}
// get fields array
var fields = !Array.isArray(field)
? parse$7(String(field))
: field;
// assert on invalid field names
for (var j = 0; j < fields.length; j++) {
if (!FIELD_NAME_REGEXP.test(fields[j])) {
throw new TypeError('field argument contains an invalid header name')
}
}
// existing, unspecified vary
if (header === '*') {
return header
}
// enumerate current values
var val = header;
var vals = parse$7(header.toLowerCase());
// unspecified vary
if (fields.indexOf('*') !== -1 || vals.indexOf('*') !== -1) {
return '*'
}
for (var i = 0; i < fields.length; i++) {
var fld = fields[i].toLowerCase();
// append value (case-preserving)
if (vals.indexOf(fld) === -1) {
vals.push(fld);
val = val
? val + ', ' + fields[i]
: fields[i];
}
}
return val
}
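/*
 * Illustrative sketch (not part of the original module): how append() grows a Vary
 * header value while preserving case and deduplicating case-insensitively.
 *
 *   append('', 'Origin')                 // => 'Origin'
 *   append('Accept', 'Origin')           // => 'Accept, Origin'
 *   append('Accept, Origin', 'origin')   // => 'Accept, Origin' (already present)
 *   append('*', 'Origin')                // => '*'
 */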
/**
* Parse a vary header into an array.
*
* @param {String} header
* @return {Array}
* @private
*/
function parse$7 (header) {
var end = 0;
var list = [];
var start = 0;
// gather tokens
for (var i = 0, len = header.length; i < len; i++) {
switch (header.charCodeAt(i)) {
case 0x20: /* */
if (start === end) {
start = end = i + 1;
}
break
case 0x2c: /* , */
list.push(header.substring(start, end));
start = end = i + 1;
break
default:
end = i + 1;
break
}
}
// final token
list.push(header.substring(start, end));
return list
}
/**
* Mark that a request is varied on a header field.
*
* @param {Object} res
* @param {String|Array} field
* @public
*/
function vary (res, field) {
if (!res || !res.getHeader || !res.setHeader) {
// quack quack
throw new TypeError('res argument is required')
}
// get existing header
var val = res.getHeader('Vary') || '';
var header = Array.isArray(val)
? val.join(', ')
: String(val);
// set new header
if ((val = append(header, field))) {
res.setHeader('Vary', val);
}
}
var varyExports = vary$1.exports;
(function () {
var assign = objectAssign;
var vary = varyExports;
var defaults = {
origin: '*',
methods: 'GET,HEAD,PUT,PATCH,POST,DELETE',
preflightContinue: false,
optionsSuccessStatus: 204
};
function isString(s) {
return typeof s === 'string' || s instanceof String;
}
function isOriginAllowed(origin, allowedOrigin) {
if (Array.isArray(allowedOrigin)) {
for (var i = 0; i < allowedOrigin.length; ++i) {
if (isOriginAllowed(origin, allowedOrigin[i])) {
return true;
}
}
return false;
} else if (isString(allowedOrigin)) {
return origin === allowedOrigin;
} else if (allowedOrigin instanceof RegExp) {
return allowedOrigin.test(origin);
} else {
return !!allowedOrigin;
}
}
function configureOrigin(options, req) {
var requestOrigin = req.headers.origin,
headers = [],
isAllowed;
if (!options.origin || options.origin === '*') {
// allow any origin
headers.push([{
key: 'Access-Control-Allow-Origin',
value: '*'
}]);
} else if (isString(options.origin)) {
// fixed origin
headers.push([{
key: 'Access-Control-Allow-Origin',
value: options.origin
}]);
headers.push([{
key: 'Vary',
value: 'Origin'
}]);
} else {
isAllowed = isOriginAllowed(requestOrigin, options.origin);
// reflect origin
headers.push([{
key: 'Access-Control-Allow-Origin',
value: isAllowed ? requestOrigin : false
}]);
headers.push([{
key: 'Vary',
value: 'Origin'
}]);
}
return headers;
}
function configureMethods(options) {
var methods = options.methods;
if (methods.join) {
methods = options.methods.join(','); // .methods is an array, so turn it into a string
}
return {
key: 'Access-Control-Allow-Methods',
value: methods
};
}
function configureCredentials(options) {
if (options.credentials === true) {
return {
key: 'Access-Control-Allow-Credentials',
value: 'true'
};
}
return null;
}
function configureAllowedHeaders(options, req) {
var allowedHeaders = options.allowedHeaders || options.headers;
var headers = [];
if (!allowedHeaders) {
allowedHeaders = req.headers['access-control-request-headers']; // .headers wasn't specified, so reflect the request headers
headers.push([{
key: 'Vary',
value: 'Access-Control-Request-Headers'
}]);
} else if (allowedHeaders.join) {
allowedHeaders = allowedHeaders.join(','); // .headers is an array, so turn it into a string
}
if (allowedHeaders && allowedHeaders.length) {
headers.push([{
key: 'Access-Control-Allow-Headers',
value: allowedHeaders
}]);
}
return headers;
}
function configureExposedHeaders(options) {
var headers = options.exposedHeaders;
if (!headers) {
return null;
} else if (headers.join) {
headers = headers.join(','); // .headers is an array, so turn it into a string
}
if (headers && headers.length) {
return {
key: 'Access-Control-Expose-Headers',
value: headers
};
}
return null;
}
function configureMaxAge(options) {
var maxAge = (typeof options.maxAge === 'number' || options.maxAge) && options.maxAge.toString();
if (maxAge && maxAge.length) {
return {
key: 'Access-Control-Max-Age',
value: maxAge
};
}
return null;
}
function applyHeaders(headers, res) {
for (var i = 0, n = headers.length; i < n; i++) {
var header = headers[i];
if (header) {
if (Array.isArray(header)) {
applyHeaders(header, res);
} else if (header.key === 'Vary' && header.value) {
vary(res, header.value);
} else if (header.value) {
res.setHeader(header.key, header.value);
}
}
}
}
function cors(options, req, res, next) {
var headers = [],
method = req.method && req.method.toUpperCase && req.method.toUpperCase();
if (method === 'OPTIONS') {
// preflight
headers.push(configureOrigin(options, req));
headers.push(configureCredentials(options));
headers.push(configureMethods(options));
headers.push(configureAllowedHeaders(options, req));
headers.push(configureMaxAge(options));
headers.push(configureExposedHeaders(options));
applyHeaders(headers, res);
if (options.preflightContinue) {
next();
} else {
// Safari (and potentially other browsers) need Content-Length: 0 for a 204,
// or they just hang waiting for a body
res.statusCode = options.optionsSuccessStatus;
res.setHeader('Content-Length', '0');
res.end();
}
} else {
// actual response
headers.push(configureOrigin(options, req));
headers.push(configureCredentials(options));
headers.push(configureExposedHeaders(options));
applyHeaders(headers, res);
next();
}
}
function middlewareWrapper(o) {
// if options are static (either via defaults or custom options passed in), wrap in a function
var optionsCallback = null;
if (typeof o === 'function') {
optionsCallback = o;
} else {
optionsCallback = function (req, cb) {
cb(null, o);
};
}
return function corsMiddleware(req, res, next) {
optionsCallback(req, function (err, options) {
if (err) {
next(err);
} else {
var corsOptions = assign({}, defaults, options);
var originCallback = null;
if (corsOptions.origin && typeof corsOptions.origin === 'function') {
originCallback = corsOptions.origin;
} else if (corsOptions.origin) {
originCallback = function (origin, cb) {
cb(null, corsOptions.origin);
};
}
if (originCallback) {
originCallback(req.headers.origin, function (err2, origin) {
if (err2 || !origin) {
next(err2);
} else {
corsOptions.origin = origin;
cors(corsOptions, req, res, next);
}
});
} else {
next();
}
}
});
};
}
// can pass either an options hash, an options delegate, or nothing
lib.exports = middlewareWrapper;
}());
var libExports = lib.exports;
var corsMiddleware = /*@__PURE__*/getDefaultExportFromCjs(libExports);
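/*
 * Illustrative sketch (not part of the original module): corsMiddleware() accepts a
 * static options object or an options callback and returns connect-style middleware.
 * The origin value here is only an example.
 *
 *   var app = createServer$1();
 *   app.use(corsMiddleware({ origin: 'https://example.com', credentials: true }));
 */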
var chokidar = {};
const fs$8 = require$$0__default;
const { Readable } = require$$0$6;
const sysPath$3 = require$$0$4;
const { promisify: promisify$3 } = require$$0$5;
const picomatch$1 = picomatch$3;
const readdir$1 = promisify$3(fs$8.readdir);
const stat$3 = promisify$3(fs$8.stat);
const lstat$2 = promisify$3(fs$8.lstat);
const realpath$1 = promisify$3(fs$8.realpath);
/**
* @typedef {Object} EntryInfo
* @property {String} path
* @property {String} fullPath
* @property {fs.Stats=} stats
* @property {fs.Dirent=} dirent
* @property {String} basename
*/
const BANG$2 = '!';
const RECURSIVE_ERROR_CODE = 'READDIRP_RECURSIVE_ERROR';
const NORMAL_FLOW_ERRORS = new Set(['ENOENT', 'EPERM', 'EACCES', 'ELOOP', RECURSIVE_ERROR_CODE]);
const FILE_TYPE = 'files';
const DIR_TYPE = 'directories';
const FILE_DIR_TYPE = 'files_directories';
const EVERYTHING_TYPE = 'all';
const ALL_TYPES = [FILE_TYPE, DIR_TYPE, FILE_DIR_TYPE, EVERYTHING_TYPE];
const isNormalFlowError = error => NORMAL_FLOW_ERRORS.has(error.code);
const [maj, min] = process.versions.node.split('.').slice(0, 2).map(n => Number.parseInt(n, 10));
const wantBigintFsStats = process.platform === 'win32' && (maj > 10 || (maj === 10 && min >= 5));
const normalizeFilter = filter => {
if (filter === undefined) return;
if (typeof filter === 'function') return filter;
if (typeof filter === 'string') {
const glob = picomatch$1(filter.trim());
return entry => glob(entry.basename);
}
if (Array.isArray(filter)) {
const positive = [];
const negative = [];
for (const item of filter) {
const trimmed = item.trim();
if (trimmed.charAt(0) === BANG$2) {
negative.push(picomatch$1(trimmed.slice(1)));
} else {
positive.push(picomatch$1(trimmed));
}
}
if (negative.length > 0) {
if (positive.length > 0) {
return entry =>
positive.some(f => f(entry.basename)) && !negative.some(f => f(entry.basename));
}
return entry => !negative.some(f => f(entry.basename));
}
return entry => positive.some(f => f(entry.basename));
}
};
class ReaddirpStream extends Readable {
static get defaultOptions() {
return {
root: '.',
/* eslint-disable no-unused-vars */
fileFilter: (path) => true,
directoryFilter: (path) => true,
/* eslint-enable no-unused-vars */
type: FILE_TYPE,
lstat: false,
depth: 2147483648,
alwaysStat: false
};
}
constructor(options = {}) {
super({
objectMode: true,
autoDestroy: true,
highWaterMark: options.highWaterMark || 4096
});
const opts = { ...ReaddirpStream.defaultOptions, ...options };
const { root, type } = opts;
this._fileFilter = normalizeFilter(opts.fileFilter);
this._directoryFilter = normalizeFilter(opts.directoryFilter);
const statMethod = opts.lstat ? lstat$2 : stat$3;
// Use bigint stats if it's windows and stat() supports options (node 10+).
if (wantBigintFsStats) {
this._stat = path => statMethod(path, { bigint: true });
} else {
this._stat = statMethod;
}
this._maxDepth = opts.depth;
this._wantsDir = [DIR_TYPE, FILE_DIR_TYPE, EVERYTHING_TYPE].includes(type);
this._wantsFile = [FILE_TYPE, FILE_DIR_TYPE, EVERYTHING_TYPE].includes(type);
this._wantsEverything = type === EVERYTHING_TYPE;
this._root = sysPath$3.resolve(root);
this._isDirent = ('Dirent' in fs$8) && !opts.alwaysStat;
this._statsProp = this._isDirent ? 'dirent' : 'stats';
this._rdOptions = { encoding: 'utf8', withFileTypes: this._isDirent };
// Launch stream with one parent, the root dir.
this.parents = [this._exploreDir(root, 1)];
this.reading = false;
this.parent = undefined;
}
async _read(batch) {
if (this.reading) return;
this.reading = true;
try {
while (!this.destroyed && batch > 0) {
const { path, depth, files = [] } = this.parent || {};
if (files.length > 0) {
const slice = files.splice(0, batch).map(dirent => this._formatEntry(dirent, path));
for (const entry of await Promise.all(slice)) {
if (this.destroyed) return;
const entryType = await this._getEntryType(entry);
if (entryType === 'directory' && this._directoryFilter(entry)) {
if (depth <= this._maxDepth) {
this.parents.push(this._exploreDir(entry.fullPath, depth + 1));
}
if (this._wantsDir) {
this.push(entry);
batch--;
}
} else if ((entryType === 'file' || this._includeAsFile(entry)) && this._fileFilter(entry)) {
if (this._wantsFile) {
this.push(entry);
batch--;
}
}
}
} else {
const parent = this.parents.pop();
if (!parent) {
this.push(null);
break;
}
this.parent = await parent;
if (this.destroyed) return;
}
}
} catch (error) {
this.destroy(error);
} finally {
this.reading = false;
}
}
async _exploreDir(path, depth) {
let files;
try {
files = await readdir$1(path, this._rdOptions);
} catch (error) {
this._onError(error);
}
return { files, depth, path };
}
async _formatEntry(dirent, path) {
let entry;
try {
const basename = this._isDirent ? dirent.name : dirent;
const fullPath = sysPath$3.resolve(sysPath$3.join(path, basename));
entry = { path: sysPath$3.relative(this._root, fullPath), fullPath, basename };
entry[this._statsProp] = this._isDirent ? dirent : await this._stat(fullPath);
} catch (err) {
this._onError(err);
}
return entry;
}
_onError(err) {
if (isNormalFlowError(err) && !this.destroyed) {
this.emit('warn', err);
} else {
this.destroy(err);
}
}
async _getEntryType(entry) {
// entry may be undefined, because a warning or an error was emitted
// and the statsProp is undefined
const stats = entry && entry[this._statsProp];
if (!stats) {
return;
}
if (stats.isFile()) {
return 'file';
}
if (stats.isDirectory()) {
return 'directory';
}
if (stats && stats.isSymbolicLink()) {
const full = entry.fullPath;
try {
const entryRealPath = await realpath$1(full);
const entryRealPathStats = await lstat$2(entryRealPath);
if (entryRealPathStats.isFile()) {
return 'file';
}
if (entryRealPathStats.isDirectory()) {
const len = entryRealPath.length;
if (full.startsWith(entryRealPath) && full.substr(len, 1) === sysPath$3.sep) {
const recursiveError = new Error(
`Circular symlink detected: "${full}" points to "${entryRealPath}"`
);
recursiveError.code = RECURSIVE_ERROR_CODE;
return this._onError(recursiveError);
}
return 'directory';
}
} catch (error) {
this._onError(error);
}
}
}
_includeAsFile(entry) {
const stats = entry && entry[this._statsProp];
return stats && this._wantsEverything && !stats.isDirectory();
}
}
/**
* @typedef {Object} ReaddirpArguments
* @property {Function=} fileFilter
* @property {Function=} directoryFilter
* @property {String=} type
* @property {Number=} depth
* @property {String=} root
* @property {Boolean=} lstat
* @property {Boolean=} bigint
*/
/**
* Main function which ends up calling readdirRec and reads all files and directories in given root recursively.
* @param {String} root Root directory
* @param {ReaddirpArguments=} options Options to specify root (start directory), filters and recursion depth
*/
const readdirp$1 = (root, options = {}) => {
let type = options.entryType || options.type;
if (type === 'both') type = FILE_DIR_TYPE; // backwards-compatibility
if (type) options.type = type;
if (!root) {
throw new Error('readdirp: root argument is required. Usage: readdirp(root, options)');
} else if (typeof root !== 'string') {
throw new TypeError('readdirp: root argument must be a string. Usage: readdirp(root, options)');
} else if (type && !ALL_TYPES.includes(type)) {
throw new Error(`readdirp: Invalid type passed. Use one of ${ALL_TYPES.join(', ')}`);
}
options.root = root;
return new ReaddirpStream(options);
};
const readdirpPromise = (root, options = {}) => {
return new Promise((resolve, reject) => {
const files = [];
readdirp$1(root, options)
.on('data', entry => files.push(entry))
.on('end', () => resolve(files))
.on('error', error => reject(error));
});
};
readdirp$1.promise = readdirpPromise;
readdirp$1.ReaddirpStream = ReaddirpStream;
readdirp$1.default = readdirp$1;
var readdirp_1 = readdirp$1;
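// Illustrative sketch (not part of the original bundle, never invoked): how
// readdirp's stream and promise APIs are typically consumed. `someRoot` and
// the '*.mp4' filter are hypothetical.
async function exampleReaddirpUsage(someRoot) {
  // Stream API: entries arrive as { path, fullPath, basename, ... } objects.
  readdirp$1(someRoot, { fileFilter: '*.mp4', type: 'files', depth: 2 })
    .on('data', entry => console.log(entry.path))
    .on('warn', err => console.warn('non-fatal', err))
    .on('error', err => console.error('fatal', err))
    .on('end', () => console.log('done'));
  // Promise API: resolves with an array of all entries once the walk finishes.
  const entries = await readdirp$1.promise(someRoot, { type: 'files_directories' });
  return entries.map(e => e.fullPath);
}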
var anymatch$2 = {exports: {}};
/*!
* normalize-path <https://github.com/jonschlinkert/normalize-path>
*
* Copyright (c) 2014-2018, Jon Schlinkert.
* Released under the MIT License.
*/
var normalizePath$2 = function(path, stripTrailing) {
if (typeof path !== 'string') {
throw new TypeError('expected path to be a string');
}
if (path === '\\' || path === '/') return '/';
var len = path.length;
if (len <= 1) return path;
// ensure that win32 namespaces have two leading slashes, so that the path is
// handled properly by the win32 version of path.parse() after being normalized
// https://msdn.microsoft.com/library/windows/desktop/aa365247(v=vs.85).aspx#namespaces
var prefix = '';
if (len > 4 && path[3] === '\\') {
var ch = path[2];
if ((ch === '?' || ch === '.') && path.slice(0, 2) === '\\\\') {
path = path.slice(2);
prefix = '//';
}
}
var segs = path.split(/[/\\]+/);
if (stripTrailing !== false && segs[segs.length - 1] === '') {
segs.pop();
}
return prefix + segs.join('/');
};
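// Illustrative sketch (not part of the original bundle, never invoked):
// normalize-path converts backslashes to forward slashes, collapses duplicate
// slashes and, unless stripTrailing === false, drops a trailing slash; win32
// namespace prefixes keep their two leading slashes. All inputs below are
// hypothetical.
function exampleNormalizePath() {
  normalizePath$2('foo\\bar\\baz');          // 'foo/bar/baz'
  normalizePath$2('foo//bar///baz/');        // 'foo/bar/baz'
  normalizePath$2('foo/bar/', false);        // 'foo/bar/' (trailing slash kept)
  return normalizePath$2('\\\\?\\C:\\temp'); // '//?/C:/temp'
}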
var anymatch_1 = anymatch$2.exports;
Object.defineProperty(anymatch_1, "__esModule", { value: true });
const picomatch = picomatch$3;
const normalizePath$1 = normalizePath$2;
/**
* @typedef {(testString: string) => boolean} AnymatchFn
* @typedef {string|RegExp|AnymatchFn} AnymatchPattern
* @typedef {AnymatchPattern|AnymatchPattern[]} AnymatchMatcher
*/
const BANG$1 = '!';
const DEFAULT_OPTIONS = {returnIndex: false};
const arrify$1 = (item) => Array.isArray(item) ? item : [item];
/**
* @param {AnymatchPattern} matcher
* @param {object} options
* @returns {AnymatchFn}
*/
const createPattern = (matcher, options) => {
if (typeof matcher === 'function') {
return matcher;
}
if (typeof matcher === 'string') {
const glob = picomatch(matcher, options);
return (string) => matcher === string || glob(string);
}
if (matcher instanceof RegExp) {
return (string) => matcher.test(string);
}
return (string) => false;
};
/**
* @param {Array<Function>} patterns
* @param {Array<Function>} negPatterns
* @param {String|Array} args
* @param {Boolean} returnIndex
* @returns {boolean|number}
*/
const matchPatterns = (patterns, negPatterns, args, returnIndex) => {
const isList = Array.isArray(args);
const _path = isList ? args[0] : args;
if (!isList && typeof _path !== 'string') {
throw new TypeError('anymatch: second argument must be a string: got ' +
Object.prototype.toString.call(_path))
}
const path = normalizePath$1(_path);
for (let index = 0; index < negPatterns.length; index++) {
const nglob = negPatterns[index];
if (nglob(path)) {
return returnIndex ? -1 : false;
}
}
const applied = isList && [path].concat(args.slice(1));
for (let index = 0; index < patterns.length; index++) {
const pattern = patterns[index];
if (isList ? pattern(...applied) : pattern(path)) {
return returnIndex ? index : true;
}
}
return returnIndex ? -1 : false;
};
/**
* @param {AnymatchMatcher} matchers
* @param {Array|string} testString
* @param {object} options
* @returns {boolean|number|Function}
*/
const anymatch$1 = (matchers, testString, options = DEFAULT_OPTIONS) => {
if (matchers == null) {
throw new TypeError('anymatch: specify first argument');
}
const opts = typeof options === 'boolean' ? {returnIndex: options} : options;
const returnIndex = opts.returnIndex || false;
// Early cache for matchers.
const mtchers = arrify$1(matchers);
const negatedGlobs = mtchers
.filter(item => typeof item === 'string' && item.charAt(0) === BANG$1)
.map(item => item.slice(1))
.map(item => picomatch(item, opts));
const patterns = mtchers
.filter(item => typeof item !== 'string' || (typeof item === 'string' && item.charAt(0) !== BANG$1))
.map(matcher => createPattern(matcher, opts));
if (testString == null) {
return (testString, ri = false) => {
const returnIndex = typeof ri === 'boolean' ? ri : false;
return matchPatterns(patterns, negatedGlobs, testString, returnIndex);
}
}
return matchPatterns(patterns, negatedGlobs, testString, returnIndex);
};
anymatch$1.default = anymatch$1;
anymatch$2.exports = anymatch$1;
var anymatchExports = anymatch$2.exports;
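// Illustrative sketch (not part of the original bundle, never invoked):
// anymatch accepts glob strings, RegExps and predicate functions, and strings
// prefixed with '!' act as negations. The matchers and paths are hypothetical.
function exampleAnymatchUsage() {
  const matchers = ['assets/*.png', /\.(mp4|mkv)$/, p => p.endsWith('.gif'), '!assets/ignored.png'];
  anymatch$1(matchers, 'assets/logo.png');    // true
  anymatch$1(matchers, 'assets/ignored.png'); // false (hit the negated glob)
  anymatch$1(matchers, 'movie.mkv', true);    // 1 (index of the matching, non-negated pattern)
  // Passing no test string returns a reusable matcher function.
  const isMedia = anymatch$1(matchers);
  return isMedia('anim.gif');                 // true
}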
var require$$0 = [
"3dm",
"3ds",
"3g2",
"3gp",
"7z",
"a",
"aac",
"adp",
"ai",
"aif",
"aiff",
"alz",
"ape",
"apk",
"appimage",
"ar",
"arj",
"asf",
"au",
"avi",
"bak",
"baml",
"bh",
"bin",
"bk",
"bmp",
"btif",
"bz2",
"bzip2",
"cab",
"caf",
"cgm",
"class",
"cmx",
"cpio",
"cr2",
"cur",
"dat",
"dcm",
"deb",
"dex",
"djvu",
"dll",
"dmg",
"dng",
"doc",
"docm",
"docx",
"dot",
"dotm",
"dra",
"DS_Store",
"dsk",
"dts",
"dtshd",
"dvb",
"dwg",
"dxf",
"ecelp4800",
"ecelp7470",
"ecelp9600",
"egg",
"eol",
"eot",
"epub",
"exe",
"f4v",
"fbs",
"fh",
"fla",
"flac",
"flatpak",
"fli",
"flv",
"fpx",
"fst",
"fvt",
"g3",
"gh",
"gif",
"graffle",
"gz",
"gzip",
"h261",
"h263",
"h264",
"icns",
"ico",
"ief",
"img",
"ipa",
"iso",
"jar",
"jpeg",
"jpg",
"jpgv",
"jpm",
"jxr",
"key",
"ktx",
"lha",
"lib",
"lvp",
"lz",
"lzh",
"lzma",
"lzo",
"m3u",
"m4a",
"m4v",
"mar",
"mdi",
"mht",
"mid",
"midi",
"mj2",
"mka",
"mkv",
"mmr",
"mng",
"mobi",
"mov",
"movie",
"mp3",
"mp4",
"mp4a",
"mpeg",
"mpg",
"mpga",
"mxu",
"nef",
"npx",
"numbers",
"nupkg",
"o",
"odp",
"ods",
"odt",
"oga",
"ogg",
"ogv",
"otf",
"ott",
"pages",
"pbm",
"pcx",
"pdb",
"pdf",
"pea",
"pgm",
"pic",
"png",
"pnm",
"pot",
"potm",
"potx",
"ppa",
"ppam",
"ppm",
"pps",
"ppsm",
"ppsx",
"ppt",
"pptm",
"pptx",
"psd",
"pya",
"pyc",
"pyo",
"pyv",
"qt",
"rar",
"ras",
"raw",
"resources",
"rgb",
"rip",
"rlc",
"rmf",
"rmvb",
"rpm",
"rtf",
"rz",
"s3m",
"s7z",
"scpt",
"sgi",
"shar",
"snap",
"sil",
"sketch",
"slk",
"smv",
"snk",
"so",
"stl",
"suo",
"sub",
"swf",
"tar",
"tbz",
"tbz2",
"tga",
"tgz",
"thmx",
"tif",
"tiff",
"tlz",
"ttc",
"ttf",
"txz",
"udf",
"uvh",
"uvi",
"uvm",
"uvp",
"uvs",
"uvu",
"viv",
"vob",
"war",
"wav",
"wax",
"wbmp",
"wdp",
"weba",
"webm",
"webp",
"whl",
"wim",
"wm",
"wma",
"wmv",
"wmx",
"woff",
"woff2",
"wrm",
"wvx",
"xbm",
"xif",
"xla",
"xlam",
"xls",
"xlsb",
"xlsm",
"xlsx",
"xlt",
"xltm",
"xltx",
"xm",
"xmind",
"xpi",
"xpm",
"xwd",
"xz",
"z",
"zip",
"zipx"
];
var binaryExtensions$1 = require$$0;
const path$8 = require$$0$4;
const binaryExtensions = binaryExtensions$1;
const extensions = new Set(binaryExtensions);
var isBinaryPath$1 = filePath => extensions.has(path$8.extname(filePath).slice(1).toLowerCase());
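// Illustrative sketch (not part of the original bundle, never invoked):
// is-binary-path simply checks the lower-cased extension against the list
// above. The file names are hypothetical.
function exampleIsBinaryPath() {
  isBinaryPath$1('clips/intro.MP4');       // true  ('mp4' is in the list)
  isBinaryPath$1('notes/readme.md');       // false
  return isBinaryPath$1('archive.tar.gz'); // true  ('gz' is in the list)
}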
var constants$1 = {};
(function (exports) {
const {sep} = require$$0$4;
const {platform} = process;
const os = require$$2;
exports.EV_ALL = 'all';
exports.EV_READY = 'ready';
exports.EV_ADD = 'add';
exports.EV_CHANGE = 'change';
exports.EV_ADD_DIR = 'addDir';
exports.EV_UNLINK = 'unlink';
exports.EV_UNLINK_DIR = 'unlinkDir';
exports.EV_RAW = 'raw';
exports.EV_ERROR = 'error';
exports.STR_DATA = 'data';
exports.STR_END = 'end';
exports.STR_CLOSE = 'close';
exports.FSEVENT_CREATED = 'created';
exports.FSEVENT_MODIFIED = 'modified';
exports.FSEVENT_DELETED = 'deleted';
exports.FSEVENT_MOVED = 'moved';
exports.FSEVENT_CLONED = 'cloned';
exports.FSEVENT_UNKNOWN = 'unknown';
exports.FSEVENT_FLAG_MUST_SCAN_SUBDIRS = 1;
exports.FSEVENT_TYPE_FILE = 'file';
exports.FSEVENT_TYPE_DIRECTORY = 'directory';
exports.FSEVENT_TYPE_SYMLINK = 'symlink';
exports.KEY_LISTENERS = 'listeners';
exports.KEY_ERR = 'errHandlers';
exports.KEY_RAW = 'rawEmitters';
exports.HANDLER_KEYS = [exports.KEY_LISTENERS, exports.KEY_ERR, exports.KEY_RAW];
exports.DOT_SLASH = `.${sep}`;
exports.BACK_SLASH_RE = /\\/g;
exports.DOUBLE_SLASH_RE = /\/\//;
exports.SLASH_OR_BACK_SLASH_RE = /[/\\]/;
exports.DOT_RE = /\..*\.(sw[px])$|~$|\.subl.*\.tmp/;
exports.REPLACER_RE = /^\.[/\\]/;
exports.SLASH = '/';
exports.SLASH_SLASH = '//';
exports.BRACE_START = '{';
exports.BANG = '!';
exports.ONE_DOT = '.';
exports.TWO_DOTS = '..';
exports.STAR = '*';
exports.GLOBSTAR = '**';
exports.ROOT_GLOBSTAR = '/**/*';
exports.SLASH_GLOBSTAR = '/**';
exports.DIR_SUFFIX = 'Dir';
exports.ANYMATCH_OPTS = {dot: true};
exports.STRING_TYPE = 'string';
exports.FUNCTION_TYPE = 'function';
exports.EMPTY_STR = '';
exports.EMPTY_FN = () => {};
exports.IDENTITY_FN = val => val;
exports.isWindows = platform === 'win32';
exports.isMacos = platform === 'darwin';
exports.isLinux = platform === 'linux';
exports.isIBMi = os.type() === 'OS400';
} (constants$1));
const fs$7 = require$$0__default;
const sysPath$2 = require$$0$4;
const { promisify: promisify$2 } = require$$0$5;
const isBinaryPath = isBinaryPath$1;
const {
isWindows: isWindows$2,
isLinux,
EMPTY_FN: EMPTY_FN$2,
EMPTY_STR: EMPTY_STR$1,
KEY_LISTENERS,
KEY_ERR,
KEY_RAW,
HANDLER_KEYS,
EV_CHANGE: EV_CHANGE$2,
EV_ADD: EV_ADD$2,
EV_ADD_DIR: EV_ADD_DIR$2,
EV_ERROR: EV_ERROR$2,
STR_DATA: STR_DATA$1,
STR_END: STR_END$2,
BRACE_START: BRACE_START$1,
STAR
} = constants$1;
const THROTTLE_MODE_WATCH = 'watch';
const open$2 = promisify$2(fs$7.open);
const stat$2 = promisify$2(fs$7.stat);
const lstat$1 = promisify$2(fs$7.lstat);
const close = promisify$2(fs$7.close);
const fsrealpath = promisify$2(fs$7.realpath);
const statMethods$1 = { lstat: lstat$1, stat: stat$2 };
// TODO: emit errors properly. Example: EMFILE on Macos.
const foreach = (val, fn) => {
if (val instanceof Set) {
val.forEach(fn);
} else {
fn(val);
}
};
const addAndConvert = (main, prop, item) => {
let container = main[prop];
if (!(container instanceof Set)) {
main[prop] = container = new Set([container]);
}
container.add(item);
};
const clearItem = cont => key => {
const set = cont[key];
if (set instanceof Set) {
set.clear();
} else {
delete cont[key];
}
};
const delFromSet = (main, prop, item) => {
const container = main[prop];
if (container instanceof Set) {
container.delete(item);
} else if (container === item) {
delete main[prop];
}
};
const isEmptySet = (val) => val instanceof Set ? val.size === 0 : !val;
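// Illustrative sketch (not part of the original bundle, never invoked): the
// helpers above let a container property hold either a single value or a Set,
// upgrading lazily to a Set when a second listener is registered for the same
// watched path.
function exampleListenerContainer() {
  const cont = { listeners: () => {} };    // a single listener, stored directly
  const extra = () => {};
  addAndConvert(cont, 'listeners', extra); // upgraded to a Set holding both
  delFromSet(cont, 'listeners', extra);    // still a Set, one entry left
  return isEmptySet(cont.listeners);       // false: one listener remains
}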
/**
* @typedef {String} Path
*/
// fs_watch helpers
// object to hold per-process fs_watch instances
// (may be shared across chokidar FSWatcher instances)
/**
* @typedef {Object} FsWatchContainer
* @property {Set} listeners
* @property {Set} errHandlers
* @property {Set} rawEmitters
* @property {fs.FSWatcher=} watcher
* @property {Boolean=} watcherUnusable
*/
/**
* @type {Map<String,FsWatchContainer>}
*/
const FsWatchInstances = new Map();
/**
* Instantiates the fs_watch interface
* @param {String} path to be watched
* @param {Object} options to be passed to fs_watch
* @param {Function} listener main event handler
* @param {Function} errHandler emits info about errors
* @param {Function} emitRaw emits raw event data
 * @returns {fs.FSWatcher} new fs.watch instance
*/
function createFsWatchInstance(path, options, listener, errHandler, emitRaw) {
const handleEvent = (rawEvent, evPath) => {
listener(path);
emitRaw(rawEvent, evPath, {watchedPath: path});
// emit based on events occurring for files from a directory's watcher in
// case the file's watcher misses it (and rely on throttling to de-dupe)
if (evPath && path !== evPath) {
fsWatchBroadcast(
sysPath$2.resolve(path, evPath), KEY_LISTENERS, sysPath$2.join(path, evPath)
);
}
};
try {
return fs$7.watch(path, options, handleEvent);
} catch (error) {
errHandler(error);
}
}
/**
* Helper for passing fs_watch event data to a collection of listeners
* @param {Path} fullPath absolute path bound to fs_watch instance
* @param {String} type listener type
* @param {*=} val1 arguments to be passed to listeners
* @param {*=} val2
* @param {*=} val3
*/
const fsWatchBroadcast = (fullPath, type, val1, val2, val3) => {
const cont = FsWatchInstances.get(fullPath);
if (!cont) return;
foreach(cont[type], (listener) => {
listener(val1, val2, val3);
});
};
/**
* Instantiates the fs_watch interface or binds listeners
* to an existing one covering the same file system entry
* @param {String} path
* @param {String} fullPath absolute path
* @param {Object} options to be passed to fs_watch
* @param {Object} handlers container for event listener functions
*/
const setFsWatchListener = (path, fullPath, options, handlers) => {
const {listener, errHandler, rawEmitter} = handlers;
let cont = FsWatchInstances.get(fullPath);
/** @type {fs.FSWatcher=} */
let watcher;
if (!options.persistent) {
watcher = createFsWatchInstance(
path, options, listener, errHandler, rawEmitter
);
return watcher.close.bind(watcher);
}
if (cont) {
addAndConvert(cont, KEY_LISTENERS, listener);
addAndConvert(cont, KEY_ERR, errHandler);
addAndConvert(cont, KEY_RAW, rawEmitter);
} else {
watcher = createFsWatchInstance(
path,
options,
fsWatchBroadcast.bind(null, fullPath, KEY_LISTENERS),
errHandler, // no need to use broadcast here
fsWatchBroadcast.bind(null, fullPath, KEY_RAW)
);
if (!watcher) return;
watcher.on(EV_ERROR$2, async (error) => {
const broadcastErr = fsWatchBroadcast.bind(null, fullPath, KEY_ERR);
cont.watcherUnusable = true; // documented since Node 10.4.1
// Workaround for https://github.com/joyent/node/issues/4337
if (isWindows$2 && error.code === 'EPERM') {
try {
const fd = await open$2(path, 'r');
await close(fd);
broadcastErr(error);
} catch (err) {}
} else {
broadcastErr(error);
}
});
cont = {
listeners: listener,
errHandlers: errHandler,
rawEmitters: rawEmitter,
watcher
};
FsWatchInstances.set(fullPath, cont);
}
// const index = cont.listeners.indexOf(listener);
// removes this instance's listeners and closes the underlying fs_watch
// instance if there are no more listeners left
return () => {
delFromSet(cont, KEY_LISTENERS, listener);
delFromSet(cont, KEY_ERR, errHandler);
delFromSet(cont, KEY_RAW, rawEmitter);
if (isEmptySet(cont.listeners)) {
// Check to protect against issue gh-730.
// if (cont.watcherUnusable) {
cont.watcher.close();
// }
FsWatchInstances.delete(fullPath);
HANDLER_KEYS.forEach(clearItem(cont));
cont.watcher = undefined;
Object.freeze(cont);
}
};
};
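// Illustrative sketch (not part of the original bundle, never invoked): how
// the shared fs.watch helper above is used. The 'media' path is hypothetical;
// the returned closer detaches this subscription and closes the underlying
// fs.FSWatcher once no listeners remain.
function exampleSharedFsWatch() {
  return setFsWatchListener('media', sysPath$2.resolve('media'), { persistent: true }, {
    listener: evPath => console.log('changed:', evPath),
    errHandler: err => console.error('watch error:', err),
    rawEmitter: (event, evPath, details) => console.log('raw:', event, evPath, details)
  });
}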
// fs_watchFile helpers
// object to hold per-process fs_watchFile instances
// (may be shared across chokidar FSWatcher instances)
const FsWatchFileInstances = new Map();
/**
* Instantiates the fs_watchFile interface or binds listeners
* to an existing one covering the same file system entry
* @param {String} path to be watched
* @param {String} fullPath absolute path
* @param {Object} options options to be passed to fs_watchFile
* @param {Object} handlers container for event listener functions
* @returns {Function} closer
*/
const setFsWatchFileListener = (path, fullPath, options, handlers) => {
const {listener, rawEmitter} = handlers;
let cont = FsWatchFileInstances.get(fullPath);
const copts = cont && cont.options;
if (copts && (copts.persistent < options.persistent || copts.interval > options.interval)) {
fs$7.unwatchFile(fullPath);
cont = undefined;
}
/* eslint-enable no-unused-vars, prefer-destructuring */
if (cont) {
addAndConvert(cont, KEY_LISTENERS, listener);
addAndConvert(cont, KEY_RAW, rawEmitter);
} else {
// TODO
// listeners.add(listener);
// rawEmitters.add(rawEmitter);
cont = {
listeners: listener,
rawEmitters: rawEmitter,
options,
watcher: fs$7.watchFile(fullPath, options, (curr, prev) => {
foreach(cont.rawEmitters, (rawEmitter) => {
rawEmitter(EV_CHANGE$2, fullPath, {curr, prev});
});
const currmtime = curr.mtimeMs;
if (curr.size !== prev.size || currmtime > prev.mtimeMs || currmtime === 0) {
foreach(cont.listeners, (listener) => listener(path, curr));
}
})
};
FsWatchFileInstances.set(fullPath, cont);
}
// const index = cont.listeners.indexOf(listener);
// Removes this instance's listeners and closes the underlying fs_watchFile
// instance if there are no more listeners left.
return () => {
delFromSet(cont, KEY_LISTENERS, listener);
delFromSet(cont, KEY_RAW, rawEmitter);
if (isEmptySet(cont.listeners)) {
FsWatchFileInstances.delete(fullPath);
fs$7.unwatchFile(fullPath);
cont.options = cont.watcher = undefined;
Object.freeze(cont);
}
};
};
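// Illustrative sketch (not part of the original bundle, never invoked): the
// polling counterpart of the helper above; the interval would normally come
// from the chokidar options and 'media/catalog.json' is a hypothetical path.
function exampleSharedFsWatchFile() {
  return setFsWatchFileListener('media/catalog.json', sysPath$2.resolve('media/catalog.json'),
    { persistent: true, interval: 100 }, {
      listener: (evPath, stats) => console.log('changed:', evPath, stats.mtimeMs),
      rawEmitter: (event, fullPath, details) => console.log('raw:', event, fullPath, details)
    });
}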
/**
* @mixin
*/
let NodeFsHandler$1 = class NodeFsHandler {
/**
* @param {import("../index").FSWatcher} fsW
*/
constructor(fsW) {
this.fsw = fsW;
this._boundHandleError = (error) => fsW._handleError(error);
}
/**
* Watch file for changes with fs_watchFile or fs_watch.
* @param {String} path to file or dir
* @param {Function} listener on fs change
* @returns {Function} closer for the watcher instance
*/
_watchWithNodeFs(path, listener) {
const opts = this.fsw.options;
const directory = sysPath$2.dirname(path);
const basename = sysPath$2.basename(path);
const parent = this.fsw._getWatchedDir(directory);
parent.add(basename);
const absolutePath = sysPath$2.resolve(path);
const options = {persistent: opts.persistent};
if (!listener) listener = EMPTY_FN$2;
let closer;
if (opts.usePolling) {
options.interval = opts.enableBinaryInterval && isBinaryPath(basename) ?
opts.binaryInterval : opts.interval;
closer = setFsWatchFileListener(path, absolutePath, options, {
listener,
rawEmitter: this.fsw._emitRaw
});
} else {
closer = setFsWatchListener(path, absolutePath, options, {
listener,
errHandler: this._boundHandleError,
rawEmitter: this.fsw._emitRaw
});
}
return closer;
}
/**
* Watch a file and emit add event if warranted.
* @param {Path} file Path
* @param {fs.Stats} stats result of fs_stat
* @param {Boolean} initialAdd was the file added at watch instantiation?
* @returns {Function} closer for the watcher instance
*/
_handleFile(file, stats, initialAdd) {
if (this.fsw.closed) {
return;
}
const dirname = sysPath$2.dirname(file);
const basename = sysPath$2.basename(file);
const parent = this.fsw._getWatchedDir(dirname);
// stats is always present
let prevStats = stats;
// if the file is already being watched, do nothing
if (parent.has(basename)) return;
const listener = async (path, newStats) => {
if (!this.fsw._throttle(THROTTLE_MODE_WATCH, file, 5)) return;
if (!newStats || newStats.mtimeMs === 0) {
try {
const newStats = await stat$2(file);
if (this.fsw.closed) return;
// Check that the change event was not fired only because the access time changed.
const at = newStats.atimeMs;
const mt = newStats.mtimeMs;
if (!at || at <= mt || mt !== prevStats.mtimeMs) {
this.fsw._emit(EV_CHANGE$2, file, newStats);
}
if (isLinux && prevStats.ino !== newStats.ino) {
this.fsw._closeFile(path);
prevStats = newStats;
this.fsw._addPathCloser(path, this._watchWithNodeFs(file, listener));
} else {
prevStats = newStats;
}
} catch (error) {
// Fix issues where mtime is null but file is still present
this.fsw._remove(dirname, basename);
}
// add is about to be emitted if file not already tracked in parent
} else if (parent.has(basename)) {
// Check that the change event was not fired only because the access time changed.
const at = newStats.atimeMs;
const mt = newStats.mtimeMs;
if (!at || at <= mt || mt !== prevStats.mtimeMs) {
this.fsw._emit(EV_CHANGE$2, file, newStats);
}
prevStats = newStats;
}
};
// kick off the watcher
const closer = this._watchWithNodeFs(file, listener);
// emit an add event if we're supposed to
if (!(initialAdd && this.fsw.options.ignoreInitial) && this.fsw._isntIgnored(file)) {
if (!this.fsw._throttle(EV_ADD$2, file, 0)) return;
this.fsw._emit(EV_ADD$2, file, stats);
}
return closer;
}
/**
* Handle symlinks encountered while reading a dir.
* @param {Object} entry returned by readdirp
* @param {String} directory path of dir being read
 * @param {String} path path of this item
* @param {String} item basename of this item
* @returns {Promise<Boolean>} true if no more processing is needed for this entry.
*/
async _handleSymlink(entry, directory, path, item) {
if (this.fsw.closed) {
return;
}
const full = entry.fullPath;
const dir = this.fsw._getWatchedDir(directory);
if (!this.fsw.options.followSymlinks) {
// watch symlink directly (don't follow) and detect changes
this.fsw._incrReadyCount();
let linkPath;
try {
linkPath = await fsrealpath(path);
} catch (e) {
this.fsw._emitReady();
return true;
}
if (this.fsw.closed) return;
if (dir.has(item)) {
if (this.fsw._symlinkPaths.get(full) !== linkPath) {
this.fsw._symlinkPaths.set(full, linkPath);
this.fsw._emit(EV_CHANGE$2, path, entry.stats);
}
} else {
dir.add(item);
this.fsw._symlinkPaths.set(full, linkPath);
this.fsw._emit(EV_ADD$2, path, entry.stats);
}
this.fsw._emitReady();
return true;
}
// don't follow the same symlink more than once
if (this.fsw._symlinkPaths.has(full)) {
return true;
}
this.fsw._symlinkPaths.set(full, true);
}
_handleRead(directory, initialAdd, wh, target, dir, depth, throttler) {
// Normalize the directory name on Windows
directory = sysPath$2.join(directory, EMPTY_STR$1);
if (!wh.hasGlob) {
throttler = this.fsw._throttle('readdir', directory, 1000);
if (!throttler) return;
}
const previous = this.fsw._getWatchedDir(wh.path);
const current = new Set();
let stream = this.fsw._readdirp(directory, {
fileFilter: entry => wh.filterPath(entry),
directoryFilter: entry => wh.filterDir(entry),
depth: 0
}).on(STR_DATA$1, async (entry) => {
if (this.fsw.closed) {
stream = undefined;
return;
}
const item = entry.path;
let path = sysPath$2.join(directory, item);
current.add(item);
if (entry.stats.isSymbolicLink() && await this._handleSymlink(entry, directory, path, item)) {
return;
}
if (this.fsw.closed) {
stream = undefined;
return;
}
// Files that are present in the current directory snapshot
// but absent in the previous one are added to the watch list
// and emit an `add` event.
if (item === target || !target && !previous.has(item)) {
this.fsw._incrReadyCount();
// ensure relativeness of path is preserved in case of watcher reuse
path = sysPath$2.join(dir, sysPath$2.relative(dir, path));
this._addToNodeFs(path, initialAdd, wh, depth + 1);
}
}).on(EV_ERROR$2, this._boundHandleError);
return new Promise(resolve =>
stream.once(STR_END$2, () => {
if (this.fsw.closed) {
stream = undefined;
return;
}
const wasThrottled = throttler ? throttler.clear() : false;
resolve();
// Files that are absent from the current directory snapshot
// but present in the previous one emit a `remove` event
// and are removed from @watched[directory].
previous.getChildren().filter((item) => {
return item !== directory &&
!current.has(item) &&
// in case of intersecting globs;
// a path may have been filtered out of this readdir, but
// shouldn't be removed because it matches a different glob
(!wh.hasGlob || wh.filterPath({
fullPath: sysPath$2.resolve(directory, item)
}));
}).forEach((item) => {
this.fsw._remove(directory, item);
});
stream = undefined;
// one more time for any missed in case changes came in extremely quickly
if (wasThrottled) this._handleRead(directory, false, wh, target, dir, depth, throttler);
})
);
}
/**
* Read directory to add / remove files from `@watched` list and re-read it on change.
* @param {String} dir fs path
* @param {fs.Stats} stats
* @param {Boolean} initialAdd
* @param {Number} depth relative to user-supplied path
* @param {String} target child path targeted for watch
* @param {Object} wh Common watch helpers for this path
* @param {String} realpath
* @returns {Promise<Function>} closer for the watcher instance.
*/
async _handleDir(dir, stats, initialAdd, depth, target, wh, realpath) {
const parentDir = this.fsw._getWatchedDir(sysPath$2.dirname(dir));
const tracked = parentDir.has(sysPath$2.basename(dir));
if (!(initialAdd && this.fsw.options.ignoreInitial) && !target && !tracked) {
if (!wh.hasGlob || wh.globFilter(dir)) this.fsw._emit(EV_ADD_DIR$2, dir, stats);
}
// ensure dir is tracked (harmless if redundant)
parentDir.add(sysPath$2.basename(dir));
this.fsw._getWatchedDir(dir);
let throttler;
let closer;
const oDepth = this.fsw.options.depth;
if ((oDepth == null || depth <= oDepth) && !this.fsw._symlinkPaths.has(realpath)) {
if (!target) {
await this._handleRead(dir, initialAdd, wh, target, dir, depth, throttler);
if (this.fsw.closed) return;
}
closer = this._watchWithNodeFs(dir, (dirPath, stats) => {
// if current directory is removed, do nothing
if (stats && stats.mtimeMs === 0) return;
this._handleRead(dirPath, false, wh, target, dir, depth, throttler);
});
}
return closer;
}
/**
* Handle added file, directory, or glob pattern.
* Delegates call to _handleFile / _handleDir after checks.
 * @param {String} path to file or dir
* @param {Boolean} initialAdd was the file added at watch instantiation?
 * @param {Object} priorWh watch helpers inherited from a prior add for this path
 * @param {Number} depth Depth relative to user-supplied path
* @param {String=} target Child path actually targeted for watch
* @returns {Promise}
*/
async _addToNodeFs(path, initialAdd, priorWh, depth, target) {
const ready = this.fsw._emitReady;
if (this.fsw._isIgnored(path) || this.fsw.closed) {
ready();
return false;
}
const wh = this.fsw._getWatchHelpers(path, depth);
if (!wh.hasGlob && priorWh) {
wh.hasGlob = priorWh.hasGlob;
wh.globFilter = priorWh.globFilter;
wh.filterPath = entry => priorWh.filterPath(entry);
wh.filterDir = entry => priorWh.filterDir(entry);
}
// evaluate what is at the path we're being asked to watch
try {
const stats = await statMethods$1[wh.statMethod](wh.watchPath);
if (this.fsw.closed) return;
if (this.fsw._isIgnored(wh.watchPath, stats)) {
ready();
return false;
}
const follow = this.fsw.options.followSymlinks && !path.includes(STAR) && !path.includes(BRACE_START$1);
let closer;
if (stats.isDirectory()) {
const absPath = sysPath$2.resolve(path);
const targetPath = follow ? await fsrealpath(path) : path;
if (this.fsw.closed) return;
closer = await this._handleDir(wh.watchPath, stats, initialAdd, depth, target, wh, targetPath);
if (this.fsw.closed) return;
// preserve this symlink's target path
if (absPath !== targetPath && targetPath !== undefined) {
this.fsw._symlinkPaths.set(absPath, targetPath);
}
} else if (stats.isSymbolicLink()) {
const targetPath = follow ? await fsrealpath(path) : path;
if (this.fsw.closed) return;
const parent = sysPath$2.dirname(wh.watchPath);
this.fsw._getWatchedDir(parent).add(wh.watchPath);
this.fsw._emit(EV_ADD$2, wh.watchPath, stats);
closer = await this._handleDir(parent, stats, initialAdd, depth, path, wh, targetPath);
if (this.fsw.closed) return;
// preserve this symlink's target path
if (targetPath !== undefined) {
this.fsw._symlinkPaths.set(sysPath$2.resolve(path), targetPath);
}
} else {
closer = this._handleFile(wh.watchPath, stats, initialAdd);
}
ready();
this.fsw._addPathCloser(path, closer);
return false;
} catch (error) {
if (this.fsw._handleError(error)) {
ready();
return path;
}
}
}
};
var nodefsHandler = NodeFsHandler$1;
var fseventsHandler = {exports: {}};
const fs$6 = require$$0__default;
const sysPath$1 = require$$0$4;
const { promisify: promisify$1 } = require$$0$5;
let fsevents;
try {
fsevents = __require('fsevents');
} catch (error) {
if (process.env.CHOKIDAR_PRINT_FSEVENTS_REQUIRE_ERROR) console.error(error);
}
if (fsevents) {
// TODO: real check
const mtch = process.version.match(/v(\d+)\.(\d+)/);
if (mtch && mtch[1] && mtch[2]) {
const maj = Number.parseInt(mtch[1], 10);
const min = Number.parseInt(mtch[2], 10);
if (maj === 8 && min < 16) {
fsevents = undefined;
}
}
}
const {
EV_ADD: EV_ADD$1,
EV_CHANGE: EV_CHANGE$1,
EV_ADD_DIR: EV_ADD_DIR$1,
EV_UNLINK: EV_UNLINK$1,
EV_ERROR: EV_ERROR$1,
STR_DATA,
STR_END: STR_END$1,
FSEVENT_CREATED,
FSEVENT_MODIFIED,
FSEVENT_DELETED,
FSEVENT_MOVED,
// FSEVENT_CLONED,
FSEVENT_UNKNOWN,
FSEVENT_FLAG_MUST_SCAN_SUBDIRS,
FSEVENT_TYPE_FILE,
FSEVENT_TYPE_DIRECTORY,
FSEVENT_TYPE_SYMLINK,
ROOT_GLOBSTAR,
DIR_SUFFIX,
DOT_SLASH,
FUNCTION_TYPE: FUNCTION_TYPE$1,
EMPTY_FN: EMPTY_FN$1,
IDENTITY_FN
} = constants$1;
const Depth = (value) => isNaN(value) ? {} : {depth: value};
const stat$1 = promisify$1(fs$6.stat);
const lstat = promisify$1(fs$6.lstat);
const realpath = promisify$1(fs$6.realpath);
const statMethods = { stat: stat$1, lstat };
/**
* @typedef {String} Path
*/
/**
* @typedef {Object} FsEventsWatchContainer
* @property {Set<Function>} listeners
* @property {Function} rawEmitter
* @property {{stop: Function}} watcher
*/
// fsevents instance helper functions
/**
* Object to hold per-process fsevents instances (may be shared across chokidar FSWatcher instances)
* @type {Map<Path,FsEventsWatchContainer>}
*/
const FSEventsWatchers = new Map();
// Threshold of duplicate path prefixes at which to start
// consolidating going forward
const consolidateThreshhold = 10;
const wrongEventFlags = new Set([
69888, 70400, 71424, 72704, 73472, 131328, 131840, 262912
]);
/**
* Instantiates the fsevents interface
* @param {Path} path path to be watched
* @param {Function} callback called when fsevents is bound and ready
* @returns {{stop: Function}} new fsevents instance
*/
const createFSEventsInstance = (path, callback) => {
const stop = fsevents.watch(path, callback);
return {stop};
};
/**
* Instantiates the fsevents interface or binds listeners to an existing one covering
* the same file tree.
* @param {Path} path - to be watched
* @param {Path} realPath - real path for symlinks
* @param {Function} listener - called when fsevents emits events
* @param {Function} rawEmitter - passes data to listeners of the 'raw' event
* @returns {Function} closer
*/
function setFSEventsListener(path, realPath, listener, rawEmitter) {
let watchPath = sysPath$1.extname(realPath) ? sysPath$1.dirname(realPath) : realPath;
const parentPath = sysPath$1.dirname(watchPath);
let cont = FSEventsWatchers.get(watchPath);
// If we've accumulated a substantial number of paths that
// could have been consolidated by watching one directory
// above the current one, create a watcher on the parent
// path instead, so that we do consolidate going forward.
if (couldConsolidate(parentPath)) {
watchPath = parentPath;
}
const resolvedPath = sysPath$1.resolve(path);
const hasSymlink = resolvedPath !== realPath;
const filteredListener = (fullPath, flags, info) => {
if (hasSymlink) fullPath = fullPath.replace(realPath, resolvedPath);
if (
fullPath === resolvedPath ||
!fullPath.indexOf(resolvedPath + sysPath$1.sep)
) listener(fullPath, flags, info);
};
// check if there is already a watcher on a parent path
// modifies `watchPath` to the parent path when it finds a match
let watchedParent = false;
for (const watchedPath of FSEventsWatchers.keys()) {
if (realPath.indexOf(sysPath$1.resolve(watchedPath) + sysPath$1.sep) === 0) {
watchPath = watchedPath;
cont = FSEventsWatchers.get(watchPath);
watchedParent = true;
break;
}
}
if (cont || watchedParent) {
cont.listeners.add(filteredListener);
} else {
cont = {
listeners: new Set([filteredListener]),
rawEmitter,
watcher: createFSEventsInstance(watchPath, (fullPath, flags) => {
if (!cont.listeners.size) return;
if (flags & FSEVENT_FLAG_MUST_SCAN_SUBDIRS) return;
const info = fsevents.getInfo(fullPath, flags);
cont.listeners.forEach(list => {
list(fullPath, flags, info);
});
cont.rawEmitter(info.event, fullPath, info);
})
};
FSEventsWatchers.set(watchPath, cont);
}
// removes this instance's listeners and closes the underlying fsevents
// instance if there are no more listeners left
return () => {
const lst = cont.listeners;
lst.delete(filteredListener);
if (!lst.size) {
FSEventsWatchers.delete(watchPath);
if (cont.watcher) return cont.watcher.stop().then(() => {
cont.rawEmitter = cont.watcher = undefined;
Object.freeze(cont);
});
}
};
}
// Decide whether or not we should start a new higher-level
// parent watcher
const couldConsolidate = (path) => {
let count = 0;
for (const watchPath of FSEventsWatchers.keys()) {
if (watchPath.indexOf(path) === 0) {
count++;
if (count >= consolidateThreshhold) {
return true;
}
}
}
return false;
};
// returns boolean indicating whether fsevents can be used
const canUse = () => fsevents && FSEventsWatchers.size < 128;
// determines subdirectory traversal levels from root to path
const calcDepth = (path, root) => {
let i = 0;
while (!path.indexOf(root) && (path = sysPath$1.dirname(path)) !== root) i++;
return i;
};
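// Illustrative sketch (not part of the original bundle, never invoked):
// calcDepth counts the subdirectory levels between `root` and the entry's
// parent directory. With the hypothetical root '/media', '/media/clip.mp4' is
// depth 0 and '/media/movies/2024/clip.mp4' is depth 2.
function exampleCalcDepth() {
  return calcDepth('/media/movies/2024/clip.mp4', '/media'); // 2
}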
// returns boolean indicating whether the fsevents' event info has the same type
// as the one returned by fs.stat
const sameTypes = (info, stats) => (
info.type === FSEVENT_TYPE_DIRECTORY && stats.isDirectory() ||
info.type === FSEVENT_TYPE_SYMLINK && stats.isSymbolicLink() ||
info.type === FSEVENT_TYPE_FILE && stats.isFile()
);
/**
* @mixin
*/
let FsEventsHandler$1 = class FsEventsHandler {
/**
* @param {import('../index').FSWatcher} fsw
*/
constructor(fsw) {
this.fsw = fsw;
}
checkIgnored(path, stats) {
const ipaths = this.fsw._ignoredPaths;
if (this.fsw._isIgnored(path, stats)) {
ipaths.add(path);
if (stats && stats.isDirectory()) {
ipaths.add(path + ROOT_GLOBSTAR);
}
return true;
}
ipaths.delete(path);
ipaths.delete(path + ROOT_GLOBSTAR);
}
addOrChange(path, fullPath, realPath, parent, watchedDir, item, info, opts) {
const event = watchedDir.has(item) ? EV_CHANGE$1 : EV_ADD$1;
this.handleEvent(event, path, fullPath, realPath, parent, watchedDir, item, info, opts);
}
async checkExists(path, fullPath, realPath, parent, watchedDir, item, info, opts) {
try {
const stats = await stat$1(path);
if (this.fsw.closed) return;
if (sameTypes(info, stats)) {
this.addOrChange(path, fullPath, realPath, parent, watchedDir, item, info, opts);
} else {
this.handleEvent(EV_UNLINK$1, path, fullPath, realPath, parent, watchedDir, item, info, opts);
}
} catch (error) {
if (error.code === 'EACCES') {
this.addOrChange(path, fullPath, realPath, parent, watchedDir, item, info, opts);
} else {
this.handleEvent(EV_UNLINK$1, path, fullPath, realPath, parent, watchedDir, item, info, opts);
}
}
}
handleEvent(event, path, fullPath, realPath, parent, watchedDir, item, info, opts) {
if (this.fsw.closed || this.checkIgnored(path)) return;
if (event === EV_UNLINK$1) {
const isDirectory = info.type === FSEVENT_TYPE_DIRECTORY;
// suppress unlink events on never before seen files
if (isDirectory || watchedDir.has(item)) {
this.fsw._remove(parent, item, isDirectory);
}
} else {
if (event === EV_ADD$1) {
// track new directories
if (info.type === FSEVENT_TYPE_DIRECTORY) this.fsw._getWatchedDir(path);
if (info.type === FSEVENT_TYPE_SYMLINK && opts.followSymlinks) {
// push symlinks back to the top of the stack to get handled
const curDepth = opts.depth === undefined ?
undefined : calcDepth(fullPath, realPath) + 1;
return this._addToFsEvents(path, false, true, curDepth);
}
// track new paths
// (other than symlinks being followed, which will be tracked soon)
this.fsw._getWatchedDir(parent).add(item);
}
/**
* @type {'add'|'addDir'|'unlink'|'unlinkDir'}
*/
const eventName = info.type === FSEVENT_TYPE_DIRECTORY ? event + DIR_SUFFIX : event;
this.fsw._emit(eventName, path);
if (eventName === EV_ADD_DIR$1) this._addToFsEvents(path, false, true);
}
}
/**
 * Watch a file or directory with fsevents, filtering and transforming emitted paths
* @param {String} watchPath - file/dir path to be watched with fsevents
* @param {String} realPath - real path (in case of symlinks)
* @param {Function} transform - path transformer
* @param {Function} globFilter - path filter in case a glob pattern was provided
* @returns {Function} closer for the watcher instance
*/
_watchWithFsEvents(watchPath, realPath, transform, globFilter) {
if (this.fsw.closed || this.fsw._isIgnored(watchPath)) return;
const opts = this.fsw.options;
const watchCallback = async (fullPath, flags, info) => {
// PATCH: bypass the callback for better perf when fullPath hit the ignored file list
if (this.fsw.closed || this.fsw._isIgnored(fullPath)) return;
if (
opts.depth !== undefined &&
calcDepth(fullPath, realPath) > opts.depth
) return;
const path = transform(sysPath$1.join(
watchPath, sysPath$1.relative(watchPath, fullPath)
));
if (globFilter && !globFilter(path)) return;
// ensure directories are tracked
const parent = sysPath$1.dirname(path);
const item = sysPath$1.basename(path);
const watchedDir = this.fsw._getWatchedDir(
info.type === FSEVENT_TYPE_DIRECTORY ? path : parent
);
// correct for wrong events emitted
if (wrongEventFlags.has(flags) || info.event === FSEVENT_UNKNOWN) {
if (typeof opts.ignored === FUNCTION_TYPE$1) {
let stats;
try {
stats = await stat$1(path);
} catch (error) {}
if (this.fsw.closed) return;
if (this.checkIgnored(path, stats)) return;
if (sameTypes(info, stats)) {
this.addOrChange(path, fullPath, realPath, parent, watchedDir, item, info, opts);
} else {
this.handleEvent(EV_UNLINK$1, path, fullPath, realPath, parent, watchedDir, item, info, opts);
}
} else {
this.checkExists(path, fullPath, realPath, parent, watchedDir, item, info, opts);
}
} else {
switch (info.event) {
case FSEVENT_CREATED:
case FSEVENT_MODIFIED:
return this.addOrChange(path, fullPath, realPath, parent, watchedDir, item, info, opts);
case FSEVENT_DELETED:
case FSEVENT_MOVED:
return this.checkExists(path, fullPath, realPath, parent, watchedDir, item, info, opts);
}
}
};
const closer = setFSEventsListener(
watchPath,
realPath,
watchCallback,
this.fsw._emitRaw
);
this.fsw._emitReady();
return closer;
}
/**
* Handle symlinks encountered during directory scan
* @param {String} linkPath path to symlink
* @param {String} fullPath absolute path to the symlink
* @param {Function} transform pre-existing path transformer
* @param {Number} curDepth level of subdirectories traversed to where symlink is
* @returns {Promise<void>}
*/
async _handleFsEventsSymlink(linkPath, fullPath, transform, curDepth) {
// don't follow the same symlink more than once
if (this.fsw.closed || this.fsw._symlinkPaths.has(fullPath)) return;
this.fsw._symlinkPaths.set(fullPath, true);
this.fsw._incrReadyCount();
try {
const linkTarget = await realpath(linkPath);
if (this.fsw.closed) return;
if (this.fsw._isIgnored(linkTarget)) {
return this.fsw._emitReady();
}
this.fsw._incrReadyCount();
// add the linkTarget for watching with a wrapper for transform
// that causes emitted paths to incorporate the link's path
this._addToFsEvents(linkTarget || linkPath, (path) => {
let aliasedPath = linkPath;
if (linkTarget && linkTarget !== DOT_SLASH) {
aliasedPath = path.replace(linkTarget, linkPath);
} else if (path !== DOT_SLASH) {
aliasedPath = sysPath$1.join(linkPath, path);
}
return transform(aliasedPath);
}, false, curDepth);
} catch(error) {
if (this.fsw._handleError(error)) {
return this.fsw._emitReady();
}
}
}
/**
*
* @param {Path} newPath
* @param {fs.Stats} stats
*/
emitAdd(newPath, stats, processPath, opts, forceAdd) {
const pp = processPath(newPath);
const isDir = stats.isDirectory();
const dirObj = this.fsw._getWatchedDir(sysPath$1.dirname(pp));
const base = sysPath$1.basename(pp);
// ensure empty dirs get tracked
if (isDir) this.fsw._getWatchedDir(pp);
if (dirObj.has(base)) return;
dirObj.add(base);
if (!opts.ignoreInitial || forceAdd === true) {
this.fsw._emit(isDir ? EV_ADD_DIR$1 : EV_ADD$1, pp, stats);
}
}
initWatch(realPath, path, wh, processPath) {
if (this.fsw.closed) return;
const closer = this._watchWithFsEvents(
wh.watchPath,
sysPath$1.resolve(realPath || wh.watchPath),
processPath,
wh.globFilter
);
this.fsw._addPathCloser(path, closer);
}
/**
* Handle added path with fsevents
* @param {String} path file/dir path or glob pattern
* @param {Function|Boolean=} transform converts working path to what the user expects
* @param {Boolean=} forceAdd ensure add is emitted
* @param {Number=} priorDepth Level of subdirectories already traversed.
* @returns {Promise<void>}
*/
async _addToFsEvents(path, transform, forceAdd, priorDepth) {
if (this.fsw.closed) {
return;
}
const opts = this.fsw.options;
const processPath = typeof transform === FUNCTION_TYPE$1 ? transform : IDENTITY_FN;
const wh = this.fsw._getWatchHelpers(path);
// evaluate what is at the path we're being asked to watch
try {
const stats = await statMethods[wh.statMethod](wh.watchPath);
if (this.fsw.closed) return;
if (this.fsw._isIgnored(wh.watchPath, stats)) {
throw null;
}
if (stats.isDirectory()) {
// emit addDir unless this is a glob parent
if (!wh.globFilter) this.emitAdd(processPath(path), stats, processPath, opts, forceAdd);
// don't recurse further if it would exceed depth setting
if (priorDepth && priorDepth > opts.depth) return;
// scan the contents of the dir
this.fsw._readdirp(wh.watchPath, {
fileFilter: entry => wh.filterPath(entry),
directoryFilter: entry => wh.filterDir(entry),
...Depth(opts.depth - (priorDepth || 0))
}).on(STR_DATA, (entry) => {
// need to check filterPath on dirs b/c filterDir is less restrictive
if (this.fsw.closed) {
return;
}
if (entry.stats.isDirectory() && !wh.filterPath(entry)) return;
const joinedPath = sysPath$1.join(wh.watchPath, entry.path);
const {fullPath} = entry;
if (wh.followSymlinks && entry.stats.isSymbolicLink()) {
// preserve the current depth here since it can't be derived from
// real paths past the symlink
const curDepth = opts.depth === undefined ?
undefined : calcDepth(joinedPath, sysPath$1.resolve(wh.watchPath)) + 1;
this._handleFsEventsSymlink(joinedPath, fullPath, processPath, curDepth);
} else {
this.emitAdd(joinedPath, entry.stats, processPath, opts, forceAdd);
}
}).on(EV_ERROR$1, EMPTY_FN$1).on(STR_END$1, () => {
this.fsw._emitReady();
});
} else {
this.emitAdd(wh.watchPath, stats, processPath, opts, forceAdd);
this.fsw._emitReady();
}
} catch (error) {
if (!error || this.fsw._handleError(error)) {
// TODO: Strange thing: "should not choke on an ignored watch path" will be failed without 2 ready calls -__-
this.fsw._emitReady();
this.fsw._emitReady();
}
}
if (opts.persistent && forceAdd !== true) {
if (typeof transform === FUNCTION_TYPE$1) {
// realpath has already been resolved
this.initWatch(undefined, path, wh, processPath);
} else {
let realPath;
try {
realPath = await realpath(wh.watchPath);
} catch (e) {}
this.initWatch(realPath, path, wh, processPath);
}
}
}
};
fseventsHandler.exports = FsEventsHandler$1;
fseventsHandler.exports.canUse = canUse;
var fseventsHandlerExports = fseventsHandler.exports;
const { EventEmitter: EventEmitter$2 } = require$$0$7;
const fs$5 = require$$0__default;
const sysPath = require$$0$4;
const { promisify } = require$$0$5;
const readdirp = readdirp_1;
const anymatch = anymatchExports.default;
const globParent = globParent$2;
const isGlob = isGlob$2;
const braces = braces_1;
const normalizePath = normalizePath$2;
const NodeFsHandler = nodefsHandler;
const FsEventsHandler = fseventsHandlerExports;
const {
EV_ALL,
EV_READY,
EV_ADD,
EV_CHANGE,
EV_UNLINK,
EV_ADD_DIR,
EV_UNLINK_DIR,
EV_RAW,
EV_ERROR,
STR_CLOSE,
STR_END,
BACK_SLASH_RE,
DOUBLE_SLASH_RE,
SLASH_OR_BACK_SLASH_RE,
DOT_RE,
REPLACER_RE,
SLASH,
SLASH_SLASH,
BRACE_START,
BANG,
ONE_DOT,
TWO_DOTS,
GLOBSTAR,
SLASH_GLOBSTAR,
ANYMATCH_OPTS,
STRING_TYPE,
FUNCTION_TYPE,
EMPTY_STR,
EMPTY_FN,
isWindows: isWindows$1,
isMacos,
isIBMi
} = constants$1;
const stat = promisify(fs$5.stat);
const readdir = promisify(fs$5.readdir);
/**
* @typedef {String} Path
* @typedef {'all'|'add'|'addDir'|'change'|'unlink'|'unlinkDir'|'raw'|'error'|'ready'} EventName
* @typedef {'readdir'|'watch'|'add'|'remove'|'change'} ThrottleType
*/
/**
*
* @typedef {Object} WatchHelpers
* @property {Boolean} followSymlinks
* @property {'stat'|'lstat'} statMethod
* @property {Path} path
* @property {Path} watchPath
* @property {Function} entryPath
* @property {Boolean} hasGlob
* @property {Object} globFilter
* @property {Function} filterPath
* @property {Function} filterDir
*/
const arrify = (value = []) => Array.isArray(value) ? value : [value];
const flatten = (list, result = []) => {
list.forEach(item => {
if (Array.isArray(item)) {
flatten(item, result);
} else {
result.push(item);
}
});
return result;
};
const unifyPaths = (paths_) => {
/**
* @type {Array<String>}
*/
const paths = flatten(arrify(paths_));
if (!paths.every(p => typeof p === STRING_TYPE)) {
throw new TypeError(`Non-string provided as watch path: ${paths}`);
}
return paths.map(normalizePathToUnix);
};
// If SLASH_SLASH occurs at the beginning of path, it is not replaced
// because "//StoragePC/DrivePool/Movies" is a valid network path
const toUnix = (string) => {
let str = string.replace(BACK_SLASH_RE, SLASH);
let prepend = false;
if (str.startsWith(SLASH_SLASH)) {
prepend = true;
}
while (str.match(DOUBLE_SLASH_RE)) {
str = str.replace(DOUBLE_SLASH_RE, SLASH);
}
if (prepend) {
str = SLASH + str;
}
return str;
};
// Our version of upath.normalize
// TODO: this is not equal to path-normalize module - investigate why
const normalizePathToUnix = (path) => toUnix(sysPath.normalize(toUnix(path)));
const normalizeIgnored = (cwd = EMPTY_STR) => (path) => {
if (typeof path !== STRING_TYPE) return path;
return normalizePathToUnix(sysPath.isAbsolute(path) ? path : sysPath.join(cwd, path));
};
const getAbsolutePath = (path, cwd) => {
if (sysPath.isAbsolute(path)) {
return path;
}
if (path.startsWith(BANG)) {
return BANG + sysPath.join(cwd, path.slice(1));
}
return sysPath.join(cwd, path);
};
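// Illustrative sketch (not part of the original bundle, never invoked): how
// the path helpers above behave. The inputs and the cwd '/srv/app' are
// hypothetical; joined results are shown as produced on a POSIX system.
function examplePathHelpers() {
  normalizePathToUnix('media//clips/./trailers'); // 'media/clips/trailers'
  getAbsolutePath('clips', '/srv/app');           // '/srv/app/clips'
  // A leading '!' (negation marker) is kept while the rest is resolved.
  return getAbsolutePath('!cache', '/srv/app');   // '!/srv/app/cache'
}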
const undef = (opts, key) => opts[key] === undefined;
/**
* Directory entry.
* @property {Path} path
* @property {Set<Path>} items
*/
class DirEntry {
/**
* @param {Path} dir
* @param {Function} removeWatcher
*/
constructor(dir, removeWatcher) {
this.path = dir;
this._removeWatcher = removeWatcher;
/** @type {Set<Path>} */
this.items = new Set();
}
add(item) {
const {items} = this;
if (!items) return;
if (item !== ONE_DOT && item !== TWO_DOTS) items.add(item);
}
async remove(item) {
const {items} = this;
if (!items) return;
items.delete(item);
if (items.size > 0) return;
const dir = this.path;
try {
await readdir(dir);
} catch (err) {
if (this._removeWatcher) {
this._removeWatcher(sysPath.dirname(dir), sysPath.basename(dir));
}
}
}
has(item) {
const {items} = this;
if (!items) return;
return items.has(item);
}
/**
* @returns {Array<String>}
*/
getChildren() {
const {items} = this;
if (!items) return;
return [...items.values()];
}
dispose() {
this.items.clear();
delete this.path;
delete this._removeWatcher;
delete this.items;
Object.freeze(this);
}
}
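// Illustrative sketch (not part of the original bundle, never invoked):
// DirEntry tracks the child names seen in one watched directory; 'clips' and
// its entries below are hypothetical.
function exampleDirEntry() {
  const entry = new DirEntry('clips', () => {});
  entry.add('intro.mp4');
  entry.add('.');             // '.' and '..' are never stored
  entry.has('intro.mp4');     // true
  return entry.getChildren(); // ['intro.mp4']
}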
const STAT_METHOD_F = 'stat';
const STAT_METHOD_L = 'lstat';
class WatchHelper {
constructor(path, watchPath, follow, fsw) {
this.fsw = fsw;
this.path = path = path.replace(REPLACER_RE, EMPTY_STR);
this.watchPath = watchPath;
this.fullWatchPath = sysPath.resolve(watchPath);
this.hasGlob = watchPath !== path;
/** @type {object|boolean} */
if (path === EMPTY_STR) this.hasGlob = false;
this.globSymlink = this.hasGlob && follow ? undefined : false;
this.globFilter = this.hasGlob ? anymatch(path, undefined, ANYMATCH_OPTS) : false;
this.dirParts = this.getDirParts(path);
this.dirParts.forEach((parts) => {
if (parts.length > 1) parts.pop();
});
this.followSymlinks = follow;
this.statMethod = follow ? STAT_METHOD_F : STAT_METHOD_L;
}
checkGlobSymlink(entry) {
// only need to resolve once
// first entry should always have entry.parentDir === EMPTY_STR
if (this.globSymlink === undefined) {
this.globSymlink = entry.fullParentDir === this.fullWatchPath ?
false : {realPath: entry.fullParentDir, linkPath: this.fullWatchPath};
}
if (this.globSymlink) {
return entry.fullPath.replace(this.globSymlink.realPath, this.globSymlink.linkPath);
}
return entry.fullPath;
}
entryPath(entry) {
return sysPath.join(this.watchPath,
sysPath.relative(this.watchPath, this.checkGlobSymlink(entry))
);
}
filterPath(entry) {
const {stats} = entry;
if (stats && stats.isSymbolicLink()) return this.filterDir(entry);
const resolvedPath = this.entryPath(entry);
const matchesGlob = this.hasGlob && typeof this.globFilter === FUNCTION_TYPE ?
this.globFilter(resolvedPath) : true;
return matchesGlob &&
this.fsw._isntIgnored(resolvedPath, stats) &&
this.fsw._hasReadPermissions(stats);
}
getDirParts(path) {
if (!this.hasGlob) return [];
const parts = [];
const expandedPath = path.includes(BRACE_START) ? braces.expand(path) : [path];
expandedPath.forEach((path) => {
parts.push(sysPath.relative(this.watchPath, path).split(SLASH_OR_BACK_SLASH_RE));
});
return parts;
}
filterDir(entry) {
if (this.hasGlob) {
const entryParts = this.getDirParts(this.checkGlobSymlink(entry));
let globstar = false;
this.unmatchedGlob = !this.dirParts.some((parts) => {
return parts.every((part, i) => {
if (part === GLOBSTAR) globstar = true;
return globstar || !entryParts[0][i] || anymatch(part, entryParts[0][i], ANYMATCH_OPTS);
});
});
}
return !this.unmatchedGlob && this.fsw._isntIgnored(this.entryPath(entry), entry.stats);
}
}
/**
* Watches files & directories for changes. Emitted events:
* `add`, `addDir`, `change`, `unlink`, `unlinkDir`, `all`, `error`
*
* new FSWatcher()
* .add(directories)
* .on('add', path => log('File', path, 'was added'))
*/
class FSWatcher extends EventEmitter$2 {
// Not indenting methods for history sake; for now.
constructor(_opts) {
super();
const opts = {};
if (_opts) Object.assign(opts, _opts); // for frozen objects
/** @type {Map<String, DirEntry>} */
this._watched = new Map();
/** @type {Map<String, Array>} */
this._closers = new Map();
/** @type {Set<String>} */
this._ignoredPaths = new Set();
/** @type {Map<ThrottleType, Map>} */
this._throttled = new Map();
/** @type {Map<Path, String|Boolean>} */
this._symlinkPaths = new Map();
this._streams = new Set();
this.closed = false;
// Set up default options.
if (undef(opts, 'persistent')) opts.persistent = true;
if (undef(opts, 'ignoreInitial')) opts.ignoreInitial = false;
if (undef(opts, 'ignorePermissionErrors')) opts.ignorePermissionErrors = false;
if (undef(opts, 'interval')) opts.interval = 100;
if (undef(opts, 'binaryInterval')) opts.binaryInterval = 300;
if (undef(opts, 'disableGlobbing')) opts.disableGlobbing = false;
opts.enableBinaryInterval = opts.binaryInterval !== opts.interval;
// Enable fsevents on OS X when polling isn't explicitly enabled.
if (undef(opts, 'useFsEvents')) opts.useFsEvents = !opts.usePolling;
// If we can't use fsevents, ensure the options reflect that it's disabled.
const canUseFsEvents = FsEventsHandler.canUse();
if (!canUseFsEvents) opts.useFsEvents = false;
// Use polling on Mac if not using fsevents.
// Other platforms use non-polling fs_watch.
if (undef(opts, 'usePolling') && !opts.useFsEvents) {
opts.usePolling = isMacos;
}
// Always default to polling on IBM i because fs.watch() is not available on IBM i.
if(isIBMi) {
opts.usePolling = true;
}
// Global override (useful for end-developers that need to force polling for all
// instances of chokidar, regardless of usage/dependency depth)
const envPoll = process.env.CHOKIDAR_USEPOLLING;
if (envPoll !== undefined) {
const envLower = envPoll.toLowerCase();
if (envLower === 'false' || envLower === '0') {
opts.usePolling = false;
} else if (envLower === 'true' || envLower === '1') {
opts.usePolling = true;
} else {
opts.usePolling = !!envLower;
}
}
const envInterval = process.env.CHOKIDAR_INTERVAL;
if (envInterval) {
opts.interval = Number.parseInt(envInterval, 10);
}
// Editor atomic write normalization enabled by default with fs.watch
if (undef(opts, 'atomic')) opts.atomic = !opts.usePolling && !opts.useFsEvents;
if (opts.atomic) this._pendingUnlinks = new Map();
if (undef(opts, 'followSymlinks')) opts.followSymlinks = true;
if (undef(opts, 'awaitWriteFinish')) opts.awaitWriteFinish = false;
if (opts.awaitWriteFinish === true) opts.awaitWriteFinish = {};
const awf = opts.awaitWriteFinish;
if (awf) {
if (!awf.stabilityThreshold) awf.stabilityThreshold = 2000;
if (!awf.pollInterval) awf.pollInterval = 100;
this._pendingWrites = new Map();
}
if (opts.ignored) opts.ignored = arrify(opts.ignored);
let readyCalls = 0;
this._emitReady = () => {
readyCalls++;
if (readyCalls >= this._readyCount) {
this._emitReady = EMPTY_FN;
this._readyEmitted = true;
// use process.nextTick to allow time for listener to be bound
process.nextTick(() => this.emit(EV_READY));
}
};
this._emitRaw = (...args) => this.emit(EV_RAW, ...args);
this._readyEmitted = false;
this.options = opts;
// Initialize with proper watcher.
if (opts.useFsEvents) {
this._fsEventsHandler = new FsEventsHandler(this);
} else {
this._nodeFsHandler = new NodeFsHandler(this);
}
// You’re frozen when your heart’s not open.
Object.freeze(opts);
}
// Public methods
/**
* Adds paths to be watched on an existing FSWatcher instance
* @param {Path|Array<Path>} paths_
* @param {String=} _origAdd private; for handling non-existent paths to be watched
* @param {Boolean=} _internal private; indicates a non-user add
* @returns {FSWatcher} for chaining
*/
add(paths_, _origAdd, _internal) {
const {cwd, disableGlobbing} = this.options;
this.closed = false;
let paths = unifyPaths(paths_);
if (cwd) {
paths = paths.map((path) => {
const absPath = getAbsolutePath(path, cwd);
// Check `path` instead of `absPath` because the cwd portion can't be a glob
if (disableGlobbing || !isGlob(path)) {
return absPath;
}
return normalizePath(absPath);
});
}
// set aside negated glob strings
paths = paths.filter((path) => {
if (path.startsWith(BANG)) {
this._ignoredPaths.add(path.slice(1));
return false;
}
// if a path is being added that was previously ignored, stop ignoring it
this._ignoredPaths.delete(path);
this._ignoredPaths.delete(path + SLASH_GLOBSTAR);
// reset the cached userIgnored anymatch fn
// to make ignoredPaths changes effective
this._userIgnored = undefined;
return true;
});
if (this.options.useFsEvents && this._fsEventsHandler) {
if (!this._readyCount) this._readyCount = paths.length;
if (this.options.persistent) this._readyCount += paths.length;
paths.forEach((path) => this._fsEventsHandler._addToFsEvents(path));
} else {
if (!this._readyCount) this._readyCount = 0;
this._readyCount += paths.length;
Promise.all(
paths.map(async path => {
const res = await this._nodeFsHandler._addToNodeFs(path, !_internal, 0, 0, _origAdd);
if (res) this._emitReady();
return res;
})
).then(results => {
if (this.closed) return;
results.filter(item => item).forEach(item => {
this.add(sysPath.dirname(item), sysPath.basename(_origAdd || item));
});
});
}
return this;
}
/**
* Close watchers or start ignoring events from specified paths.
* @param {Path|Array<Path>} paths_ - string or array of strings, file/directory paths and/or globs
* @returns {FSWatcher} for chaining
*/
unwatch(paths_) {
if (this.closed) return this;
const paths = unifyPaths(paths_);
const {cwd} = this.options;
paths.forEach((path) => {
// convert to absolute path unless relative path already matches
if (!sysPath.isAbsolute(path) && !this._closers.has(path)) {
if (cwd) path = sysPath.join(cwd, path);
path = sysPath.resolve(path);
}
this._closePath(path);
this._ignoredPaths.add(path);
if (this._watched.has(path)) {
this._ignoredPaths.add(path + SLASH_GLOBSTAR);
}
// reset the cached userIgnored anymatch fn
// to make ignoredPaths changes effective
this._userIgnored = undefined;
});
return this;
}
/**
* Close watchers and remove all listeners from watched paths.
* @returns {Promise<void>}.
*/
close() {
if (this.closed) return this._closePromise;
this.closed = true;
// Memory management.
this.removeAllListeners();
const closers = [];
this._closers.forEach(closerList => closerList.forEach(closer => {
const promise = closer();
if (promise instanceof Promise) closers.push(promise);
}));
this._streams.forEach(stream => stream.destroy());
this._userIgnored = undefined;
this._readyCount = 0;
this._readyEmitted = false;
this._watched.forEach(dirent => dirent.dispose());
['closers', 'watched', 'streams', 'symlinkPaths', 'throttled'].forEach(key => {
this[`_${key}`].clear();
});
this._closePromise = closers.length ? Promise.all(closers).then(() => undefined) : Promise.resolve();
return this._closePromise;
}
/**
* Expose list of watched paths
 * @returns {Object} map of watched directories to sorted arrays of their children
*/
getWatched() {
const watchList = {};
this._watched.forEach((entry, dir) => {
const key = this.options.cwd ? sysPath.relative(this.options.cwd, dir) : dir;
watchList[key || ONE_DOT] = entry.getChildren().sort();
});
return watchList;
}
emitWithAll(event, args) {
this.emit(...args);
if (event !== EV_ERROR) this.emit(EV_ALL, ...args);
}
// Common helpers
// --------------
/**
* Normalize and emit events.
* Calling _emit DOES NOT MEAN emit() would be called!
* @param {EventName} event Type of event
* @param {Path} path File or directory path
* @param {*=} val1 arguments to be passed with event
* @param {*=} val2
* @param {*=} val3
 * @returns the FSWatcher instance for chaining (undefined when the watcher is closed or the event is suppressed)
*/
async _emit(event, path, val1, val2, val3) {
if (this.closed) return;
const opts = this.options;
if (isWindows$1) path = sysPath.normalize(path);
if (opts.cwd) path = sysPath.relative(opts.cwd, path);
/** @type Array<any> */
const args = [event, path];
if (val3 !== undefined) args.push(val1, val2, val3);
else if (val2 !== undefined) args.push(val1, val2);
else if (val1 !== undefined) args.push(val1);
const awf = opts.awaitWriteFinish;
let pw;
if (awf && (pw = this._pendingWrites.get(path))) {
pw.lastChange = new Date();
return this;
}
if (opts.atomic) {
if (event === EV_UNLINK) {
this._pendingUnlinks.set(path, args);
setTimeout(() => {
this._pendingUnlinks.forEach((entry, path) => {
this.emit(...entry);
this.emit(EV_ALL, ...entry);
this._pendingUnlinks.delete(path);
});
}, typeof opts.atomic === 'number' ? opts.atomic : 100);
return this;
}
if (event === EV_ADD && this._pendingUnlinks.has(path)) {
event = args[0] = EV_CHANGE;
this._pendingUnlinks.delete(path);
}
}
if (awf && (event === EV_ADD || event === EV_CHANGE) && this._readyEmitted) {
const awfEmit = (err, stats) => {
if (err) {
event = args[0] = EV_ERROR;
args[1] = err;
this.emitWithAll(event, args);
} else if (stats) {
// if stats doesn't exist the file must have been deleted
if (args.length > 2) {
args[2] = stats;
} else {
args.push(stats);
}
this.emitWithAll(event, args);
}
};
this._awaitWriteFinish(path, awf.stabilityThreshold, event, awfEmit);
return this;
}
if (event === EV_CHANGE) {
const isThrottled = !this._throttle(EV_CHANGE, path, 50);
if (isThrottled) return this;
}
if (opts.alwaysStat && val1 === undefined &&
(event === EV_ADD || event === EV_ADD_DIR || event === EV_CHANGE)
) {
const fullPath = opts.cwd ? sysPath.join(opts.cwd, path) : path;
let stats;
try {
stats = await stat(fullPath);
} catch (err) {}
// Suppress event when fs_stat fails, to avoid sending undefined 'stat'
if (!stats || this.closed) return;
args.push(stats);
}
this.emitWithAll(event, args);
return this;
}
/**
* Common handler for errors
* @param {Error} error
* @returns {Error|Boolean} The error if defined, otherwise the value of the FSWatcher instance's `closed` flag
*/
_handleError(error) {
const code = error && error.code;
if (error && code !== 'ENOENT' && code !== 'ENOTDIR' &&
(!this.options.ignorePermissionErrors || (code !== 'EPERM' && code !== 'EACCES'))
) {
this.emit(EV_ERROR, error);
}
return error || this.closed;
}
/**
* Helper utility for throttling
* @param {ThrottleType} actionType type being throttled
* @param {Path} path being acted upon
* @param {Number} timeout duration of time to suppress duplicate actions
* @returns {Object|false} tracking object or false if action should be suppressed
*/
_throttle(actionType, path, timeout) {
if (!this._throttled.has(actionType)) {
this._throttled.set(actionType, new Map());
}
/** @type {Map<Path, Object>} */
const action = this._throttled.get(actionType);
/** @type {Object} */
const actionPath = action.get(path);
if (actionPath) {
actionPath.count++;
return false;
}
let timeoutObject;
const clear = () => {
const item = action.get(path);
const count = item ? item.count : 0;
action.delete(path);
clearTimeout(timeoutObject);
if (item) clearTimeout(item.timeoutObject);
return count;
};
timeoutObject = setTimeout(clear, timeout);
const thr = {timeoutObject, clear, count: 0};
action.set(path, thr);
return thr;
}
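// e.g. the first _throttle(EV_CHANGE, 'a.txt', 50) call returns a tracking object ({count: 0, ...});
// further calls for the same path within the next 50ms return false (and bump count)
// until the timeout fires and clear() removes the entry.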
_incrReadyCount() {
return this._readyCount++;
}
/**
* Awaits write operation to finish.
* Polls a newly created file for size variations. When the file size does not change for `threshold` milliseconds, the callback is called.
* @param {Path} path being acted upon
* @param {Number} threshold Time in milliseconds a file's size must remain unchanged before the write operation is considered finished
* @param {EventName} event
* @param {Function} awfEmit Callback to be called when ready for event to be emitted.
*/
_awaitWriteFinish(path, threshold, event, awfEmit) {
let timeoutHandler;
let fullPath = path;
if (this.options.cwd && !sysPath.isAbsolute(path)) {
fullPath = sysPath.join(this.options.cwd, path);
}
const now = new Date();
const awaitWriteFinish = (prevStat) => {
fs$5.stat(fullPath, (err, curStat) => {
if (err || !this._pendingWrites.has(path)) {
if (err && err.code !== 'ENOENT') awfEmit(err);
return;
}
const now = Number(new Date());
if (prevStat && curStat.size !== prevStat.size) {
this._pendingWrites.get(path).lastChange = now;
}
const pw = this._pendingWrites.get(path);
const df = now - pw.lastChange;
if (df >= threshold) {
this._pendingWrites.delete(path);
awfEmit(undefined, curStat);
} else {
timeoutHandler = setTimeout(
awaitWriteFinish,
this.options.awaitWriteFinish.pollInterval,
curStat
);
}
});
};
if (!this._pendingWrites.has(path)) {
this._pendingWrites.set(path, {
lastChange: now,
cancelWait: () => {
this._pendingWrites.delete(path);
clearTimeout(timeoutHandler);
return event;
}
});
timeoutHandler = setTimeout(
awaitWriteFinish,
this.options.awaitWriteFinish.pollInterval
);
}
}
_getGlobIgnored() {
return [...this._ignoredPaths.values()];
}
/**
* Determines whether user has asked to ignore this path.
* @param {Path} path filepath or dir
* @param {fs.Stats=} stats result of fs.stat
* @returns {Boolean}
*/
_isIgnored(path, stats) {
if (this.options.atomic && DOT_RE.test(path)) return true;
if (!this._userIgnored) {
const {cwd} = this.options;
const ign = this.options.ignored;
const ignored = ign && ign.map(normalizeIgnored(cwd));
const paths = arrify(ignored)
.filter((path) => typeof path === STRING_TYPE && !isGlob(path))
.map((path) => path + SLASH_GLOBSTAR);
const list = this._getGlobIgnored().map(normalizeIgnored(cwd)).concat(ignored, paths);
this._userIgnored = anymatch(list, undefined, ANYMATCH_OPTS);
}
return this._userIgnored([path, stats]);
}
_isntIgnored(path, stat) {
return !this._isIgnored(path, stat);
}
/**
* Provides a set of common helpers and properties relating to symlink and glob handling.
* @param {Path} path file, directory, or glob pattern being watched
* @param {Number=} depth at any depth > 0, this isn't a glob
* @returns {WatchHelper} object containing helpers for this path
*/
_getWatchHelpers(path, depth) {
const watchPath = depth || this.options.disableGlobbing || !isGlob(path) ? path : globParent(path);
const follow = this.options.followSymlinks;
return new WatchHelper(path, watchPath, follow, this);
}
// Directory helpers
// -----------------
/**
* Provides directory tracking objects
* @param {String} directory path of the directory
* @returns {DirEntry} the directory's tracking object
*/
_getWatchedDir(directory) {
if (!this._boundRemove) this._boundRemove = this._remove.bind(this);
const dir = sysPath.resolve(directory);
if (!this._watched.has(dir)) this._watched.set(dir, new DirEntry(dir, this._boundRemove));
return this._watched.get(dir);
}
// File helpers
// ------------
/**
* Check for read permissions.
* Based on this answer on SO: https://stackoverflow.com/a/11781404/1358405
* @param {fs.Stats} stats - object, result of fs_stat
* @returns {Boolean} indicates whether the file can be read
*/
_hasReadPermissions(stats) {
if (this.options.ignorePermissionErrors) return true;
// stats.mode may be bigint
const md = stats && Number.parseInt(stats.mode, 10);
const st = md & 0o777;
const it = Number.parseInt(st.toString(8)[0], 10);
return Boolean(4 & it);
}
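// e.g. for mode 0o644: 0o644 & 0o777 is 0o644, its leading octal digit (the owner bits) is 6,
// and 6 & 4 is truthy, so the file is treated as readable.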
/**
* Handles emitting unlink events for files and directories and, via recursion,
* for the files and directories nested within directories that are unlinked.
* @param {String} directory within which the following item is located
* @param {String} item base path of item/directory
* @param {Boolean=} isDirectory whether the removed item is known to be a directory
* @returns {void}
*/
_remove(directory, item, isDirectory) {
// if what is being deleted is a directory, get that directory's paths
// for recursive deleting and cleaning of watched object
// if it is not a directory, nestedDirectoryChildren will be empty array
const path = sysPath.join(directory, item);
const fullPath = sysPath.resolve(path);
isDirectory = isDirectory != null
? isDirectory
: this._watched.has(path) || this._watched.has(fullPath);
// prevent duplicate handling in case of arriving here nearly simultaneously
// via multiple paths (such as _handleFile and _handleDir)
if (!this._throttle('remove', path, 100)) return;
// if the only watched file is removed, watch for its return
if (!isDirectory && !this.options.useFsEvents && this._watched.size === 1) {
this.add(directory, item, true);
}
// This will create a new entry in the watched object in either case
// so we got to do the directory check beforehand
const wp = this._getWatchedDir(path);
const nestedDirectoryChildren = wp.getChildren();
// Recursively remove children directories / files.
nestedDirectoryChildren.forEach(nested => this._remove(path, nested));
// Check if item was on the watched list and remove it
const parent = this._getWatchedDir(directory);
const wasTracked = parent.has(item);
parent.remove(item);
// Fixes issue #1042 -> Relative paths were detected and added as symlinks
// (https://github.com/paulmillr/chokidar/blob/e1753ddbc9571bdc33b4a4af172d52cb6e611c10/lib/nodefs-handler.js#L612),
// but never removed from the map in case the path was deleted.
// This leads to an incorrect state if the path was recreated:
// https://github.com/paulmillr/chokidar/blob/e1753ddbc9571bdc33b4a4af172d52cb6e611c10/lib/nodefs-handler.js#L553
if (this._symlinkPaths.has(fullPath)) {
this._symlinkPaths.delete(fullPath);
}
// If we wait for this file to be fully written, cancel the wait.
let relPath = path;
if (this.options.cwd) relPath = sysPath.relative(this.options.cwd, path);
if (this.options.awaitWriteFinish && this._pendingWrites.has(relPath)) {
const event = this._pendingWrites.get(relPath).cancelWait();
if (event === EV_ADD) return;
}
// The Entry will either be a directory that just got removed
// or a bogus entry to a file, in either case we have to remove it
this._watched.delete(path);
this._watched.delete(fullPath);
const eventName = isDirectory ? EV_UNLINK_DIR : EV_UNLINK;
if (wasTracked && !this._isIgnored(path)) this._emit(eventName, path);
// Avoid conflicts if we later create another file with the same name
if (!this.options.useFsEvents) {
this._closePath(path);
}
}
/**
* Closes all watchers for a path
* @param {Path} path
*/
_closePath(path) {
this._closeFile(path);
const dir = sysPath.dirname(path);
this._getWatchedDir(dir).remove(sysPath.basename(path));
}
/**
* Closes only file-specific watchers
* @param {Path} path
*/
_closeFile(path) {
const closers = this._closers.get(path);
if (!closers) return;
closers.forEach(closer => closer());
this._closers.delete(path);
}
/**
*
* @param {Path} path
* @param {Function} closer
*/
_addPathCloser(path, closer) {
if (!closer) return;
let list = this._closers.get(path);
if (!list) {
list = [];
this._closers.set(path, list);
}
list.push(closer);
}
_readdirp(root, opts) {
if (this.closed) return;
const options = {type: EV_ALL, alwaysStat: true, lstat: true, ...opts};
let stream = readdirp(root, options);
this._streams.add(stream);
stream.once(STR_CLOSE, () => {
stream = undefined;
});
stream.once(STR_END, () => {
if (stream) {
this._streams.delete(stream);
stream = undefined;
}
});
return stream;
}
}
// Export FSWatcher class
chokidar.FSWatcher = FSWatcher;
/**
* Instantiates watcher with paths to be tracked.
* @param {String|Array<String>} paths file/directory paths and/or globs
* @param {Object=} options chokidar opts
* @returns an instance of FSWatcher for chaining.
*/
const watch = (paths, options) => {
const watcher = new FSWatcher(options);
watcher.add(paths);
return watcher;
};
chokidar.watch = watch;
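// Usage sketch (paths and options are illustrative):
//   const watcher = chokidar.watch(['src/**/*.js', 'config.json'], { ignoreInitial: true });
//   watcher.on('change', (path) => console.log(`${path} changed`));
//   // ...later: await watcher.close();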
var shellQuote$1 = {};
var quote = function quote(xs) {
return xs.map(function (s) {
if (s && typeof s === 'object') {
return s.op.replace(/(.)/g, '\\$1');
}
if ((/["\s]/).test(s) && !(/'/).test(s)) {
return "'" + s.replace(/(['\\])/g, '\\$1') + "'";
}
if ((/["'\s]/).test(s)) {
return '"' + s.replace(/(["\\$`!])/g, '\\$1') + '"';
}
return String(s).replace(/([A-Za-z]:)?([#!"$&'()*,:;<=>?@[\\\]^`{|}])/g, '$1\\$2');
}).join(' ');
};
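// e.g. quote(['echo', 'hello world']) === "echo 'hello world'"; plain words pass through,
// while arguments containing whitespace or quotes are wrapped and escaped.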
// '<(' is the process substitution operator and
// can be parsed the same as a control operator
var CONTROL = '(?:' + [
'\\|\\|',
'\\&\\&',
';;',
'\\|\\&',
'\\<\\(',
'\\<\\<\\<',
'>>',
'>\\&',
'<\\&',
'[&;()|<>]'
].join('|') + ')';
var controlRE = new RegExp('^' + CONTROL + '$');
var META = '|&;()<> \\t';
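// Note: the two pattern names below are swapped relative to what they match:
// SINGLE_QUOTE matches a double-quoted string and DOUBLE_QUOTE a single-quoted one.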
var SINGLE_QUOTE = '"((\\\\"|[^"])*?)"';
var DOUBLE_QUOTE = '\'((\\\\\'|[^\'])*?)\'';
var hash = /^#$/;
var SQ = "'";
var DQ = '"';
var DS = '$';
var TOKEN = '';
var mult = 0x100000000; // Math.pow(16, 8);
for (var i = 0; i < 4; i++) {
TOKEN += (mult * Math.random()).toString(16);
}
var startsWithToken = new RegExp('^' + TOKEN);
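// TOKEN is a random sentinel string: getVar() wraps JSON-encoded (object) env values in it,
// and parse() later splits on it (when env is a function) to restore those values.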
function matchAll(s, r) {
var origIndex = r.lastIndex;
var matches = [];
var matchObj;
while ((matchObj = r.exec(s))) {
matches.push(matchObj);
if (r.lastIndex === matchObj.index) {
r.lastIndex += 1;
}
}
r.lastIndex = origIndex;
return matches;
}
function getVar(env, pre, key) {
var r = typeof env === 'function' ? env(key) : env[key];
if (typeof r === 'undefined' && key != '') {
r = '';
} else if (typeof r === 'undefined') {
r = '$';
}
if (typeof r === 'object') {
return pre + TOKEN + JSON.stringify(r) + TOKEN;
}
return pre + r;
}
function parseInternal(string, env, opts) {
if (!opts) {
opts = {};
}
var BS = opts.escape || '\\';
var BAREWORD = '(\\' + BS + '[\'"' + META + ']|[^\\s\'"' + META + '])+';
var chunker = new RegExp([
'(' + CONTROL + ')', // control chars
'(' + BAREWORD + '|' + SINGLE_QUOTE + '|' + DOUBLE_QUOTE + ')+'
].join('|'), 'g');
var matches = matchAll(string, chunker);
if (matches.length === 0) {
return [];
}
if (!env) {
env = {};
}
var commented = false;
return matches.map(function (match) {
var s = match[0];
if (!s || commented) {
return void undefined;
}
if (controlRE.test(s)) {
return { op: s };
}
// Hand-written scanner/parser for Bash quoting rules:
//
// 1. inside single quotes, all characters are printed literally.
// 2. inside double quotes, all characters are printed literally
// except variables prefixed by '$' and backslashes followed by
// either a double quote or another backslash.
// 3. outside of any quotes, backslashes are treated as escape
// characters and not printed (unless they are themselves escaped)
// 4. quote context can switch mid-token if there is no whitespace
// between the two quote contexts (e.g. all'one'"token" parses as
// "allonetoken")
var quote = false;
var esc = false;
var out = '';
var isGlob = false;
var i;
function parseEnvVar() {
i += 1;
var varend;
var varname;
var char = s.charAt(i);
if (char === '{') {
i += 1;
if (s.charAt(i) === '}') {
throw new Error('Bad substitution: ' + s.slice(i - 2, i + 1));
}
varend = s.indexOf('}', i);
if (varend < 0) {
throw new Error('Bad substitution: ' + s.slice(i));
}
varname = s.slice(i, varend);
i = varend;
} else if ((/[*@#?$!_-]/).test(char)) {
varname = char;
i += 1;
} else {
var slicedFromI = s.slice(i);
varend = slicedFromI.match(/[^\w\d_]/);
if (!varend) {
varname = slicedFromI;
i = s.length;
} else {
varname = slicedFromI.slice(0, varend.index);
i += varend.index - 1;
}
}
return getVar(env, '', varname);
}
for (i = 0; i < s.length; i++) {
var c = s.charAt(i);
isGlob = isGlob || (!quote && (c === '*' || c === '?'));
if (esc) {
out += c;
esc = false;
} else if (quote) {
if (c === quote) {
quote = false;
} else if (quote == SQ) {
out += c;
} else { // Double quote
if (c === BS) {
i += 1;
c = s.charAt(i);
if (c === DQ || c === BS || c === DS) {
out += c;
} else {
out += BS + c;
}
} else if (c === DS) {
out += parseEnvVar();
} else {
out += c;
}
}
} else if (c === DQ || c === SQ) {
quote = c;
} else if (controlRE.test(c)) {
return { op: s };
} else if (hash.test(c)) {
commented = true;
var commentObj = { comment: string.slice(match.index + i + 1) };
if (out.length) {
return [out, commentObj];
}
return [commentObj];
} else if (c === BS) {
esc = true;
} else if (c === DS) {
out += parseEnvVar();
} else {
out += c;
}
}
if (isGlob) {
return { op: 'glob', pattern: out };
}
return out;
}).reduce(function (prev, arg) { // finalize parsed arguments
// TODO: replace this whole reduce with a concat
return typeof arg === 'undefined' ? prev : prev.concat(arg);
}, []);
}
var parse$6 = function parse(s, env, opts) {
var mapped = parseInternal(s, env, opts);
if (typeof env !== 'function') {
return mapped;
}
return mapped.reduce(function (acc, s) {
if (typeof s === 'object') {
return acc.concat(s);
}
var xs = s.split(RegExp('(' + TOKEN + '.*?' + TOKEN + ')', 'g'));
if (xs.length === 1) {
return acc.concat(xs[0]);
}
return acc.concat(xs.filter(Boolean).map(function (x) {
if (startsWithToken.test(x)) {
return JSON.parse(x.split(TOKEN)[1]);
}
return x;
}));
}, []);
};
shellQuote$1.quote = quote;
shellQuote$1.parse = parse$6;
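// Examples (illustrative):
//   shellQuote$1.parse('ls -l | grep txt')                          -> ['ls', '-l', { op: '|' }, 'grep', 'txt']
//   shellQuote$1.parse('beep --boop="$PATH"', { PATH: '/usr/bin' }) -> ['beep', '--boop=/usr/bin']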
var macos = {
'/Applications/Atom.app/Contents/MacOS/Atom': 'atom',
'/Applications/Atom Beta.app/Contents/MacOS/Atom Beta':
'/Applications/Atom Beta.app/Contents/MacOS/Atom Beta',
'/Applications/Brackets.app/Contents/MacOS/Brackets': 'brackets',
'/Applications/Sublime Text.app/Contents/MacOS/Sublime Text':
'/Applications/Sublime Text.app/Contents/SharedSupport/bin/subl',
'/Applications/Sublime Text.app/Contents/MacOS/sublime_text':
'/Applications/Sublime Text.app/Contents/SharedSupport/bin/subl',
'/Applications/Sublime Text 2.app/Contents/MacOS/Sublime Text 2':
'/Applications/Sublime Text 2.app/Contents/SharedSupport/bin/subl',
'/Applications/Sublime Text Dev.app/Contents/MacOS/Sublime Text':
'/Applications/Sublime Text Dev.app/Contents/SharedSupport/bin/subl',
'/Applications/Visual Studio Code.app/Contents/MacOS/Electron': 'code',
'/Applications/Visual Studio Code - Insiders.app/Contents/MacOS/Electron':
'code-insiders',
'/Applications/VSCodium.app/Contents/MacOS/Electron': 'codium',
'/Applications/Cursor.app/Contents/MacOS/Cursor': 'cursor',
'/Applications/AppCode.app/Contents/MacOS/appcode':
'/Applications/AppCode.app/Contents/MacOS/appcode',
'/Applications/CLion.app/Contents/MacOS/clion':
'/Applications/CLion.app/Contents/MacOS/clion',
'/Applications/IntelliJ IDEA.app/Contents/MacOS/idea':
'/Applications/IntelliJ IDEA.app/Contents/MacOS/idea',
'/Applications/IntelliJ IDEA Ultimate.app/Contents/MacOS/idea':
'/Applications/IntelliJ IDEA Ultimate.app/Contents/MacOS/idea',
'/Applications/IntelliJ IDEA Community Edition.app/Contents/MacOS/idea':
'/Applications/IntelliJ IDEA Community Edition.app/Contents/MacOS/idea',
'/Applications/PhpStorm.app/Contents/MacOS/phpstorm':
'/Applications/PhpStorm.app/Contents/MacOS/phpstorm',
'/Applications/PyCharm.app/Contents/MacOS/pycharm':
'/Applications/PyCharm.app/Contents/MacOS/pycharm',
'/Applications/PyCharm CE.app/Contents/MacOS/pycharm':
'/Applications/PyCharm CE.app/Contents/MacOS/pycharm',
'/Applications/RubyMine.app/Contents/MacOS/rubymine':
'/Applications/RubyMine.app/Contents/MacOS/rubymine',
'/Applications/WebStorm.app/Contents/MacOS/webstorm':
'/Applications/WebStorm.app/Contents/MacOS/webstorm',
'/Applications/MacVim.app/Contents/MacOS/MacVim': 'mvim',
'/Applications/GoLand.app/Contents/MacOS/goland':
'/Applications/GoLand.app/Contents/MacOS/goland',
'/Applications/Rider.app/Contents/MacOS/rider':
'/Applications/Rider.app/Contents/MacOS/rider',
'/Applications/Zed.app/Contents/MacOS/zed': 'zed'
};
var linux = {
atom: 'atom',
Brackets: 'brackets',
'code-insiders': 'code-insiders',
code: 'code',
vscodium: 'vscodium',
codium: 'codium',
emacs: 'emacs',
gvim: 'gvim',
'idea.sh': 'idea',
'phpstorm.sh': 'phpstorm',
'pycharm.sh': 'pycharm',
'rubymine.sh': 'rubymine',
sublime_text: 'subl',
vim: 'vim',
'webstorm.sh': 'webstorm',
'goland.sh': 'goland',
'rider.sh': 'rider'
};
var windows$1 = [
'Brackets.exe',
'Code.exe',
'Code - Insiders.exe',
'VSCodium.exe',
'atom.exe',
'sublime_text.exe',
'notepad++.exe',
'clion.exe',
'clion64.exe',
'idea.exe',
'idea64.exe',
'phpstorm.exe',
'phpstorm64.exe',
'pycharm.exe',
'pycharm64.exe',
'rubymine.exe',
'rubymine64.exe',
'webstorm.exe',
'webstorm64.exe',
'goland.exe',
'goland64.exe',
'rider.exe',
'rider64.exe'
];
const path$7 = require$$0$4;
const shellQuote = shellQuote$1;
const childProcess$2 = require$$2$1;
// Map from full process name to binary that starts the process
// We can't just re-use full process name, because it will spawn a new instance
// of the app every time
const COMMON_EDITORS_MACOS = macos;
const COMMON_EDITORS_LINUX = linux;
const COMMON_EDITORS_WIN = windows$1;
var guess = function guessEditor (specifiedEditor) {
if (specifiedEditor) {
return shellQuote.parse(specifiedEditor)
}
if (process.env.LAUNCH_EDITOR) {
return [process.env.LAUNCH_EDITOR]
}
if (process.versions.webcontainer) {
return [process.env.EDITOR || 'code']
}
// We can find out which editor is currently running by:
// `ps x` on macOS and Linux
// `Get-Process` on Windows
try {
if (process.platform === 'darwin') {
const output = childProcess$2
.execSync('ps x -o comm=', {
stdio: ['pipe', 'pipe', 'ignore']
})
.toString();
const processNames = Object.keys(COMMON_EDITORS_MACOS);
const processList = output.split('\n');
for (let i = 0; i < processNames.length; i++) {
const processName = processNames[i];
// Find editor by exact match.
if (processList.includes(processName)) {
return [COMMON_EDITORS_MACOS[processName]]
}
const processNameWithoutApplications = processName.replace('/Applications', '');
// Find editor installation not in /Applications.
if (output.indexOf(processNameWithoutApplications) !== -1) {
// Use the CLI command if one is specified
if (processName !== COMMON_EDITORS_MACOS[processName]) {
return [COMMON_EDITORS_MACOS[processName]]
}
// Use a partial match to find the running process path. If one is found, use the
// existing path since it can be running from anywhere.
const runningProcess = processList.find((procName) => procName.endsWith(processNameWithoutApplications));
if (runningProcess !== undefined) {
return [runningProcess]
}
}
}
} else if (process.platform === 'win32') {
const output = childProcess$2
.execSync(
'powershell -NoProfile -Command "Get-CimInstance -Query \\"select executablepath from win32_process where executablepath is not null\\" | % { $_.ExecutablePath }"',
{
stdio: ['pipe', 'pipe', 'ignore']
}
)
.toString();
const runningProcesses = output.split('\r\n');
for (let i = 0; i < runningProcesses.length; i++) {
const fullProcessPath = runningProcesses[i].trim();
const shortProcessName = path$7.basename(fullProcessPath);
if (COMMON_EDITORS_WIN.indexOf(shortProcessName) !== -1) {
return [fullProcessPath]
}
}
} else if (process.platform === 'linux') {
// --no-heading No header line
// x List all processes owned by you
// -o comm Need only names column
const output = childProcess$2
.execSync('ps x --no-heading -o comm --sort=comm', {
stdio: ['pipe', 'pipe', 'ignore']
})
.toString();
const processNames = Object.keys(COMMON_EDITORS_LINUX);
for (let i = 0; i < processNames.length; i++) {
const processName = processNames[i];
if (output.indexOf(processName) !== -1) {
return [COMMON_EDITORS_LINUX[processName]]
}
}
}
} catch (ignoreError) {
// Ignore...
}
// Last resort, use old skool env vars
if (process.env.VISUAL) {
return [process.env.VISUAL]
} else if (process.env.EDITOR) {
return [process.env.EDITOR]
}
return [null]
};
const path$6 = require$$0$4;
// normalize file/line numbers into command line args for specific editors
var getArgs = function getArgumentsForPosition (
editor,
fileName,
lineNumber,
columnNumber = 1
) {
const editorBasename = path$6.basename(editor).replace(/\.(exe|cmd|bat)$/i, '');
switch (editorBasename) {
case 'atom':
case 'Atom':
case 'Atom Beta':
case 'subl':
case 'sublime':
case 'sublime_text':
case 'wstorm':
case 'charm':
case 'zed':
return [`${fileName}:${lineNumber}:${columnNumber}`]
case 'notepad++':
return ['-n' + lineNumber, '-c' + columnNumber, fileName]
case 'vim':
case 'mvim':
return [`+call cursor(${lineNumber}, ${columnNumber})`, fileName]
case 'joe':
case 'gvim':
return ['+' + `${lineNumber}`, fileName]
case 'emacs':
case 'emacsclient':
return [`+${lineNumber}:${columnNumber}`, fileName]
case 'rmate':
case 'mate':
case 'mine':
return ['--line', lineNumber, fileName]
case 'code':
case 'Code':
case 'code-insiders':
case 'Code - Insiders':
case 'codium':
case 'cursor':
case 'vscodium':
case 'VSCodium':
return ['-r', '-g', `${fileName}:${lineNumber}:${columnNumber}`]
case 'appcode':
case 'clion':
case 'clion64':
case 'idea':
case 'idea64':
case 'phpstorm':
case 'phpstorm64':
case 'pycharm':
case 'pycharm64':
case 'rubymine':
case 'rubymine64':
case 'webstorm':
case 'webstorm64':
case 'goland':
case 'goland64':
case 'rider':
case 'rider64':
return ['--line', lineNumber, '--column', columnNumber, fileName]
}
if (process.env.LAUNCH_EDITOR) {
return [fileName, lineNumber, columnNumber]
}
// For all others, drop the lineNumber until we have
// a mapping above, since providing the lineNumber incorrectly
// can result in errors or confusing behavior.
return [fileName]
};
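// e.g. getArgumentsForPosition('code', 'src/main.ts', 10, 4) -> ['-r', '-g', 'src/main.ts:10:4'];
// for an unrecognized editor (and no LAUNCH_EDITOR env var) only [fileName] is returned.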
/**
* Copyright (c) 2015-present, Facebook, Inc.
*
* This source code is licensed under the MIT license found in the
* LICENSE file at
* https://github.com/facebookincubator/create-react-app/blob/master/LICENSE
*
* Modified by Yuxi Evan You
*/
const fs$4 = require$$0__default;
const os$1 = require$$2;
const path$5 = require$$0$4;
const colors = picocolorsExports;
const childProcess$1 = require$$2$1;
const guessEditor = guess;
const getArgumentsForPosition = getArgs;
function wrapErrorCallback (cb) {
return (fileName, errorMessage) => {
console.log();
console.log(
colors.red('Could not open ' + path$5.basename(fileName) + ' in the editor.')
);
if (errorMessage) {
if (errorMessage[errorMessage.length - 1] !== '.') {
errorMessage += '.';
}
console.log(
colors.red('The editor process exited with an error: ' + errorMessage)
);
}
console.log();
if (cb) cb(fileName, errorMessage);
}
}
function isTerminalEditor (editor) {
switch (editor) {
case 'vim':
case 'emacs':
case 'nano':
return true
}
return false
}
const positionRE = /:(\d+)(:(\d+))?$/;
function parseFile (file) {
const fileName = file.replace(positionRE, '');
const match = file.match(positionRE);
const lineNumber = match && match[1];
const columnNumber = match && match[3];
return {
fileName,
lineNumber,
columnNumber
}
}
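// e.g. parseFile('/src/main.ts:12:4') -> { fileName: '/src/main.ts', lineNumber: '12', columnNumber: '4' };
// a path without a trailing :line(:column) suffix yields null lineNumber/columnNumber.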
let _childProcess = null;
function launchEditor (file, specifiedEditor, onErrorCallback) {
const parsed = parseFile(file);
let { fileName } = parsed;
const { lineNumber, columnNumber } = parsed;
if (!fs$4.existsSync(fileName)) {
return
}
if (typeof specifiedEditor === 'function') {
onErrorCallback = specifiedEditor;
specifiedEditor = undefined;
}
onErrorCallback = wrapErrorCallback(onErrorCallback);
const [editor, ...args] = guessEditor(specifiedEditor);
if (!editor) {
onErrorCallback(fileName, null);
return
}
if (
process.platform === 'linux' &&
fileName.startsWith('/mnt/') &&
/Microsoft/i.test(os$1.release())
) {
// Assume WSL / "Bash on Ubuntu on Windows" is being used, and
// that the file exists on the Windows file system.
// `os.release()` is "4.4.0-43-Microsoft" in the current release
// build of WSL, see: https://github.com/Microsoft/BashOnWindows/issues/423#issuecomment-221627364
// When a Windows editor is specified, interop functionality can
// handle the path translation, but only if a relative path is used.
fileName = path$5.relative('', fileName);
}
// cmd.exe on Windows is vulnerable to RCE attacks given a file name of the
// form "C:\Users\myusername\Downloads\& curl 172.21.93.52". Use a safe file
// name pattern to validate user-provided file names. This doesn't cover the
// entire range of valid file names but should cover almost all of them in practice.
// (Backport of
// https://github.com/facebook/create-react-app/pull/4866
// and
// https://github.com/facebook/create-react-app/pull/5431)
// Allows alphanumeric characters, periods, dashes, slashes, underscores, plus and space.
const WINDOWS_CMD_SAFE_FILE_NAME_PATTERN = /^([A-Za-z]:[/\\])?[\p{L}0-9/.\-\\_+ ]+$/u;
if (
process.platform === 'win32' &&
!WINDOWS_CMD_SAFE_FILE_NAME_PATTERN.test(fileName.trim())
) {
console.log();
console.log(
colors.red('Could not open ' + path$5.basename(fileName) + ' in the editor.')
);
console.log();
console.log(
'When running on Windows, file names are checked against a safe file name ' +
'pattern to protect against remote code execution attacks. File names ' +
'may consist only of alphanumeric characters (all languages), periods, ' +
'dashes, slashes, and underscores.'
);
console.log();
return
}
if (lineNumber) {
const extraArgs = getArgumentsForPosition(editor, fileName, lineNumber, columnNumber);
args.push.apply(args, extraArgs);
} else {
args.push(fileName);
}
if (_childProcess && isTerminalEditor(editor)) {
// There's an existing editor process already and it's attached
// to the terminal, so go kill it. Otherwise two separate editor
// instances attach to the stdin/stdout which gets confusing.
_childProcess.kill('SIGKILL');
}
if (process.platform === 'win32') {
// On Windows, launch the editor in a shell because spawn can only
// launch .exe files.
_childProcess = childProcess$1.spawn(
'cmd.exe',
['/C', editor].concat(args),
{ stdio: 'inherit' }
);
} else {
_childProcess = childProcess$1.spawn(editor, args, { stdio: 'inherit' });
}
_childProcess.on('exit', function (errorCode) {
_childProcess = null;
if (errorCode) {
onErrorCallback(fileName, '(code ' + errorCode + ')');
}
});
_childProcess.on('error', function (error) {
let { code, message } = error;
if ('ENOENT' === code) {
message = `${message} ('${editor}' command does not exist in 'PATH')`;
}
onErrorCallback(fileName, message);
});
}
var launchEditor_1 = launchEditor;
const url$2 = require$$0$9;
const path$4 = require$$0$4;
const launch = launchEditor_1;
var launchEditorMiddleware = (specifiedEditor, srcRoot, onErrorCallback) => {
if (typeof specifiedEditor === 'function') {
onErrorCallback = specifiedEditor;
specifiedEditor = undefined;
}
if (typeof srcRoot === 'function') {
onErrorCallback = srcRoot;
srcRoot = undefined;
}
srcRoot = srcRoot || process.cwd();
return function launchEditorMiddleware (req, res) {
const { file } = url$2.parse(req.url, true).query || {};
if (!file) {
res.statusCode = 500;
res.end(`launch-editor-middleware: required query param "file" is missing.`);
} else {
launch(path$4.resolve(srcRoot, file), specifiedEditor, onErrorCallback);
res.end();
}
}
};
var launchEditorMiddleware$1 = /*@__PURE__*/getDefaultExportFromCjs(launchEditorMiddleware);
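// Usage sketch (the route name is illustrative): mount on a connect/express-style app so a
// client request can open a file in the local editor, e.g.
//   app.use('/__open-in-editor', launchEditorMiddleware$1());
//   // then: GET /__open-in-editor?file=src/App.vue:10:5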
async function resolveHttpServer({ proxy }, app, httpsOptions) {
if (!httpsOptions) {
const { createServer } = await import('node:http');
return createServer(app);
}
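// A user-configured proxy is assumed not to work over the HTTP/2 server created below,
// so fall back to a plain HTTPS (HTTP/1.1) server in that case.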
if (proxy) {
const { createServer } = await import('node:https');
return createServer(httpsOptions, app);
} else {
const { createSecureServer } = await import('node:http2');
return createSecureServer(
{
// Manually increase the session memory to prevent 502 ENHANCE_YOUR_CALM
// errors on large numbers of requests
maxSessionMemory: 1e3,
...httpsOptions,
allowHTTP1: true
},
// @ts-expect-error TODO: is this correct?
app
);
}
}
async function resolveHttpsConfig(https) {
if (!https) return void 0;
const [ca, cert, key, pfx] = await Promise.all([
readFileIfExists(https.ca),
readFileIfExists(https.cert),
readFileIfExists(https.key),
readFileIfExists(https.pfx)
]);
return { ...https, ca, cert, key, pfx };
}
async function readFileIfExists(value) {
if (typeof value === "string") {
return fsp.readFile(path$n.resolve(value)).catch(() => value);
}
return value;
}
async function httpServerStart(httpServer, serverOptions) {
let { port, strictPort, host, logger } = serverOptions;
return new Promise((resolve, reject) => {
const onError = (e) => {
if (e.code === "EADDRINUSE") {
if (strictPort) {
httpServer.removeListener("error", onError);
reject(new Error(`Port ${port} is already in use`));
} else {
logger.info(`Port ${port} is in use, trying another one...`);
httpServer.listen(++port, host);
}
} else {
httpServer.removeListener("error", onError);
reject(e);
}
};
httpServer.on("error", onError);
httpServer.listen(port, host, () => {
httpServer.removeListener("error", onError);
resolve(port);
});
});
}
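// Usage sketch: resolves with the bound port, retrying on EADDRINUSE unless strictPort is set, e.g.
//   const port = await httpServerStart(httpServer, { port: 5173, strictPort: false, host: 'localhost', logger });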
function setClientErrorHandler(server, logger) {
server.on("clientError", (err, socket) => {
let msg = "400 Bad Request";
if (err.code === "HPE_HEADER_OVERFLOW") {
msg = "431 Request Header Fields Too Large";
logger.warn(
colors$1.yellow(
"Server responded with status code 431. See https://vitejs.dev/guide/troubleshooting.html#_431-request-header-fields-too-large."
)
);
}
if (err.code === "ECONNRESET" || !socket.writable) {
return;
}
socket.end(`HTTP/1.1 ${msg}\r
\r
`);
});
}
const commonFsUtils = {
existsSync: fs__default.existsSync,
isDirectory,
tryResolveRealFile,
tryResolveRealFileWithExtensions,
tryResolveRealFileOrType
};
const cachedFsUtilsMap = /* @__PURE__ */ new WeakMap();
function getFsUtils(config) {
let fsUtils = cachedFsUtilsMap.get(config);
if (!fsUtils) {
if (config.command !== "serve" || config.server.fs.cachedChecks !== true || config.server.watch?.ignored || process.versions.pnp) {
fsUtils = commonFsUtils;
} else if (!config.resolve.preserveSymlinks && config.root !== getRealPath(config.root)) {
fsUtils = commonFsUtils;
} else {
fsUtils = createCachedFsUtils(config);
}
cachedFsUtilsMap.set(config, fsUtils);
}
return fsUtils;
}
function readDirCacheSync(file) {
let dirents;
try {
dirents = fs__default.readdirSync(file, { withFileTypes: true });
} catch {
return;
}
return direntsToDirentMap(dirents);
}
function direntsToDirentMap(fsDirents) {
const dirents = /* @__PURE__ */ new Map();
for (const dirent of fsDirents) {
const type = dirent.isDirectory() ? "directory" : dirent.isSymbolicLink() ? "symlink" : dirent.isFile() ? "file" : void 0;
if (type) {
dirents.set(dirent.name, { type });
}
}
return dirents;
}
function ensureFileMaybeSymlinkIsResolved(direntCache, filePath) {
if (direntCache.type !== "file_maybe_symlink") return;
const isSymlink = fs__default.lstatSync(filePath, { throwIfNoEntry: false })?.isSymbolicLink();
direntCache.type = isSymlink === void 0 ? "error" : isSymlink ? "symlink" : "file";
}
function pathUntilPart(root, parts, i) {
let p = root;
for (let k = 0; k < i; k++) p += "/" + parts[k];
return p;
}
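// e.g. pathUntilPart('/root', ['src', 'app', 'main.ts'], 2) -> '/root/src/app'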
function createCachedFsUtils(config) {
const root = config.root;
const rootDirPath = `${root}/`;
const rootCache = { type: "directory" };
const getDirentCacheSync = (parts) => {
let direntCache = rootCache;
for (let i = 0; i < parts.length; i++) {
if (direntCache.type === "directory") {
let dirPath;
if (!direntCache.dirents) {
dirPath = pathUntilPart(root, parts, i);
const dirents = readDirCacheSync(dirPath);
if (!dirents) {
direntCache.type = "error";
return;
}
direntCache.dirents = dirents;
}
const nextDirentCache = direntCache.dirents.get(parts[i]);
if (!nextDirentCache) {
return;
}
if (nextDirentCache.type === "directory_maybe_symlink") {
dirPath ??= pathUntilPart(root, parts, i + 1);
const isSymlink = fs__default.lstatSync(dirPath, { throwIfNoEntry: false })?.isSymbolicLink();
nextDirentCache.type = isSymlink ? "symlink" : "directory";
}
direntCache = nextDirentCache;
} else if (direntCache.type === "symlink") {
return direntCache;
} else if (direntCache.type === "error") {
return direntCache;
} else {
if (i !== parts.length - 1) {
return;
}
if (direntCache.type === "file_maybe_symlink") {
ensureFileMaybeSymlinkIsResolved(
direntCache,
pathUntilPart(root, parts, i)
);
return direntCache;
} else if (direntCache.type === "file") {
return direntCache;
} else {
return;
}
}
}
return direntCache;
};
function getDirentCacheFromPath(normalizedFile) {
if (normalizedFile[normalizedFile.length - 1] === "/") {
normalizedFile = normalizedFile.slice(0, -1);
}
if (normalizedFile === root) {
return rootCache;
}
if (!normalizedFile.startsWith(rootDirPath)) {
return void 0;
}
const pathFromRoot = normalizedFile.slice(rootDirPath.length);
const parts = pathFromRoot.split("/");
const direntCache = getDirentCacheSync(parts);
if (!direntCache || direntCache.type === "error") {
return false;
}
return direntCache;
}
function onPathAdd(file, type) {
const direntCache = getDirentCacheFromPath(
normalizePath$3(path$n.dirname(file))
);
if (direntCache && direntCache.type === "directory" && direntCache.dirents) {
direntCache.dirents.set(path$n.basename(file), { type });
}
}
function onPathUnlink(file) {
const direntCache = getDirentCacheFromPath(
normalizePath$3(path$n.dirname(file))
);
if (direntCache && direntCache.type === "directory" && direntCache.dirents) {
direntCache.dirents.delete(path$n.basename(file));
}
}
return {
existsSync(file) {
if (isInNodeModules$1(file)) {
return fs__default.existsSync(file);
}
const normalizedFile = normalizePath$3(file);
const direntCache = getDirentCacheFromPath(normalizedFile);
if (direntCache === void 0 || direntCache && direntCache.type === "symlink") {
return fs__default.existsSync(file);
}
return !!direntCache;
},
tryResolveRealFile(file, preserveSymlinks) {
if (isInNodeModules$1(file)) {
return tryResolveRealFile(file, preserveSymlinks);
}
const normalizedFile = normalizePath$3(file);
const direntCache = getDirentCacheFromPath(normalizedFile);
if (direntCache === void 0 || direntCache && direntCache.type === "symlink") {
return tryResolveRealFile(file, preserveSymlinks);
}
if (!direntCache || direntCache.type === "directory") {
return;
}
return normalizedFile;
},
tryResolveRealFileWithExtensions(file, extensions, preserveSymlinks) {
if (isInNodeModules$1(file)) {
return tryResolveRealFileWithExtensions(
file,
extensions,
preserveSymlinks
);
}
const normalizedFile = normalizePath$3(file);
const dirPath = path$n.posix.dirname(normalizedFile);
const direntCache = getDirentCacheFromPath(dirPath);
if (direntCache === void 0 || direntCache && direntCache.type === "symlink") {
return tryResolveRealFileWithExtensions(
file,
extensions,
preserveSymlinks
);
}
if (!direntCache || direntCache.type !== "directory") {
return;
}
if (!direntCache.dirents) {
const dirents = readDirCacheSync(dirPath);
if (!dirents) {
direntCache.type = "error";
return;
}
direntCache.dirents = dirents;
}
const base = path$n.posix.basename(normalizedFile);
for (const ext of extensions) {
const fileName = base + ext;
const fileDirentCache = direntCache.dirents.get(fileName);
if (fileDirentCache) {
const filePath = dirPath + "/" + fileName;
ensureFileMaybeSymlinkIsResolved(fileDirentCache, filePath);
if (fileDirentCache.type === "symlink") {
return tryResolveRealFile(filePath, preserveSymlinks);
}
if (fileDirentCache.type === "file") {
return filePath;
}
}
}
},
tryResolveRealFileOrType(file, preserveSymlinks) {
if (isInNodeModules$1(file)) {
return tryResolveRealFileOrType(file, preserveSymlinks);
}
const normalizedFile = normalizePath$3(file);
const direntCache = getDirentCacheFromPath(normalizedFile);
if (direntCache === void 0 || direntCache && direntCache.type === "symlink") {
return tryResolveRealFileOrType(file, preserveSymlinks);
}
if (!direntCache) {
return;
}
if (direntCache.type === "directory") {
return { type: "directory" };
}
return { path: normalizedFile, type: "file" };
},
isDirectory(dirPath) {
if (isInNodeModules$1(dirPath)) {
return isDirectory(dirPath);
}
const direntCache = getDirentCacheFromPath(normalizePath$3(dirPath));
if (direntCache === void 0 || direntCache && direntCache.type === "symlink") {
return isDirectory(dirPath);
}
return direntCache && direntCache.type === "directory";
},
initWatcher(watcher) {
watcher.on("add", (file) => {
onPathAdd(file, "file_maybe_symlink");
});
watcher.on("addDir", (dir) => {
onPathAdd(dir, "directory_maybe_symlink");
});
watcher.on("unlink", onPathUnlink);
watcher.on("unlinkDir", onPathUnlink);
}
};
}
function tryResolveRealFile(file, preserveSymlinks) {
const stat = tryStatSync(file);
if (stat?.isFile()) return getRealPath(file, preserveSymlinks);
}
function tryResolveRealFileWithExtensions(filePath, extensions, preserveSymlinks) {
for (const ext of extensions) {
const res = tryResolveRealFile(filePath + ext, preserveSymlinks);
if (res) return res;
}
}
function tryResolveRealFileOrType(file, preserveSymlinks) {
const fileStat = tryStatSync(file);
if (fileStat?.isFile()) {
return { path: getRealPath(file, preserveSymlinks), type: "file" };
}
if (fileStat?.isDirectory()) {
return { type: "directory" };
}
return;
}
function getRealPath(resolved, preserveSymlinks) {
if (!preserveSymlinks) {
resolved = safeRealpathSync(resolved);
}
return normalizePath$3(resolved);
}
function isDirectory(path2) {
const stat = tryStatSync(path2);
return stat?.isDirectory() ?? false;
}
/*!
* etag
* Copyright(c) 2014-2016 Douglas Christopher Wilson
* MIT Licensed
*/
/**
* Module exports.
* @public
*/
var etag_1 = etag;
/**
* Module dependencies.
* @private
*/
var crypto = require$$3$1;
var Stats = require$$0__default.Stats;
/**
* Module variables.
* @private
*/
var toString = Object.prototype.toString;
/**
* Generate an entity tag.
*
* @param {Buffer|string} entity
* @return {string}
* @private
*/
function entitytag (entity) {
if (entity.length === 0) {
// fast-path empty
return '"0-2jmj7l5rSw0yVb/vlWAYkK/YBwk"'
}
// compute hash of entity
var hash = crypto
.createHash('sha1')
.update(entity, 'utf8')
.digest('base64')
.substring(0, 27);
// compute length of entity
var len = typeof entity === 'string'
? Buffer.byteLength(entity, 'utf8')
: entity.length;
return '"' + len.toString(16) + '-' + hash + '"'
}
/**
* Create a simple ETag.
*
* @param {string|Buffer|Stats} entity
* @param {object} [options]
* @param {boolean} [options.weak]
* @return {String}
* @public
*/
function etag (entity, options) {
if (entity == null) {
throw new TypeError('argument entity is required')
}
// support fs.Stats object
var isStats = isstats(entity);
var weak = options && typeof options.weak === 'boolean'
? options.weak
: isStats;
// validate argument
if (!isStats && typeof entity !== 'string' && !Buffer.isBuffer(entity)) {
throw new TypeError('argument entity must be string, Buffer, or fs.Stats')
}
// generate entity tag
var tag = isStats
? stattag(entity)
: entitytag(entity);
return weak
? 'W/' + tag
: tag
}
/**
* Determine if object is a Stats object.
*
* @param {object} obj
* @return {boolean}
* @api private
*/
function isstats (obj) {
// genuine fs.Stats
if (typeof Stats === 'function' && obj instanceof Stats) {
return true
}
// quack quack
return obj && typeof obj === 'object' &&
'ctime' in obj && toString.call(obj.ctime) === '[object Date]' &&
'mtime' in obj && toString.call(obj.mtime) === '[object Date]' &&
'ino' in obj && typeof obj.ino === 'number' &&
'size' in obj && typeof obj.size === 'number'
}
/**
* Generate a tag for a stat.
*
* @param {object} stat
* @return {string}
* @private
*/
function stattag (stat) {
var mtime = stat.mtime.getTime().toString(16);
var size = stat.size.toString(16);
return '"' + size + '-' + mtime + '"'
}
var getEtag = /*@__PURE__*/getDefaultExportFromCjs(etag_1);
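// e.g. getEtag('hello') produces a strong tag of the form '"<length hex>-<sha1 base64>"', while
// getEtag(fs.statSync(file)) defaults to a weak tag 'W/"<size hex>-<mtime hex>"'.
// The minified one-liner below appears to be an inlined package "exports"/"imports" resolver:
// o() resolves a package.json "exports" subpath and f() resolves an "imports" specifier.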
function e(e,n,r){throw new Error(r?`No known conditions for "${n}" specifier in "${e}" package`:`Missing "${n}" specifier in "${e}" package`)}function n(n,i,o,f){let s,u,l=r(n,o),c=function(e){let n=new Set(["default",...e.conditions||[]]);return e.unsafe||n.add(e.require?"require":"import"),e.unsafe||n.add(e.browser?"browser":"node"),n}(f||{}),a=i[l];if(void 0===a){let e,n,r,t;for(t in i)n&&t.length<n.length||("/"===t[t.length-1]&&l.startsWith(t)?(u=l.substring(t.length),n=t):t.length>1&&(r=t.indexOf("*",1),~r&&(e=RegExp("^"+t.substring(0,r)+"(.*)"+t.substring(1+r)).exec(l),e&&e[1]&&(u=e[1],n=t))));a=i[n];}return a||e(n,l),s=t(a,c),s||e(n,l,1),u&&function(e,n){let r,t=0,i=e.length,o=/[*]/g,f=/[/]$/;for(;t<i;t++)e[t]=o.test(r=e[t])?r.replace(o,n):f.test(r)?r+n:r;}(s,u),s}function r(e,n,r){if(e===n||"."===n)return ".";let t=e+"/",i=t.length,o=n.slice(0,i)===t,f=o?n.slice(i):n;return "#"===f[0]?f:o||!r?"./"===f.slice(0,2)?f:"./"+f:f}function t(e,n,r){if(e){if("string"==typeof e)return r&&r.add(e),[e];let i,o;if(Array.isArray(e)){for(o=r||new Set,i=0;i<e.length;i++)t(e[i],n,o);if(!r&&o.size)return [...o]}else for(i in e)if(n.has(i))return t(e[i],n,r)}}function o(e,r,t){let i,o=e.exports;if(o){if("string"==typeof o)o={".":o};else for(i in o){"."!==i[0]&&(o={".":o});break}return n(e.name,o,r||".",t)}}function f(e,r,t){if(e.imports)return n(e.name,e.imports,r,t)}
const normalizedClientEntry$1 = normalizePath$3(CLIENT_ENTRY);
const normalizedEnvEntry$1 = normalizePath$3(ENV_ENTRY);
const ERR_RESOLVE_PACKAGE_ENTRY_FAIL = "ERR_RESOLVE_PACKAGE_ENTRY_FAIL";
const browserExternalId = "__vite-browser-external";
const optionalPeerDepId = "__vite-optional-peer-dep";
const subpathImportsPrefix = "#";
const startsWithWordCharRE = /^\w/;
const debug$c = createDebugger("vite:resolve-details", {
onlyWhenFocused: true
});
function resolvePlugin(resolveOptions) {
const {
root,
isProduction,
asSrc,
ssrConfig,
preferRelative = false
} = resolveOptions;
const {
target: ssrTarget,
noExternal: ssrNoExternal,
external: ssrExternal
} = ssrConfig ?? {};
const rootInRoot = tryStatSync(path$n.join(root, root))?.isDirectory() ?? false;
return {
name: "vite:resolve",
async resolveId(id, importer, resolveOpts) {
if (id[0] === "\0" || id.startsWith("virtual:") || // When injected directly in html/client code
id.startsWith("/virtual:")) {
return;
}
const ssr = resolveOpts?.ssr === true;
const depsOptimizer = resolveOptions.getDepsOptimizer?.(ssr);
if (id.startsWith(browserExternalId)) {
return id;
}
const targetWeb = !ssr || ssrTarget === "webworker";
const isRequire = resolveOpts?.custom?.["node-resolve"]?.isRequire ?? false;
const ssrConditions = resolveOptions.ssrConfig?.resolve?.conditions || resolveOptions.conditions;
const options = {
isRequire,
...resolveOptions,
scan: resolveOpts?.scan ?? resolveOptions.scan,
conditions: ssr ? ssrConditions : resolveOptions.conditions
};
const resolvedImports = resolveSubpathImports(
id,
importer,
options,
targetWeb
);
if (resolvedImports) {
id = resolvedImports;
if (resolveOpts.custom?.["vite:import-glob"]?.isSubImportsPattern) {
return normalizePath$3(path$n.join(root, id));
}
}
if (importer) {
if (isTsRequest(importer) || resolveOpts.custom?.depScan?.loader?.startsWith("ts")) {
options.isFromTsImporter = true;
} else {
const moduleLang = this.getModuleInfo(importer)?.meta?.vite?.lang;
options.isFromTsImporter = moduleLang && isTsRequest(`.${moduleLang}`);
}
}
let res;
if (asSrc && depsOptimizer?.isOptimizedDepUrl(id)) {
const optimizedPath = id.startsWith(FS_PREFIX) ? fsPathFromId(id) : normalizePath$3(path$n.resolve(root, id.slice(1)));
return optimizedPath;
}
if (asSrc && id.startsWith(FS_PREFIX)) {
res = fsPathFromId(id);
debug$c?.(`[@fs] ${colors$1.cyan(id)} -> ${colors$1.dim(res)}`);
return ensureVersionQuery(res, id, options, depsOptimizer);
}
if (asSrc && id[0] === "/" && (rootInRoot || !id.startsWith(withTrailingSlash(root)))) {
const fsPath = path$n.resolve(root, id.slice(1));
if (res = tryFsResolve(fsPath, options)) {
debug$c?.(`[url] ${colors$1.cyan(id)} -> ${colors$1.dim(res)}`);
return ensureVersionQuery(res, id, options, depsOptimizer);
}
}
if (id[0] === "." || (preferRelative || importer?.endsWith(".html")) && startsWithWordCharRE.test(id)) {
const basedir = importer ? path$n.dirname(importer) : process.cwd();
const fsPath = path$n.resolve(basedir, id);
const normalizedFsPath = normalizePath$3(fsPath);
if (depsOptimizer?.isOptimizedDepFile(normalizedFsPath)) {
if (!resolveOptions.isBuild && !DEP_VERSION_RE.test(normalizedFsPath)) {
const browserHash = optimizedDepInfoFromFile(
depsOptimizer.metadata,
normalizedFsPath
)?.browserHash;
if (browserHash) {
return injectQuery(normalizedFsPath, `v=${browserHash}`);
}
}
return normalizedFsPath;
}
if (targetWeb && options.mainFields.includes("browser") && (res = tryResolveBrowserMapping(fsPath, importer, options, true))) {
return res;
}
if (res = tryFsResolve(fsPath, options)) {
res = ensureVersionQuery(res, id, options, depsOptimizer);
debug$c?.(`[relative] ${colors$1.cyan(id)} -> ${colors$1.dim(res)}`);
if (!options.idOnly && !options.scan && options.isBuild && !importer?.endsWith(".html")) {
const resPkg = findNearestPackageData(
path$n.dirname(res),
options.packageCache
);
if (resPkg) {
return {
id: res,
moduleSideEffects: resPkg.hasSideEffects(res)
};
}
}
return res;
}
}
if (isWindows$3 && id[0] === "/") {
const basedir = importer ? path$n.dirname(importer) : process.cwd();
const fsPath = path$n.resolve(basedir, id);
if (res = tryFsResolve(fsPath, options)) {
debug$c?.(`[drive-relative] ${colors$1.cyan(id)} -> ${colors$1.dim(res)}`);
return ensureVersionQuery(res, id, options, depsOptimizer);
}
}
if (isNonDriveRelativeAbsolutePath(id) && (res = tryFsResolve(id, options))) {
debug$c?.(`[fs] ${colors$1.cyan(id)} -> ${colors$1.dim(res)}`);
return ensureVersionQuery(res, id, options, depsOptimizer);
}
if (isExternalUrl(id)) {
return options.idOnly ? id : { id, external: true };
}
if (isDataUrl(id)) {
return null;
}
if (bareImportRE.test(id)) {
const external = options.shouldExternalize?.(id, importer);
if (!external && asSrc && depsOptimizer && !options.scan && (res = await tryOptimizedResolve(
depsOptimizer,
id,
importer,
options.preserveSymlinks,
options.packageCache
))) {
return res;
}
if (targetWeb && options.mainFields.includes("browser") && (res = tryResolveBrowserMapping(
id,
importer,
options,
false,
external
))) {
return res;
}
if (res = tryNodeResolve(
id,
importer,
options,
targetWeb,
depsOptimizer,
ssr,
external
)) {
return res;
}
if (isBuiltin(id)) {
if (ssr) {
if (targetWeb && ssrNoExternal === true && // if both noExternal and external are true, noExternal will take the higher priority and bundle it.
// only if the id is explicitly listed in external, we will externalize it and skip this error.
(ssrExternal === true || !ssrExternal?.includes(id))) {
let message = `Cannot bundle Node.js built-in "${id}"`;
if (importer) {
message += ` imported from "${path$n.relative(
process.cwd(),
importer
)}"`;
}
message += `. Consider disabling ssr.noExternal or remove the built-in dependency.`;
this.error(message);
}
return options.idOnly ? id : { id, external: true, moduleSideEffects: false };
} else {
if (!asSrc) {
debug$c?.(
`externalized node built-in "${id}" to empty module. (imported by: ${colors$1.white(colors$1.dim(importer))})`
);
} else if (isProduction) {
this.warn(
`Module "${id}" has been externalized for browser compatibility, imported by "${importer}". See https://vitejs.dev/guide/troubleshooting.html#module-externalized-for-browser-compatibility for more details.`
);
}
return isProduction ? browserExternalId : `${browserExternalId}:${id}`;
}
}
}
debug$c?.(`[fallthrough] ${colors$1.dim(id)}`);
},
load(id) {
if (id.startsWith(browserExternalId)) {
if (isProduction) {
return `export default {}`;
} else {
id = id.slice(browserExternalId.length + 1);
return `export default new Proxy({}, {
get(_, key) {
throw new Error(\`Module "${id}" has been externalized for browser compatibility. Cannot access "${id}.\${key}" in client code. See https://vitejs.dev/guide/troubleshooting.html#module-externalized-for-browser-compatibility for more details.\`)
}
})`;
}
}
if (id.startsWith(optionalPeerDepId)) {
if (isProduction) {
return `export default {}`;
} else {
const [, peerDep, parentDep] = id.split(":");
return `throw new Error(\`Could not resolve "${peerDep}" imported by "${parentDep}". Is it installed?\`)`;
}
}
}
};
}
function resolveSubpathImports(id, importer, options, targetWeb) {
if (!importer || !id.startsWith(subpathImportsPrefix)) return;
const basedir = path$n.dirname(importer);
const pkgData = findNearestPackageData(basedir, options.packageCache);
if (!pkgData) return;
let { file: idWithoutPostfix, postfix } = splitFileAndPostfix(id.slice(1));
idWithoutPostfix = "#" + idWithoutPostfix;
let importsPath = resolveExportsOrImports(
pkgData.data,
idWithoutPostfix,
options,
targetWeb,
"imports"
);
if (importsPath?.[0] === ".") {
importsPath = path$n.relative(basedir, path$n.join(pkgData.dir, importsPath));
if (importsPath[0] !== ".") {
importsPath = `./${importsPath}`;
}
}
return importsPath + postfix;
}
function ensureVersionQuery(resolved, id, options, depsOptimizer) {
if (!options.isBuild && !options.scan && depsOptimizer && !(resolved === normalizedClientEntry$1 || resolved === normalizedEnvEntry$1)) {
const isNodeModule = isInNodeModules$1(id) || isInNodeModules$1(resolved);
if (isNodeModule && !DEP_VERSION_RE.test(resolved)) {
const versionHash = depsOptimizer.metadata.browserHash;
if (versionHash && isOptimizable(resolved, depsOptimizer.options)) {
resolved = injectQuery(resolved, `v=${versionHash}`);
}
}
}
return resolved;
}
function splitFileAndPostfix(path2) {
const file = cleanUrl(path2);
return { file, postfix: path2.slice(file.length) };
}
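// e.g. splitFileAndPostfix('/src/main.ts?v=abc123') -> { file: '/src/main.ts', postfix: '?v=abc123' }
// (cleanUrl strips the query/hash suffix and the remainder is kept as the postfix).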
function tryFsResolve(fsPath, options, tryIndex = true, targetWeb = true, skipPackageJson = false) {
const hashIndex = fsPath.indexOf("#");
if (hashIndex >= 0 && isInNodeModules$1(fsPath)) {
const queryIndex = fsPath.indexOf("?");
if (queryIndex < 0 || queryIndex > hashIndex) {
const file2 = queryIndex > hashIndex ? fsPath.slice(0, queryIndex) : fsPath;
const res2 = tryCleanFsResolve(
file2,
options,
tryIndex,
targetWeb,
skipPackageJson
);
if (res2) return res2 + fsPath.slice(file2.length);
}
}
const { file, postfix } = splitFileAndPostfix(fsPath);
const res = tryCleanFsResolve(
file,
options,
tryIndex,
targetWeb,
skipPackageJson
);
if (res) return res + postfix;
}
const knownTsOutputRE = /\.(?:js|mjs|cjs|jsx)$/;
const isPossibleTsOutput = (url) => knownTsOutputRE.test(url);
function tryCleanFsResolve(file, options, tryIndex = true, targetWeb = true, skipPackageJson = false) {
const { tryPrefix, extensions, preserveSymlinks } = options;
const fsUtils = options.fsUtils ?? commonFsUtils;
const fileResult = fsUtils.tryResolveRealFileOrType(
file,
options.preserveSymlinks
);
if (fileResult?.path) return fileResult.path;
let res;
const possibleJsToTs = options.isFromTsImporter && isPossibleTsOutput(file);
if (possibleJsToTs || options.extensions.length || tryPrefix) {
const dirPath = path$n.dirname(file);
if (fsUtils.isDirectory(dirPath)) {
if (possibleJsToTs) {
const fileExt = path$n.extname(file);
const fileName = file.slice(0, -fileExt.length);
if (res = fsUtils.tryResolveRealFile(
fileName + fileExt.replace("js", "ts"),
preserveSymlinks
))
return res;
if (fileExt === ".js" && (res = fsUtils.tryResolveRealFile(
fileName + ".tsx",
preserveSymlinks
)))
return res;
}
if (res = fsUtils.tryResolveRealFileWithExtensions(
file,
extensions,
preserveSymlinks
))
return res;
if (tryPrefix) {
const prefixed = `${dirPath}/${options.tryPrefix}${path$n.basename(file)}`;
if (res = fsUtils.tryResolveRealFile(prefixed, preserveSymlinks))
return res;
if (res = fsUtils.tryResolveRealFileWithExtensions(
prefixed,
extensions,
preserveSymlinks
))
return res;
}
}
}
if (tryIndex && fileResult?.type === "directory") {
const dirPath = file;
if (!skipPackageJson) {
let pkgPath = `${dirPath}/package.json`;
try {
if (fsUtils.existsSync(pkgPath)) {
if (!options.preserveSymlinks) {
pkgPath = safeRealpathSync(pkgPath);
}
const pkg = loadPackageData(pkgPath);
return resolvePackageEntry(dirPath, pkg, targetWeb, options);
}
} catch (e) {
if (e.code !== ERR_RESOLVE_PACKAGE_ENTRY_FAIL && e.code !== "ENOENT")
throw e;
}
}
if (res = fsUtils.tryResolveRealFileWithExtensions(
`${dirPath}/index`,
extensions,
preserveSymlinks
))
return res;
if (tryPrefix) {
if (res = fsUtils.tryResolveRealFileWithExtensions(
`${dirPath}/${options.tryPrefix}index`,
extensions,
preserveSymlinks
))
return res;
}
}
}
function tryNodeResolve(id, importer, options, targetWeb, depsOptimizer, ssr = false, externalize, allowLinkedExternal = true) {
const { root, dedupe, isBuild, preserveSymlinks, packageCache } = options;
const deepMatch = deepImportRE.exec(id);
const pkgId = deepMatch ? deepMatch[1] || deepMatch[2] : cleanUrl(id);
let basedir;
if (dedupe?.includes(pkgId)) {
basedir = root;
} else if (importer && path$n.isAbsolute(importer) && // css processing appends `*` for importer
(importer[importer.length - 1] === "*" || fs__default.existsSync(cleanUrl(importer)))) {
basedir = path$n.dirname(importer);
} else {
basedir = root;
}
let selfPkg = null;
if (!isBuiltin(id) && !id.includes("\0") && bareImportRE.test(id)) {
const selfPackageData = findNearestPackageData(basedir, packageCache);
selfPkg = selfPackageData?.data.exports && selfPackageData?.data.name === pkgId ? selfPackageData : null;
}
const pkg = selfPkg || resolvePackageData(pkgId, basedir, preserveSymlinks, packageCache);
if (!pkg) {
if (basedir !== root && // root has no peer dep
!isBuiltin(id) && !id.includes("\0") && bareImportRE.test(id)) {
const mainPkg = findNearestMainPackageData(basedir, packageCache)?.data;
if (mainPkg) {
const pkgName = getNpmPackageName(id);
if (pkgName != null && mainPkg.peerDependencies?.[pkgName] && mainPkg.peerDependenciesMeta?.[pkgName]?.optional) {
return {
id: `${optionalPeerDepId}:${id}:${mainPkg.name}`
};
}
}
}
return;
}
const resolveId = deepMatch ? resolveDeepImport : resolvePackageEntry;
const unresolvedId = deepMatch ? "." + id.slice(pkgId.length) : id;
let resolved;
try {
resolved = resolveId(unresolvedId, pkg, targetWeb, options);
} catch (err) {
if (!options.tryEsmOnly) {
throw err;
}
}
if (!resolved && options.tryEsmOnly) {
resolved = resolveId(unresolvedId, pkg, targetWeb, {
...options,
isRequire: false,
mainFields: DEFAULT_MAIN_FIELDS,
extensions: DEFAULT_EXTENSIONS
});
}
if (!resolved) {
return;
}
const processResult = (resolved2) => {
if (!externalize) {
return resolved2;
}
if (!allowLinkedExternal && !isInNodeModules$1(resolved2.id)) {
return resolved2;
}
const resolvedExt = path$n.extname(resolved2.id);
if (resolvedExt && resolvedExt !== ".js" && resolvedExt !== ".mjs" && resolvedExt !== ".cjs") {
return resolved2;
}
let resolvedId = id;
if (deepMatch && !pkg?.data.exports && path$n.extname(id) !== resolvedExt) {
const index = resolved2.id.indexOf(id);
if (index > -1) {
resolvedId = resolved2.id.slice(index);
debug$c?.(
`[processResult] ${colors$1.cyan(id)} -> ${colors$1.dim(resolvedId)}`
);
}
}
return { ...resolved2, id: resolvedId, external: true };
};
if (!options.idOnly && (!options.scan && isBuild && !depsOptimizer || externalize)) {
return processResult({
id: resolved,
moduleSideEffects: pkg.hasSideEffects(resolved)
});
}
if (!options.ssrOptimizeCheck && (!isInNodeModules$1(resolved) || // linked
!depsOptimizer || // resolving before listening to the server
options.scan)) {
return { id: resolved };
}
const isJsType = depsOptimizer ? isOptimizable(resolved, depsOptimizer.options) : OPTIMIZABLE_ENTRY_RE.test(resolved);
let exclude = depsOptimizer?.options.exclude;
let include = depsOptimizer?.options.include;
if (options.ssrOptimizeCheck) {
exclude = options.ssrConfig?.optimizeDeps?.exclude;
include = options.ssrConfig?.optimizeDeps?.include;
}
const skipOptimization = !options.ssrOptimizeCheck && depsOptimizer?.options.noDiscovery || !isJsType || importer && isInNodeModules$1(importer) || exclude?.includes(pkgId) || exclude?.includes(id) || SPECIAL_QUERY_RE.test(resolved) || // During dev SSR, we don't have a way to reload the module graph if
// a non-optimized dep is found. So we need to skip optimization here.
// The only optimized deps are the ones explicitly listed in the config.
!options.ssrOptimizeCheck && !isBuild && ssr || // Only optimize non-external CJS deps during SSR by default
ssr && isFilePathESM(resolved, options.packageCache) && !(include?.includes(pkgId) || include?.includes(id));
if (options.ssrOptimizeCheck) {
return {
id: skipOptimization ? injectQuery(resolved, `__vite_skip_optimization`) : resolved
};
}
if (skipOptimization) {
if (!isBuild) {
const versionHash = depsOptimizer.metadata.browserHash;
if (versionHash && isJsType) {
resolved = injectQuery(resolved, `v=${versionHash}`);
}
}
} else {
const optimizedInfo = depsOptimizer.registerMissingImport(id, resolved);
resolved = depsOptimizer.getOptimizedDepId(optimizedInfo);
}
if (!options.idOnly && !options.scan && isBuild) {
return {
id: resolved,
moduleSideEffects: pkg.hasSideEffects(resolved)
};
} else {
return { id: resolved };
}
}
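// Illustrative sketch (not part of the upstream code; never called): how tryNodeResolve
// splits a deep import into a package id and a package-relative specifier before handing it
// to resolveDeepImport. The id below is an assumed example.
function exampleDeepImportSplit() {
  const id = 'some-pkg/dist/helper'
  const pkgId = 'some-pkg' // what deepImportRE extracts
  const unresolvedId = '.' + id.slice(pkgId.length) // -> './dist/helper'
  return { pkgId, unresolvedId }
}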
async function tryOptimizedResolve(depsOptimizer, id, importer, preserveSymlinks, packageCache) {
await depsOptimizer.scanProcessing;
const metadata = depsOptimizer.metadata;
const depInfo = optimizedDepInfoFromId(metadata, id);
if (depInfo) {
return depsOptimizer.getOptimizedDepId(depInfo);
}
if (!importer) return;
let idPkgDir;
const nestedIdMatch = `> ${id}`;
for (const optimizedData of metadata.depInfoList) {
if (!optimizedData.src) continue;
if (!optimizedData.id.endsWith(nestedIdMatch)) continue;
if (idPkgDir == null) {
const pkgName = getNpmPackageName(id);
if (!pkgName) break;
idPkgDir = resolvePackageData(
pkgName,
importer,
preserveSymlinks,
packageCache
)?.dir;
if (idPkgDir == null) break;
idPkgDir = normalizePath$3(idPkgDir);
}
if (optimizedData.src.startsWith(withTrailingSlash(idPkgDir))) {
return depsOptimizer.getOptimizedDepId(optimizedData);
}
}
}
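// Illustrative sketch (never called): the nested-dependency id format tryOptimizedResolve
// matches. Optimized entries for nested copies use "parent > child" ids, so the suffix check
// above ties the request to the copy whose src lives under the importer's package directory.
// The package names below are assumed examples.
function exampleNestedOptimizedDepId() {
  const id = 'child-pkg'
  const optimizedDataId = 'parent-pkg > child-pkg'
  return optimizedDataId.endsWith(`> ${id}`) // -> true
}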
function resolvePackageEntry(id, { dir, data, setResolvedCache, getResolvedCache }, targetWeb, options) {
const { file: idWithoutPostfix, postfix } = splitFileAndPostfix(id);
const cached = getResolvedCache(".", targetWeb);
if (cached) {
return cached + postfix;
}
try {
let entryPoint;
if (data.exports) {
entryPoint = resolveExportsOrImports(
data,
".",
options,
targetWeb,
"exports"
);
}
if (!entryPoint) {
for (const field of options.mainFields) {
if (field === "browser") {
if (targetWeb) {
entryPoint = tryResolveBrowserEntry(dir, data, options);
if (entryPoint) {
break;
}
}
} else if (typeof data[field] === "string") {
entryPoint = data[field];
break;
}
}
}
entryPoint ||= data.main;
const entryPoints = entryPoint ? [entryPoint] : ["index.js", "index.json", "index.node"];
for (let entry of entryPoints) {
let skipPackageJson = false;
if (options.mainFields[0] === "sass" && !options.extensions.includes(path$n.extname(entry))) {
entry = "";
skipPackageJson = true;
} else {
const { browser: browserField } = data;
if (targetWeb && options.mainFields.includes("browser") && isObject$1(browserField)) {
entry = mapWithBrowserField(entry, browserField) || entry;
}
}
const entryPointPath = path$n.join(dir, entry);
const resolvedEntryPoint = tryFsResolve(
entryPointPath,
options,
true,
true,
skipPackageJson
);
if (resolvedEntryPoint) {
debug$c?.(
`[package entry] ${colors$1.cyan(idWithoutPostfix)} -> ${colors$1.dim(
resolvedEntryPoint
)}${postfix !== "" ? ` (postfix: ${postfix})` : ""}`
);
setResolvedCache(".", resolvedEntryPoint, targetWeb);
return resolvedEntryPoint + postfix;
}
}
} catch (e) {
packageEntryFailure(id, e.message);
}
packageEntryFailure(id);
}
function packageEntryFailure(id, details) {
const err = new Error(
`Failed to resolve entry for package "${id}". The package may have incorrect main/module/exports specified in its package.json` + (details ? ": " + details : ".")
);
err.code = ERR_RESOLVE_PACKAGE_ENTRY_FAIL;
throw err;
}
function resolveExportsOrImports(pkg, key, options, targetWeb, type) {
const additionalConditions = new Set(
options.overrideConditions || [
"production",
"development",
"module",
...options.conditions
]
);
const conditions = [...additionalConditions].filter((condition) => {
switch (condition) {
case "production":
return options.isProduction;
case "development":
return !options.isProduction;
}
return true;
});
const fn = type === "imports" ? f : o;
const result = fn(pkg, key, {
browser: targetWeb && !additionalConditions.has("node"),
require: options.isRequire && !additionalConditions.has("import"),
conditions
});
return result ? result[0] : void 0;
}
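// Usage sketch (illustrative, never called): the conditions resolveExportsOrImports ends up
// applying for a production, ESM resolve with one user condition and no overrideConditions.
// The options object is an assumption for the example.
function exampleExportConditions() {
  const options = { isProduction: true, isRequire: false, conditions: ['custom'] }
  const conditions = ['production', 'development', 'module', ...options.conditions].filter(
    (condition) => {
      if (condition === 'production') return options.isProduction
      if (condition === 'development') return !options.isProduction
      return true
    }
  )
  return conditions // -> ['production', 'module', 'custom']
}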
function resolveDeepImport(id, {
webResolvedImports,
setResolvedCache,
getResolvedCache,
dir,
data
}, targetWeb, options) {
const cache = getResolvedCache(id, targetWeb);
if (cache) {
return cache;
}
let relativeId = id;
const { exports: exportsField, browser: browserField } = data;
if (exportsField) {
if (isObject$1(exportsField) && !Array.isArray(exportsField)) {
const { file, postfix } = splitFileAndPostfix(relativeId);
const exportsId = resolveExportsOrImports(
data,
file,
options,
targetWeb,
"exports"
);
if (exportsId !== void 0) {
relativeId = exportsId + postfix;
} else {
relativeId = void 0;
}
} else {
relativeId = void 0;
}
if (!relativeId) {
throw new Error(
`Package subpath '${relativeId}' is not defined by "exports" in ${path$n.join(dir, "package.json")}.`
);
}
} else if (targetWeb && options.mainFields.includes("browser") && isObject$1(browserField)) {
const { file, postfix } = splitFileAndPostfix(relativeId);
const mapped = mapWithBrowserField(file, browserField);
if (mapped) {
relativeId = mapped + postfix;
} else if (mapped === false) {
return webResolvedImports[id] = browserExternalId;
}
}
if (relativeId) {
const resolved = tryFsResolve(
path$n.join(dir, relativeId),
options,
!exportsField,
// try index only if no exports field
targetWeb
);
if (resolved) {
debug$c?.(
`[node/deep-import] ${colors$1.cyan(id)} -> ${colors$1.dim(resolved)}`
);
setResolvedCache(id, resolved, targetWeb);
return resolved;
}
}
}
function tryResolveBrowserMapping(id, importer, options, isFilePath, externalize) {
let res;
const pkg = importer && findNearestPackageData(path$n.dirname(importer), options.packageCache);
if (pkg && isObject$1(pkg.data.browser)) {
const mapId = isFilePath ? "./" + slash$1(path$n.relative(pkg.dir, id)) : id;
const browserMappedPath = mapWithBrowserField(mapId, pkg.data.browser);
if (browserMappedPath) {
if (res = bareImportRE.test(browserMappedPath) ? tryNodeResolve(browserMappedPath, importer, options, true)?.id : tryFsResolve(path$n.join(pkg.dir, browserMappedPath), options)) {
debug$c?.(`[browser mapped] ${colors$1.cyan(id)} -> ${colors$1.dim(res)}`);
let result = { id: res };
if (options.idOnly) {
return result;
}
if (!options.scan && options.isBuild) {
const resPkg = findNearestPackageData(
path$n.dirname(res),
options.packageCache
);
if (resPkg) {
result = {
id: res,
moduleSideEffects: resPkg.hasSideEffects(res)
};
}
}
return externalize ? { ...result, external: true } : result;
}
} else if (browserMappedPath === false) {
return browserExternalId;
}
}
}
function tryResolveBrowserEntry(dir, data, options) {
const browserEntry = typeof data.browser === "string" ? data.browser : isObject$1(data.browser) && data.browser["."];
if (browserEntry) {
if (!options.isRequire && options.mainFields.includes("module") && typeof data.module === "string" && data.module !== browserEntry) {
const resolvedBrowserEntry = tryFsResolve(
path$n.join(dir, browserEntry),
options
);
if (resolvedBrowserEntry) {
const content = fs__default.readFileSync(resolvedBrowserEntry, "utf-8");
if (hasESMSyntax(content)) {
return browserEntry;
} else {
return data.module;
}
}
} else {
return browserEntry;
}
}
}
function mapWithBrowserField(relativePathInPkgDir, map) {
const normalizedPath = path$n.posix.normalize(relativePathInPkgDir);
for (const key in map) {
const normalizedKey = path$n.posix.normalize(key);
if (normalizedPath === normalizedKey || equalWithoutSuffix(normalizedPath, normalizedKey, ".js") || equalWithoutSuffix(normalizedPath, normalizedKey, "/index.js")) {
return map[key];
}
}
}
function equalWithoutSuffix(path2, key, suffix) {
return key.endsWith(suffix) && key.slice(0, -suffix.length) === path2;
}
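// Usage sketch (illustrative, never called): how the browser-field mapping above behaves for
// a typical package.json "browser" map. The map and subpaths are assumed examples.
function exampleMapWithBrowserField() {
  const browserField = { './server.js': './client.js', './fs-helper.js': false }
  const exact = mapWithBrowserField('./server.js', browserField) // -> './client.js'
  const noExt = mapWithBrowserField('./server', browserField) // -> './client.js' (".js" suffix match)
  const disabled = mapWithBrowserField('./fs-helper', browserField) // -> false (caller externalizes it)
  return { exact, noExt, disabled }
}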
const externalWithConversionNamespace = "vite:dep-pre-bundle:external-conversion";
const convertedExternalPrefix = "vite-dep-pre-bundle-external:";
const cjsExternalFacadeNamespace = "vite:cjs-external-facade";
const nonFacadePrefix = "vite-cjs-external-facade:";
const externalTypes = [
"css",
// supported pre-processor types
"less",
"sass",
"scss",
"styl",
"stylus",
"pcss",
"postcss",
// wasm
"wasm",
// known SFC types
"vue",
"svelte",
"marko",
"astro",
"imba",
// JSX/TSX may be configured to be compiled differently from how esbuild
// handles them by default, so exclude them as well
"jsx",
"tsx",
...KNOWN_ASSET_TYPES
];
function esbuildDepPlugin(qualified, external, config, ssr) {
const { extensions } = getDepOptimizationConfig(config, ssr);
const allExternalTypes = extensions ? externalTypes.filter((type) => !extensions?.includes("." + type)) : externalTypes;
const esmPackageCache = /* @__PURE__ */ new Map();
const cjsPackageCache = /* @__PURE__ */ new Map();
const _resolve = config.createResolver({
asSrc: false,
scan: true,
packageCache: esmPackageCache
});
const _resolveRequire = config.createResolver({
asSrc: false,
isRequire: true,
scan: true,
packageCache: cjsPackageCache
});
const resolve = (id, importer, kind, resolveDir) => {
let _importer;
{
_importer = importer in qualified ? qualified[importer] : importer;
}
const resolver = kind.startsWith("require") ? _resolveRequire : _resolve;
return resolver(id, _importer, void 0, ssr);
};
const resolveResult = (id, resolved) => {
if (resolved.startsWith(browserExternalId)) {
return {
path: id,
namespace: "browser-external"
};
}
if (resolved.startsWith(optionalPeerDepId)) {
return {
path: resolved,
namespace: "optional-peer-dep"
};
}
if (ssr && isBuiltin(resolved)) {
return;
}
if (isExternalUrl(resolved)) {
return {
path: resolved,
external: true
};
}
return {
path: path$n.resolve(resolved)
};
};
return {
name: "vite:dep-pre-bundle",
setup(build) {
build.onEnd(() => {
esmPackageCache.clear();
cjsPackageCache.clear();
});
build.onResolve(
{
filter: new RegExp(
`\\.(` + allExternalTypes.join("|") + `)(\\?.*)?$`
)
},
async ({ path: id, importer, kind }) => {
if (id.startsWith(convertedExternalPrefix)) {
return {
path: id.slice(convertedExternalPrefix.length),
external: true
};
}
const resolved = await resolve(id, importer, kind);
if (resolved) {
if (kind === "require-call") {
if (resolved.endsWith(".js")) {
return {
path: resolved,
external: false
};
}
return {
path: resolved,
namespace: externalWithConversionNamespace
};
}
return {
path: resolved,
external: true
};
}
}
);
build.onLoad(
{ filter: /./, namespace: externalWithConversionNamespace },
(args) => {
const modulePath = `"${convertedExternalPrefix}${args.path}"`;
return {
contents: isCSSRequest(args.path) && !isModuleCSSRequest(args.path) ? `import ${modulePath};` : `export { default } from ${modulePath};export * from ${modulePath};`,
loader: "js"
};
}
);
function resolveEntry(id) {
const flatId = flattenId(id);
if (flatId in qualified) {
return {
path: qualified[flatId]
};
}
}
build.onResolve(
{ filter: /^[\w@][^:]/ },
async ({ path: id, importer, kind }) => {
if (moduleListContains(external, id)) {
return {
path: id,
external: true
};
}
let entry;
if (!importer) {
if (entry = resolveEntry(id)) return entry;
const aliased = await _resolve(id, void 0, true);
if (aliased && (entry = resolveEntry(aliased))) {
return entry;
}
}
const resolved = await resolve(id, importer, kind);
if (resolved) {
return resolveResult(id, resolved);
}
}
);
build.onLoad(
{ filter: /.*/, namespace: "browser-external" },
({ path: path2 }) => {
if (config.isProduction) {
return {
contents: "module.exports = {}"
};
} else {
return {
// Return in CJS to intercept named imports. Use `Object.create` to
// create the Proxy in the prototype to work around an esbuild issue. Why?
//
// In short, esbuild cjs->esm flow:
// 1. Create empty object using `Object.create(Object.getPrototypeOf(module.exports))`.
// 2. Assign props of `module.exports` to the object.
// 3. Return object for ESM use.
//
// If we do `module.exports = new Proxy({}, {})`, step 1 returns an empty object,
// step 2 does nothing as there are no props on `module.exports`. The final object
// is just an empty object.
//
// Creating the Proxy in the prototype satisfies step 1 immediately, which means
// the returned object is a Proxy that we can intercept.
//
// Note: Skip keys that are accessed by esbuild and browser devtools.
contents: `module.exports = Object.create(new Proxy({}, {
get(_, key) {
if (
key !== '__esModule' &&
key !== '__proto__' &&
key !== 'constructor' &&
key !== 'splice'
) {
console.warn(\`Module "${path2}" has been externalized for browser compatibility. Cannot access "${path2}.\${key}" in client code. See https://vitejs.dev/guide/troubleshooting.html#module-externalized-for-browser-compatibility for more details.\`)
}
}
}))`
};
}
}
);
build.onLoad(
{ filter: /.*/, namespace: "optional-peer-dep" },
({ path: path2 }) => {
if (config.isProduction) {
return {
contents: "module.exports = {}"
};
} else {
const [, peerDep, parentDep] = path2.split(":");
return {
contents: `throw new Error(\`Could not resolve "${peerDep}" imported by "${parentDep}". Is it installed?\`)`
};
}
}
);
}
};
}
const matchesEntireLine = (text) => `^${escapeRegex(text)}$`;
function esbuildCjsExternalPlugin(externals, platform) {
return {
name: "cjs-external",
setup(build) {
const filter = new RegExp(externals.map(matchesEntireLine).join("|"));
build.onResolve({ filter: new RegExp(`^${nonFacadePrefix}`) }, (args) => {
return {
path: args.path.slice(nonFacadePrefix.length),
external: true
};
});
build.onResolve({ filter }, (args) => {
if (args.kind === "require-call" && platform !== "node") {
return {
path: args.path,
namespace: cjsExternalFacadeNamespace
};
}
return {
path: args.path,
external: true
};
});
build.onLoad(
{ filter: /.*/, namespace: cjsExternalFacadeNamespace },
(args) => ({
contents: `import * as m from ${JSON.stringify(
nonFacadePrefix + args.path
)};module.exports = m;`
})
);
}
};
}
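// Illustrative sketch (never called): the facade module esbuildCjsExternalPlugin emits when a
// `require()` of an external id is bundled for a non-node platform. "some-external" is an
// assumed example id; the prefixed specifier is then re-resolved as external with the prefix
// stripped.
function exampleCjsExternalFacade() {
  const id = 'some-external'
  return `import * as m from ${JSON.stringify(nonFacadePrefix + id)};module.exports = m;`
  // -> import * as m from "vite-cjs-external-facade:some-external";module.exports = m;
}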
const debug$b = createDebugger("vite:ssr-external");
const isSsrExternalCache = /* @__PURE__ */ new WeakMap();
function shouldExternalizeForSSR(id, importer, config) {
let isSsrExternal = isSsrExternalCache.get(config);
if (!isSsrExternal) {
isSsrExternal = createIsSsrExternal(config);
isSsrExternalCache.set(config, isSsrExternal);
}
return isSsrExternal(id, importer);
}
function createIsConfiguredAsSsrExternal(config) {
const { ssr, root } = config;
const noExternal = ssr?.noExternal;
const noExternalFilter = noExternal !== "undefined" && typeof noExternal !== "boolean" && createFilter(void 0, noExternal, { resolve: false });
const targetConditions = config.ssr.resolve?.externalConditions || [];
const resolveOptions = {
...config.resolve,
root,
isProduction: false,
isBuild: true,
conditions: targetConditions
};
const isExternalizable = (id, importer, configuredAsExternal) => {
if (!bareImportRE.test(id) || id.includes("\0")) {
return false;
}
try {
return !!tryNodeResolve(
id,
// Skip passing importer in build to avoid externalizing non-hoisted dependencies
// unresolvable from root (which would be unresolvable from output bundles also)
config.command === "build" ? void 0 : importer,
resolveOptions,
ssr?.target === "webworker",
void 0,
true,
// try to externalize, will return undefined or an object without
// an external flag if it isn't externalizable
true,
// Allow linked packages to be externalized if they are explicitly
// configured as external
!!configuredAsExternal
)?.external;
} catch (e) {
debug$b?.(
`Failed to node resolve "${id}". Skipping externalizing it by default.`
);
return false;
}
};
return (id, importer) => {
if (
// If this id is defined as external, force it as external
// Note that individual package entries are allowed in ssr.external
ssr.external !== true && ssr.external?.includes(id)
) {
return true;
}
const pkgName = getNpmPackageName(id);
if (!pkgName) {
return isExternalizable(id, importer);
}
if (
// A package name in ssr.external externalizes every
// externalizable package entry
ssr.external !== true && ssr.external?.includes(pkgName)
) {
return isExternalizable(id, importer, true);
}
if (typeof noExternal === "boolean") {
return !noExternal;
}
if (noExternalFilter && !noExternalFilter(pkgName)) {
return false;
}
return isExternalizable(id, importer, ssr.external === true);
};
}
function createIsSsrExternal(config) {
const processedIds = /* @__PURE__ */ new Map();
const isConfiguredAsExternal = createIsConfiguredAsSsrExternal(config);
return (id, importer) => {
if (processedIds.has(id)) {
return processedIds.get(id);
}
let external = false;
if (id[0] !== "." && !path$n.isAbsolute(id)) {
external = isBuiltin(id) || isConfiguredAsExternal(id, importer);
}
processedIds.set(id, external);
return external;
};
}
const jsonExtRE = /\.json(?:$|\?)(?!commonjs-(?:proxy|external))/;
const jsonLangs = `\\.(?:json|json5)(?:$|\\?)`;
const jsonLangRE = new RegExp(jsonLangs);
const isJSONRequest = (request) => jsonLangRE.test(request);
function jsonPlugin(options = {}, isBuild) {
return {
name: "vite:json",
transform(json, id) {
if (!jsonExtRE.test(id)) return null;
if (SPECIAL_QUERY_RE.test(id)) return null;
json = stripBomTag(json);
try {
if (options.stringify) {
if (isBuild) {
return {
// during build, parse then double-stringify to remove all
// unnecessary whitespace to reduce bundle size.
code: `export default JSON.parse(${JSON.stringify(
JSON.stringify(JSON.parse(json))
)})`,
map: { mappings: "" }
};
} else {
return `export default JSON.parse(${JSON.stringify(json)})`;
}
}
const parsed = JSON.parse(json);
return {
code: dataToEsm(parsed, {
preferConst: true,
namedExports: options.namedExports
}),
map: { mappings: "" }
};
} catch (e) {
const position = extractJsonErrorPosition(e.message, json.length);
const msg = position ? `, invalid JSON syntax found at position ${position}` : `.`;
this.error(`Failed to parse JSON file` + msg, position);
}
}
};
}
function extractJsonErrorPosition(errorMessage, inputLength) {
if (errorMessage.startsWith("Unexpected end of JSON input")) {
return inputLength - 1;
}
const errorMessageList = /at position (\d+)/.exec(errorMessage);
return errorMessageList ? Math.max(parseInt(errorMessageList[1], 10) - 1, 0) : void 0;
}
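// Illustrative sketch (never called): what the vite:json transform emits for a small input
// when `options.stringify` is enabled during build. The JSON source is an assumed example.
function exampleJsonStringifyOutput() {
  const json = '{\n  "name": "app",\n  "private": true\n}'
  // Parse then double-stringify, as in the build branch above, to drop the whitespace.
  return `export default JSON.parse(${JSON.stringify(JSON.stringify(JSON.parse(json)))})`
  // -> export default JSON.parse("{\"name\":\"app\",\"private\":true}")
}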
const ERR_OPTIMIZE_DEPS_PROCESSING_ERROR = "ERR_OPTIMIZE_DEPS_PROCESSING_ERROR";
const ERR_OUTDATED_OPTIMIZED_DEP = "ERR_OUTDATED_OPTIMIZED_DEP";
const ERR_FILE_NOT_FOUND_IN_OPTIMIZED_DEP_DIR = "ERR_FILE_NOT_FOUND_IN_OPTIMIZED_DEP_DIR";
const debug$a = createDebugger("vite:optimize-deps");
function optimizedDepsPlugin(config) {
return {
name: "vite:optimized-deps",
resolveId(id, source, { ssr }) {
if (getDepsOptimizer(config, ssr)?.isOptimizedDepFile(id)) {
return id;
}
},
// this.load({ id }) isn't implemented in PluginContainer
// The logic to register an id to wait until it is processed
// is in importAnalysis, see call to delayDepsOptimizerUntil
async load(id, options) {
const ssr = options?.ssr === true;
const depsOptimizer = getDepsOptimizer(config, ssr);
if (depsOptimizer?.isOptimizedDepFile(id)) {
const metadata = depsOptimizer.metadata;
const file = cleanUrl(id);
const versionMatch = DEP_VERSION_RE.exec(file);
const browserHash = versionMatch ? versionMatch[1].split("=")[1] : void 0;
const info = optimizedDepInfoFromFile(metadata, file);
if (info) {
if (browserHash && info.browserHash !== browserHash) {
throwOutdatedRequest(id);
}
try {
await info.processing;
} catch {
throwProcessingError(id);
}
const newMetadata = depsOptimizer.metadata;
if (metadata !== newMetadata) {
const currentInfo = optimizedDepInfoFromFile(newMetadata, file);
if (info.browserHash !== currentInfo?.browserHash) {
throwOutdatedRequest(id);
}
}
}
debug$a?.(`load ${colors$1.cyan(file)}`);
try {
return await fsp.readFile(file, "utf-8");
} catch (e) {
const newMetadata = depsOptimizer.metadata;
if (optimizedDepInfoFromFile(newMetadata, file)) {
throwOutdatedRequest(id);
}
throwFileNotFoundInOptimizedDep(id);
}
}
}
};
}
function throwProcessingError(id) {
const err = new Error(
`Something unexpected happened while optimizing "${id}". The current page should have reloaded by now`
);
err.code = ERR_OPTIMIZE_DEPS_PROCESSING_ERROR;
throw err;
}
function throwOutdatedRequest(id) {
const err = new Error(
`There is a new version of the pre-bundle for "${id}", a page reload is going to ask for it.`
);
err.code = ERR_OUTDATED_OPTIMIZED_DEP;
throw err;
}
function throwFileNotFoundInOptimizedDep(id) {
const err = new Error(
`The file does not exist at "${id}" which is in the optimize deps directory. The dependency might be incompatible with the dep optimizer. Try adding it to \`optimizeDeps.exclude\`.`
);
err.code = ERR_FILE_NOT_FOUND_IN_OPTIMIZED_DEP_DIR;
throw err;
}
const nonJsRe = /\.json(?:$|\?)/;
const isNonJsRequest = (request) => nonJsRe.test(request);
const importMetaEnvMarker = "__vite_import_meta_env__";
const importMetaEnvKeyReCache = /* @__PURE__ */ new Map();
function definePlugin(config) {
const isBuild = config.command === "build";
const isBuildLib = isBuild && config.build.lib;
const processEnv = {};
if (!isBuildLib) {
const nodeEnv = process.env.NODE_ENV || config.mode;
Object.assign(processEnv, {
"process.env": `{}`,
"global.process.env": `{}`,
"globalThis.process.env": `{}`,
"process.env.NODE_ENV": JSON.stringify(nodeEnv),
"global.process.env.NODE_ENV": JSON.stringify(nodeEnv),
"globalThis.process.env.NODE_ENV": JSON.stringify(nodeEnv)
});
}
const importMetaKeys = {};
const importMetaEnvKeys = {};
const importMetaFallbackKeys = {};
if (isBuild) {
importMetaKeys["import.meta.hot"] = `undefined`;
for (const key in config.env) {
const val = JSON.stringify(config.env[key]);
importMetaKeys[`import.meta.env.${key}`] = val;
importMetaEnvKeys[key] = val;
}
importMetaKeys["import.meta.env.SSR"] = `undefined`;
importMetaFallbackKeys["import.meta.env"] = `undefined`;
}
const userDefine = {};
const userDefineEnv = {};
for (const key in config.define) {
userDefine[key] = handleDefineValue(config.define[key]);
if (isBuild && key.startsWith("import.meta.env.")) {
userDefineEnv[key.slice(16)] = config.define[key];
}
}
function generatePattern(ssr) {
const replaceProcessEnv = !ssr || config.ssr?.target === "webworker";
const define = {
...replaceProcessEnv ? processEnv : {},
...importMetaKeys,
...userDefine,
...importMetaFallbackKeys
};
if ("import.meta.env.SSR" in define) {
define["import.meta.env.SSR"] = ssr + "";
}
if ("import.meta.env" in define) {
define["import.meta.env"] = importMetaEnvMarker;
}
const importMetaEnvVal = serializeDefine({
...importMetaEnvKeys,
SSR: ssr + "",
...userDefineEnv
});
const patternKeys = Object.keys(userDefine);
if (replaceProcessEnv && Object.keys(processEnv).length) {
patternKeys.push("process.env");
}
if (Object.keys(importMetaKeys).length) {
patternKeys.push("import.meta.env", "import.meta.hot");
}
const pattern = patternKeys.length ? new RegExp(patternKeys.map(escapeRegex).join("|")) : null;
return [define, pattern, importMetaEnvVal];
}
const defaultPattern = generatePattern(false);
const ssrPattern = generatePattern(true);
return {
name: "vite:define",
async transform(code, id, options) {
const ssr = options?.ssr === true;
if (!ssr && !isBuild) {
return;
}
if (
// exclude html, css and static assets for performance
isHTMLRequest(id) || isCSSRequest(id) || isNonJsRequest(id) || config.assetsInclude(id)
) {
return;
}
let [define, pattern, importMetaEnvVal] = ssr ? ssrPattern : defaultPattern;
if (!pattern) return;
pattern.lastIndex = 0;
if (!pattern.test(code)) return;
const hasDefineImportMetaEnv = "import.meta.env" in define;
let marker = importMetaEnvMarker;
if (hasDefineImportMetaEnv && code.includes(marker)) {
let i = 1;
do {
marker = importMetaEnvMarker + i++;
} while (code.includes(marker));
if (marker !== importMetaEnvMarker) {
define = { ...define, "import.meta.env": marker };
}
}
const result = await replaceDefine(code, id, define, config);
if (hasDefineImportMetaEnv) {
result.code = result.code.replaceAll(
getImportMetaEnvKeyRe(marker),
(m) => "undefined".padEnd(m.length)
);
if (result.code.includes(marker)) {
result.code = `const ${marker} = ${importMetaEnvVal};
` + result.code;
if (result.map) {
const map = JSON.parse(result.map);
map.mappings = ";" + map.mappings;
result.map = map;
}
}
}
return result;
}
};
}
async function replaceDefine(code, id, define, config) {
const esbuildOptions = config.esbuild || {};
const result = await transform$1(code, {
loader: "js",
charset: esbuildOptions.charset ?? "utf8",
platform: "neutral",
define,
sourcefile: id,
sourcemap: config.command === "build" ? !!config.build.sourcemap : true
});
if (result.map.includes("<define:")) {
const originalMap = new TraceMap(result.map);
if (originalMap.sources.length >= 2) {
const sourceIndex = originalMap.sources.indexOf(id);
const decoded = decodedMap(originalMap);
decoded.sources = [id];
decoded.mappings = decoded.mappings.map(
(segments) => segments.filter((segment) => {
const index = segment[1];
segment[1] = 0;
return index === sourceIndex;
})
);
result.map = JSON.stringify(encodedMap(new TraceMap(decoded)));
}
}
return {
code: result.code,
map: result.map || null
};
}
function serializeDefine(define) {
let res = `{`;
const keys = Object.keys(define).sort();
for (let i = 0; i < keys.length; i++) {
const key = keys[i];
const val = define[key];
res += `${JSON.stringify(key)}: ${handleDefineValue(val)}`;
if (i !== keys.length - 1) {
res += `, `;
}
}
return res + `}`;
}
function handleDefineValue(value) {
if (typeof value === "undefined") return "undefined";
if (typeof value === "string") return value;
return JSON.stringify(value);
}
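// Usage sketch (illustrative, never called): serializeDefine expects each value to already be
// a serialized expression (handleDefineValue passes strings through verbatim), so JSON values
// must be pre-stringified. The keys and values are assumptions for the example.
function exampleSerializeDefine() {
  return serializeDefine({
    MODE: JSON.stringify('development'), // pre-stringified -> quoted in the output
    DEV: 'true', // already an expression, emitted as-is
    SSR: 'false'
  })
  // -> {"DEV": true, "MODE": "development", "SSR": false} (keys are sorted)
}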
function getImportMetaEnvKeyRe(marker) {
let re = importMetaEnvKeyReCache.get(marker);
if (!re) {
re = new RegExp(`${marker}\\..+?\\b`, "g");
importMetaEnvKeyReCache.set(marker, re);
}
return re;
}
const normalizedClientEntry = normalizePath$3(CLIENT_ENTRY);
const normalizedEnvEntry = normalizePath$3(ENV_ENTRY);
function clientInjectionsPlugin(config) {
let injectConfigValues;
return {
name: "vite:client-inject",
async buildStart() {
const resolvedServerHostname = (await resolveHostname(config.server.host)).name;
const resolvedServerPort = config.server.port;
const devBase = config.base;
const serverHost = `${resolvedServerHostname}:${resolvedServerPort}${devBase}`;
let hmrConfig = config.server.hmr;
hmrConfig = isObject$1(hmrConfig) ? hmrConfig : void 0;
const host = hmrConfig?.host || null;
const protocol = hmrConfig?.protocol || null;
const timeout = hmrConfig?.timeout || 3e4;
const overlay = hmrConfig?.overlay !== false;
const isHmrServerSpecified = !!hmrConfig?.server;
const hmrConfigName = path$n.basename(config.configFile || "vite.config.js");
let port = hmrConfig?.clientPort || hmrConfig?.port || null;
if (config.server.middlewareMode && !isHmrServerSpecified) {
port ||= 24678;
}
let directTarget = hmrConfig?.host || resolvedServerHostname;
directTarget += `:${hmrConfig?.port || resolvedServerPort}`;
directTarget += devBase;
let hmrBase = devBase;
if (hmrConfig?.path) {
hmrBase = path$n.posix.join(hmrBase, hmrConfig.path);
}
const userDefine = {};
for (const key in config.define) {
if (!key.startsWith("import.meta.env.")) {
userDefine[key] = config.define[key];
}
}
const serializedDefines = serializeDefine(userDefine);
const modeReplacement = escapeReplacement(config.mode);
const baseReplacement = escapeReplacement(devBase);
const definesReplacement = () => serializedDefines;
const serverHostReplacement = escapeReplacement(serverHost);
const hmrProtocolReplacement = escapeReplacement(protocol);
const hmrHostnameReplacement = escapeReplacement(host);
const hmrPortReplacement = escapeReplacement(port);
const hmrDirectTargetReplacement = escapeReplacement(directTarget);
const hmrBaseReplacement = escapeReplacement(hmrBase);
const hmrTimeoutReplacement = escapeReplacement(timeout);
const hmrEnableOverlayReplacement = escapeReplacement(overlay);
const hmrConfigNameReplacement = escapeReplacement(hmrConfigName);
injectConfigValues = (code) => {
return code.replace(`__MODE__`, modeReplacement).replace(/__BASE__/g, baseReplacement).replace(`__DEFINES__`, definesReplacement).replace(`__SERVER_HOST__`, serverHostReplacement).replace(`__HMR_PROTOCOL__`, hmrProtocolReplacement).replace(`__HMR_HOSTNAME__`, hmrHostnameReplacement).replace(`__HMR_PORT__`, hmrPortReplacement).replace(`__HMR_DIRECT_TARGET__`, hmrDirectTargetReplacement).replace(`__HMR_BASE__`, hmrBaseReplacement).replace(`__HMR_TIMEOUT__`, hmrTimeoutReplacement).replace(`__HMR_ENABLE_OVERLAY__`, hmrEnableOverlayReplacement).replace(`__HMR_CONFIG_NAME__`, hmrConfigNameReplacement);
};
},
async transform(code, id, options) {
if (id === normalizedClientEntry || id === normalizedEnvEntry) {
return injectConfigValues(code);
} else if (!options?.ssr && code.includes("process.env.NODE_ENV")) {
const nodeEnv = config.define?.["process.env.NODE_ENV"] || JSON.stringify(process.env.NODE_ENV || config.mode);
return await replaceDefine(
code,
id,
{
"process.env.NODE_ENV": nodeEnv,
"global.process.env.NODE_ENV": nodeEnv,
"globalThis.process.env.NODE_ENV": nodeEnv
},
config
);
}
}
};
}
function escapeReplacement(value) {
const jsonValue = JSON.stringify(value);
return () => jsonValue;
}
const wasmHelperId = "\0vite/wasm-helper.js";
const wasmHelper = async (opts = {}, url) => {
let result;
if (url.startsWith("data:")) {
const urlContent = url.replace(/^data:.*?base64,/, "");
let bytes;
if (typeof Buffer === "function" && typeof Buffer.from === "function") {
bytes = Buffer.from(urlContent, "base64");
} else if (typeof atob === "function") {
const binaryString = atob(urlContent);
bytes = new Uint8Array(binaryString.length);
for (let i = 0; i < binaryString.length; i++) {
bytes[i] = binaryString.charCodeAt(i);
}
} else {
throw new Error(
"Failed to decode base64-encoded data URL, Buffer and atob are not supported"
);
}
result = await WebAssembly.instantiate(bytes, opts);
} else {
const response = await fetch(url);
const contentType = response.headers.get("Content-Type") || "";
if ("instantiateStreaming" in WebAssembly && contentType.startsWith("application/wasm")) {
result = await WebAssembly.instantiateStreaming(response, opts);
} else {
const buffer = await response.arrayBuffer();
result = await WebAssembly.instantiate(buffer, opts);
}
}
return result.instance;
};
const wasmHelperCode = wasmHelper.toString();
const wasmHelperPlugin = (config) => {
return {
name: "vite:wasm-helper",
resolveId(id) {
if (id === wasmHelperId) {
return id;
}
},
async load(id) {
if (id === wasmHelperId) {
return `export default ${wasmHelperCode}`;
}
if (!id.endsWith(".wasm?init")) {
return;
}
const url = await fileToUrl$1(id, config, this);
return `
import initWasm from "${wasmHelperId}"
export default opts => initWasm(opts, ${JSON.stringify(url)})
`;
}
};
};
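// Illustrative sketch (never called): the proxy module vite:wasm-helper generates for a
// `*.wasm?init` import, mirroring the load() hook above. The asset URL is an assumed example;
// application code consumes the proxy roughly as `const instance = await init(importObject)`.
function exampleWasmInitModule() {
  const url = '/assets/add-abc123.wasm'
  return `
import initWasm from "${wasmHelperId}"
export default opts => initWasm(opts, ${JSON.stringify(url)})
`
}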
const wasmFallbackPlugin = () => {
return {
name: "vite:wasm-fallback",
async load(id) {
if (!id.endsWith(".wasm")) {
return;
}
throw new Error(
'"ESM integration proposal for Wasm" is not supported currently. Use vite-plugin-wasm or other community plugins to handle this. Alternatively, you can use `.wasm?init` or `.wasm?url`. See https://vitejs.dev/guide/features.html#webassembly for more details.'
);
}
};
};
const workerOrSharedWorkerRE = /(?:\?|&)(worker|sharedworker)(?:&|$)/;
const workerFileRE = /(?:\?|&)worker_file&type=(\w+)(?:&|$)/;
const inlineRE = /[?&]inline\b/;
const WORKER_FILE_ID = "worker_file";
const workerCache = /* @__PURE__ */ new WeakMap();
function saveEmitWorkerAsset(config, asset) {
const workerMap = workerCache.get(config.mainConfig || config);
workerMap.assets.set(asset.fileName, asset);
}
async function bundleWorkerEntry(config, id) {
const input = cleanUrl(id);
const newBundleChain = [...config.bundleChain, input];
if (config.bundleChain.includes(input)) {
throw new Error(
`Circular worker imports detected. Vite does not support it. Import chain: ${newBundleChain.map((id2) => prettifyUrl(id2, config.root)).join(" -> ")}`
);
}
const { rollup } = await import('rollup');
const { plugins, rollupOptions, format } = config.worker;
const bundle = await rollup({
...rollupOptions,
input,
plugins: await plugins(newBundleChain),
onwarn(warning, warn) {
onRollupWarning(warning, warn, config);
},
preserveEntrySignatures: false
});
let chunk;
try {
const workerOutputConfig = config.worker.rollupOptions.output;
const workerConfig = workerOutputConfig ? Array.isArray(workerOutputConfig) ? workerOutputConfig[0] || {} : workerOutputConfig : {};
const {
output: [outputChunk, ...outputChunks]
} = await bundle.generate({
entryFileNames: path$n.posix.join(
config.build.assetsDir,
"[name]-[hash].js"
),
chunkFileNames: path$n.posix.join(
config.build.assetsDir,
"[name]-[hash].js"
),
assetFileNames: path$n.posix.join(
config.build.assetsDir,
"[name]-[hash].[ext]"
),
...workerConfig,
format,
sourcemap: config.build.sourcemap
});
chunk = outputChunk;
outputChunks.forEach((outputChunk2) => {
if (outputChunk2.type === "asset") {
saveEmitWorkerAsset(config, outputChunk2);
} else if (outputChunk2.type === "chunk") {
saveEmitWorkerAsset(config, {
fileName: outputChunk2.fileName,
originalFileName: null,
source: outputChunk2.code
});
}
});
} finally {
await bundle.close();
}
return emitSourcemapForWorkerEntry(config, chunk);
}
function emitSourcemapForWorkerEntry(config, chunk) {
const { map: sourcemap } = chunk;
if (sourcemap) {
if (config.build.sourcemap === "hidden" || config.build.sourcemap === true) {
const data = sourcemap.toString();
const mapFileName = chunk.fileName + ".map";
saveEmitWorkerAsset(config, {
fileName: mapFileName,
originalFileName: null,
source: data
});
}
}
return chunk;
}
const workerAssetUrlRE = /__VITE_WORKER_ASSET__([a-z\d]{8})__/g;
function encodeWorkerAssetFileName(fileName, workerCache2) {
const { fileNameHash } = workerCache2;
const hash = getHash(fileName);
if (!fileNameHash.get(hash)) {
fileNameHash.set(hash, fileName);
}
return `__VITE_WORKER_ASSET__${hash}__`;
}
async function workerFileToUrl(config, id) {
const workerMap = workerCache.get(config.mainConfig || config);
let fileName = workerMap.bundle.get(id);
if (!fileName) {
const outputChunk = await bundleWorkerEntry(config, id);
fileName = outputChunk.fileName;
saveEmitWorkerAsset(config, {
fileName,
originalFileName: null,
source: outputChunk.code
});
workerMap.bundle.set(id, fileName);
}
return encodeWorkerAssetFileName(fileName, workerMap);
}
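// Illustrative sketch (never called): bundled worker entries are referenced through a
// placeholder matching workerAssetUrlRE above; webWorkerPlugin's renderChunk later replaces it
// with the emitted file path. The hash below is an assumed example value.
function exampleWorkerAssetPlaceholder() {
  const hash = '0a1b2c3d' // real values come from getHash(fileName)
  return `__VITE_WORKER_ASSET__${hash}__`
}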
function webWorkerPostPlugin() {
return {
name: "vite:worker-post",
resolveImportMeta(property, { format }) {
if (format === "iife") {
if (!property) {
return `{
url: self.location.href
}`;
}
if (property === "url") {
return "self.location.href";
}
}
return null;
}
};
}
function webWorkerPlugin(config) {
const isBuild = config.command === "build";
let server;
const isWorker = config.isWorker;
return {
name: "vite:worker",
configureServer(_server) {
server = _server;
},
buildStart() {
if (isWorker) {
return;
}
workerCache.set(config, {
assets: /* @__PURE__ */ new Map(),
bundle: /* @__PURE__ */ new Map(),
fileNameHash: /* @__PURE__ */ new Map()
});
},
load(id) {
if (isBuild && workerOrSharedWorkerRE.test(id)) {
return "";
}
},
shouldTransformCachedModule({ id }) {
if (isBuild && config.build.watch && workerOrSharedWorkerRE.test(id)) {
return true;
}
},
async transform(raw, id) {
const workerFileMatch = workerFileRE.exec(id);
if (workerFileMatch) {
const workerType2 = workerFileMatch[1];
let injectEnv = "";
const scriptPath = JSON.stringify(
path$n.posix.join(config.base, ENV_PUBLIC_PATH)
);
if (workerType2 === "classic") {
injectEnv = `importScripts(${scriptPath})
`;
} else if (workerType2 === "module") {
injectEnv = `import ${scriptPath}
`;
} else if (workerType2 === "ignore") {
if (isBuild) {
injectEnv = "";
} else if (server) {
const { moduleGraph } = server;
const module = moduleGraph.getModuleById(ENV_ENTRY);
injectEnv = module?.transformResult?.code || "";
}
}
if (injectEnv) {
const s = new MagicString(raw);
s.prepend(injectEnv + ";\n");
return {
code: s.toString(),
map: s.generateMap({ hires: "boundary" })
};
}
return;
}
const workerMatch = workerOrSharedWorkerRE.exec(id);
if (!workerMatch) return;
const { format } = config.worker;
const workerConstructor = workerMatch[1] === "sharedworker" ? "SharedWorker" : "Worker";
const workerType = isBuild ? format === "es" ? "module" : "classic" : "module";
const workerTypeOption = `{
${workerType === "module" ? `type: "module",` : ""}
name: options?.name
}`;
let urlCode;
if (isBuild) {
if (isWorker && config.bundleChain.at(-1) === cleanUrl(id)) {
urlCode = "self.location.href";
} else if (inlineRE.test(id)) {
const chunk = await bundleWorkerEntry(config, id);
const encodedJs = `const encodedJs = "${Buffer.from(
chunk.code
).toString("base64")}";`;
const code = (
// Using a blob URL for SharedWorker results in multiple instances of the same worker
workerConstructor === "Worker" ? `${encodedJs}
const decodeBase64 = (base64) => Uint8Array.from(atob(base64), c => c.charCodeAt(0));
const blob = typeof self !== "undefined" && self.Blob && new Blob([${workerType === "classic" ? "" : (
// `URL` is always available in `Worker[type="module"]`
`'URL.revokeObjectURL(import.meta.url);',`
)}decodeBase64(encodedJs)], { type: "text/javascript;charset=utf-8" });
export default function WorkerWrapper(options) {
let objURL;
try {
objURL = blob && (self.URL || self.webkitURL).createObjectURL(blob);
if (!objURL) throw ''
const worker = new ${workerConstructor}(objURL, ${workerTypeOption});
worker.addEventListener("error", () => {
(self.URL || self.webkitURL).revokeObjectURL(objURL);
});
return worker;
} catch(e) {
return new ${workerConstructor}(
"data:text/javascript;base64," + encodedJs,
${workerTypeOption}
);
}${// For module workers, we should not revoke the URL until the worker runs,
// otherwise the worker fails to run
workerType === "classic" ? ` finally {
objURL && (self.URL || self.webkitURL).revokeObjectURL(objURL);
}` : ""}
}` : `${encodedJs}
export default function WorkerWrapper(options) {
return new ${workerConstructor}(
"data:text/javascript;base64," + encodedJs,
${workerTypeOption}
);
}
`
);
return {
code,
// Empty sourcemap to suppress Rollup warning
map: { mappings: "" }
};
} else {
urlCode = JSON.stringify(await workerFileToUrl(config, id));
}
} else {
let url = await fileToUrl$1(cleanUrl(id), config, this);
url = injectQuery(url, `${WORKER_FILE_ID}&type=${workerType}`);
urlCode = JSON.stringify(url);
}
if (urlRE.test(id)) {
return {
code: `export default ${urlCode}`,
map: { mappings: "" }
// Empty sourcemap to suppress Rollup warning
};
}
return {
code: `export default function WorkerWrapper(options) {
return new ${workerConstructor}(
${urlCode},
${workerTypeOption}
);
}`,
map: { mappings: "" }
// Empty sourcemap to suppress Rollup warning
};
},
renderChunk(code, chunk, outputOptions) {
let s;
const result = () => {
return s && {
code: s.toString(),
map: config.build.sourcemap ? s.generateMap({ hires: "boundary" }) : null
};
};
workerAssetUrlRE.lastIndex = 0;
if (workerAssetUrlRE.test(code)) {
const toRelativeRuntime = createToImportMetaURLBasedRelativeRuntime(
outputOptions.format,
config.isWorker
);
let match;
s = new MagicString(code);
workerAssetUrlRE.lastIndex = 0;
const workerMap = workerCache.get(config.mainConfig || config);
const { fileNameHash } = workerMap;
while (match = workerAssetUrlRE.exec(code)) {
const [full, hash] = match;
const filename = fileNameHash.get(hash);
const replacement = toOutputFilePathInJS(
filename,
"asset",
chunk.fileName,
"js",
config,
toRelativeRuntime
);
const replacementString = typeof replacement === "string" ? JSON.stringify(encodeURIPath(replacement)).slice(1, -1) : `"+${replacement.runtime}+"`;
s.update(match.index, match.index + full.length, replacementString);
}
}
return result();
},
generateBundle(opts, bundle) {
if (opts.__vite_skip_asset_emit__ || isWorker) {
return;
}
const workerMap = workerCache.get(config);
workerMap.assets.forEach((asset) => {
const duplicateAsset = bundle[asset.fileName];
if (duplicateAsset) {
const content = duplicateAsset.type === "asset" ? duplicateAsset.source : duplicateAsset.code;
if (isSameContent(content, asset.source)) {
return;
}
}
this.emitFile({
type: "asset",
fileName: asset.fileName,
originalFileName: asset.originalFileName,
source: asset.source
});
});
workerMap.assets.clear();
}
};
}
function isSameContent(a, b) {
if (typeof a === "string") {
if (typeof b === "string") {
return a === b;
}
return Buffer.from(a).equals(b);
}
return Buffer.from(b).equals(a);
}
function preAliasPlugin(config) {
const findPatterns = getAliasPatterns(config.resolve.alias);
const isConfiguredAsExternal = createIsConfiguredAsSsrExternal(config);
const isBuild = config.command === "build";
const fsUtils = getFsUtils(config);
return {
name: "vite:pre-alias",
async resolveId(id, importer, options) {
const ssr = options?.ssr === true;
const depsOptimizer = !isBuild && getDepsOptimizer(config, ssr);
if (importer && depsOptimizer && bareImportRE.test(id) && !options?.scan && id !== "@vite/client" && id !== "@vite/env") {
if (findPatterns.find((pattern) => matches(pattern, id))) {
const optimizedId = await tryOptimizedResolve(
depsOptimizer,
id,
importer,
config.resolve.preserveSymlinks,
config.packageCache
);
if (optimizedId) {
return optimizedId;
}
if (depsOptimizer.options.noDiscovery) {
return;
}
const resolved = await this.resolve(id, importer, {
...options,
custom: { ...options.custom, "vite:pre-alias": true }
});
if (resolved && !depsOptimizer.isOptimizedDepFile(resolved.id)) {
const optimizeDeps = depsOptimizer.options;
const resolvedId = cleanUrl(resolved.id);
const isVirtual = resolvedId === id || resolvedId.includes("\0");
if (!isVirtual && fsUtils.existsSync(resolvedId) && !moduleListContains(optimizeDeps.exclude, id) && path$n.isAbsolute(resolvedId) && (isInNodeModules$1(resolvedId) || optimizeDeps.include?.includes(id)) && isOptimizable(resolvedId, optimizeDeps) && !(isBuild && ssr && isConfiguredAsExternal(id, importer)) && (!ssr || optimizeAliasReplacementForSSR(resolvedId, optimizeDeps))) {
const optimizedInfo = depsOptimizer.registerMissingImport(
id,
resolvedId
);
return { id: depsOptimizer.getOptimizedDepId(optimizedInfo) };
}
}
return resolved;
}
}
}
};
}
function optimizeAliasReplacementForSSR(id, optimizeDeps) {
if (optimizeDeps.include?.includes(id)) {
return true;
}
return false;
}
function matches(pattern, importee) {
if (pattern instanceof RegExp) {
return pattern.test(importee);
}
if (importee.length < pattern.length) {
return false;
}
if (importee === pattern) {
return true;
}
return importee.startsWith(withTrailingSlash(pattern));
}
function getAliasPatterns(entries) {
if (!entries) {
return [];
}
if (Array.isArray(entries)) {
return entries.map((entry) => entry.find);
}
return Object.entries(entries).map(([find]) => find);
}
function getAliasPatternMatcher(entries) {
const patterns = getAliasPatterns(entries);
return (importee) => patterns.some((pattern) => matches(pattern, importee));
}
function err(e, pos) {
const error = new Error(e);
error.pos = pos;
return error;
}
function parseWorkerOptions(rawOpts, optsStartIndex) {
let opts = {};
try {
opts = evalValue(rawOpts);
} catch {
throw err(
"Vite is unable to parse the worker options as the value is not static. To ignore this error, please use /* @vite-ignore */ in the worker options.",
optsStartIndex
);
}
if (opts == null) {
return {};
}
if (typeof opts !== "object") {
throw err(
`Expected worker options to be an object, got ${typeof opts}`,
optsStartIndex
);
}
return opts;
}
function getWorkerType(raw, clean, i) {
const commaIndex = clean.indexOf(",", i);
if (commaIndex === -1) {
return "classic";
}
const endIndex = clean.indexOf(")", i);
if (commaIndex > endIndex) {
return "classic";
}
const workerOptString = raw.substring(commaIndex + 1, endIndex).replace(/\}[\s\S]*,/g, "}");
const hasViteIgnore = hasViteIgnoreRE.test(workerOptString);
if (hasViteIgnore) {
return "ignore";
}
const cleanWorkerOptString = clean.substring(commaIndex + 1, endIndex).trim();
if (!cleanWorkerOptString.length) {
return "classic";
}
const workerOpts = parseWorkerOptions(workerOptString, commaIndex + 1);
if (workerOpts.type && (workerOpts.type === "module" || workerOpts.type === "classic")) {
return workerOpts.type;
}
return "classic";
}
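// Usage sketch (illustrative, never called): how getWorkerType classifies the options argument
// of a worker constructor. The source snippet is an assumed example; in the plugin, the second
// argument is the source with string and comment contents stripped.
function exampleGetWorkerType() {
  const src = 'new Worker(new URL("./w.js", import.meta.url), { type: "module" })'
  // Index just past the inner `new URL(...)` expression, as the caller computes from the
  // regex match indices.
  const afterUrlExpr = src.indexOf('), {') + 1
  return getWorkerType(src, src, afterUrlExpr) // -> "module"
}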
function isIncludeWorkerImportMetaUrl(code) {
if ((code.includes("new Worker") || code.includes("new SharedWorker")) && code.includes("new URL") && code.includes(`import.meta.url`)) {
return true;
}
return false;
}
function workerImportMetaUrlPlugin(config) {
const isBuild = config.command === "build";
let workerResolver;
const fsResolveOptions = {
...config.resolve,
root: config.root,
isProduction: config.isProduction,
isBuild: config.command === "build",
packageCache: config.packageCache,
ssrConfig: config.ssr,
asSrc: true
};
return {
name: "vite:worker-import-meta-url",
shouldTransformCachedModule({ code }) {
if (isBuild && config.build.watch && isIncludeWorkerImportMetaUrl(code)) {
return true;
}
},
async transform(code, id, options) {
if (!options?.ssr && isIncludeWorkerImportMetaUrl(code)) {
let s;
const cleanString = stripLiteral(code);
const workerImportMetaUrlRE = /\bnew\s+(?:Worker|SharedWorker)\s*\(\s*(new\s+URL\s*\(\s*('[^']+'|"[^"]+"|`[^`]+`)\s*,\s*import\.meta\.url\s*\))/dg;
let match;
while (match = workerImportMetaUrlRE.exec(cleanString)) {
const [[, endIndex], [expStart, expEnd], [urlStart, urlEnd]] = match.indices;
const rawUrl = code.slice(urlStart, urlEnd);
if (rawUrl[0] === "`" && rawUrl.includes("${")) {
this.error(
`\`new URL(url, import.meta.url)\` is not supported in dynamic template strings.`,
expStart
);
}
s ||= new MagicString(code);
const workerType = getWorkerType(code, cleanString, endIndex);
const url = rawUrl.slice(1, -1);
let file;
if (url[0] === ".") {
file = path$n.resolve(path$n.dirname(id), url);
file = tryFsResolve(file, fsResolveOptions) ?? file;
} else {
workerResolver ??= config.createResolver({
extensions: [],
tryIndex: false,
preferRelative: true
});
file = await workerResolver(url, id);
file ??= url[0] === "/" ? slash$1(path$n.join(config.publicDir, url)) : slash$1(path$n.resolve(path$n.dirname(id), url));
}
if (isBuild && config.isWorker && config.bundleChain.at(-1) === cleanUrl(file)) {
s.update(expStart, expEnd, "self.location.href");
} else {
let builtUrl;
if (isBuild) {
builtUrl = await workerFileToUrl(config, file);
} else {
builtUrl = await fileToUrl$1(cleanUrl(file), config, this);
builtUrl = injectQuery(
builtUrl,
`${WORKER_FILE_ID}&type=${workerType}`
);
}
s.update(
expStart,
expEnd,
`new URL(/* @vite-ignore */ ${JSON.stringify(builtUrl)}, import.meta.url)`
);
}
}
if (s) {
return transformStableResult(s, id, config);
}
return null;
}
}
};
}
function assetImportMetaUrlPlugin(config) {
const { publicDir } = config;
let assetResolver;
const fsResolveOptions = {
...config.resolve,
root: config.root,
isProduction: config.isProduction,
isBuild: config.command === "build",
packageCache: config.packageCache,
ssrConfig: config.ssr,
asSrc: true
};
return {
name: "vite:asset-import-meta-url",
async transform(code, id, options) {
if (!options?.ssr && id !== preloadHelperId && id !== CLIENT_ENTRY && code.includes("new URL") && code.includes(`import.meta.url`)) {
let s;
const assetImportMetaUrlRE = /\bnew\s+URL\s*\(\s*('[^']+'|"[^"]+"|`[^`]+`)\s*,\s*import\.meta\.url\s*(?:,\s*)?\)/dg;
const cleanString = stripLiteral(code);
let match;
while (match = assetImportMetaUrlRE.exec(cleanString)) {
const [[startIndex, endIndex], [urlStart, urlEnd]] = match.indices;
if (hasViteIgnoreRE.test(code.slice(startIndex, urlStart))) continue;
const rawUrl = code.slice(urlStart, urlEnd);
if (!s) s = new MagicString(code);
if (rawUrl[0] === "`" && rawUrl.includes("${")) {
const queryDelimiterIndex = getQueryDelimiterIndex(rawUrl);
const hasQueryDelimiter = queryDelimiterIndex !== -1;
const pureUrl = hasQueryDelimiter ? rawUrl.slice(0, queryDelimiterIndex) + "`" : rawUrl;
const queryString = hasQueryDelimiter ? rawUrl.slice(queryDelimiterIndex, -1) : "";
const ast = this.parse(pureUrl);
const templateLiteral = ast.body[0].expression;
if (templateLiteral.expressions.length) {
const pattern = buildGlobPattern(templateLiteral);
if (pattern.startsWith("**")) {
continue;
}
const globOptions = {
eager: true,
import: "default",
// A hack to allow 'as' & 'query' to exist at the same time
query: injectQuery(queryString, "url")
};
s.update(
startIndex,
endIndex,
`new URL((import.meta.glob(${JSON.stringify(
pattern
)}, ${JSON.stringify(
globOptions
)}))[${pureUrl}], import.meta.url)`
);
continue;
}
}
const url = rawUrl.slice(1, -1);
let file;
if (url[0] === ".") {
file = slash$1(path$n.resolve(path$n.dirname(id), url));
file = tryFsResolve(file, fsResolveOptions) ?? file;
} else {
assetResolver ??= config.createResolver({
extensions: [],
mainFields: [],
tryIndex: false,
preferRelative: true
});
file = await assetResolver(url, id);
file ??= url[0] === "/" ? slash$1(path$n.join(publicDir, url)) : slash$1(path$n.resolve(path$n.dirname(id), url));
}
let builtUrl;
if (file) {
try {
if (publicDir && isParentDirectory(publicDir, file)) {
const publicPath = "/" + path$n.posix.relative(publicDir, file);
builtUrl = await fileToUrl$1(publicPath, config, this);
} else {
builtUrl = await fileToUrl$1(file, config, this);
}
} catch {
}
}
if (!builtUrl) {
const rawExp = code.slice(startIndex, endIndex);
config.logger.warnOnce(
`
${rawExp} doesn't exist at build time; it will remain unchanged to be resolved at runtime. If this is intended, you can use the /* @vite-ignore */ comment to suppress this warning.`
);
builtUrl = url;
}
s.update(
startIndex,
endIndex,
`new URL(${JSON.stringify(builtUrl)}, import.meta.url)`
);
}
if (s) {
return transformStableResult(s, id, config);
}
}
return null;
}
};
}
function buildGlobPattern(ast) {
let pattern = "";
let lastElementIndex = -1;
for (const exp of ast.expressions) {
for (let i = lastElementIndex + 1; i < ast.quasis.length; i++) {
const el = ast.quasis[i];
if (el.end < exp.start) {
pattern += el.value.raw;
lastElementIndex = i;
}
}
pattern += "**";
}
for (let i = lastElementIndex + 1; i < ast.quasis.length; i++) {
pattern += ast.quasis[i].value.raw;
}
return pattern;
}
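// Illustrative sketch (never called): buildGlobPattern replaces each `${...}` expression of a
// template-literal URL with `**` and keeps the static parts. The AST below is a minimal
// assumed shape for `./imgs/${name}.png`; in the plugin it comes from this.parse().
function exampleBuildGlobPattern() {
  const templateLiteral = {
    expressions: [{ start: 10, end: 14 }],
    quasis: [
      { start: 1, end: 8, value: { raw: './imgs/' } },
      { start: 15, end: 19, value: { raw: '.png' } }
    ]
  }
  return buildGlobPattern(templateLiteral) // -> './imgs/**.png'
}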
function getQueryDelimiterIndex(rawUrl) {
let bracketsStack = 0;
for (let i = 0; i < rawUrl.length; i++) {
if (rawUrl[i] === "{") {
bracketsStack++;
} else if (rawUrl[i] === "}") {
bracketsStack--;
} else if (rawUrl[i] === "?" && bracketsStack === 0) {
return i;
}
}
return -1;
}
function metadataPlugin() {
return {
name: "vite:build-metadata",
async renderChunk(_code, chunk) {
chunk.viteMetadata = {
importedAssets: /* @__PURE__ */ new Set(),
importedCss: /* @__PURE__ */ new Set()
};
return null;
}
};
}
// Astring is a tiny and fast JavaScript code generator from an ESTree-compliant AST.
//
// Astring was written by David Bonnet and released under an MIT license.
//
// The Git repository for Astring is available at:
// https://github.com/davidbonnet/astring.git
//
// Please use the GitHub bug tracker to report issues:
// https://github.com/davidbonnet/astring/issues
/* c8 ignore if */
if (!String.prototype.repeat) {
/* c8 ignore next */
throw new Error(
'String.prototype.repeat is undefined, see https://github.com/davidbonnet/astring#installation',
)
}
/* c8 ignore if */
if (!String.prototype.endsWith) {
/* c8 ignore next */
throw new Error(
'String.prototype.endsWith is undefined, see https://github.com/davidbonnet/astring#installation',
)
}
class VariableDynamicImportError extends Error {}
/* eslint-disable-next-line no-template-curly-in-string */
const example = 'For example: import(`./foo/${bar}.js`).';
function sanitizeString(str) {
if (str === '') return str;
if (str.includes('*')) {
throw new VariableDynamicImportError('A dynamic import cannot contain * characters.');
}
return glob.escapePath(str);
}
function templateLiteralToGlob(node) {
let glob = '';
for (let i = 0; i < node.quasis.length; i += 1) {
glob += sanitizeString(node.quasis[i].value.raw);
if (node.expressions[i]) {
glob += expressionToGlob(node.expressions[i]);
}
}
return glob;
}
function callExpressionToGlob(node) {
const { callee } = node;
if (
callee.type === 'MemberExpression' &&
callee.property.type === 'Identifier' &&
callee.property.name === 'concat'
) {
return `${expressionToGlob(callee.object)}${node.arguments.map(expressionToGlob).join('')}`;
}
return '*';
}
function binaryExpressionToGlob(node) {
if (node.operator !== '+') {
throw new VariableDynamicImportError(`${node.operator} operator is not supported.`);
}
return `${expressionToGlob(node.left)}${expressionToGlob(node.right)}`;
}
function expressionToGlob(node) {
switch (node.type) {
case 'TemplateLiteral':
return templateLiteralToGlob(node);
case 'CallExpression':
return callExpressionToGlob(node);
case 'BinaryExpression':
return binaryExpressionToGlob(node);
case 'Literal': {
return sanitizeString(node.value);
}
default:
return '*';
}
}
const defaultProtocol = 'file:';
const ignoredProtocols = ['data:', 'http:', 'https:'];
function shouldIgnore(glob) {
const containsAsterisk = glob.includes('*');
const globURL = new URL(glob, defaultProtocol);
const containsIgnoredProtocol = ignoredProtocols.some(
(ignoredProtocol) => ignoredProtocol === globURL.protocol
);
return !containsAsterisk || containsIgnoredProtocol;
}
function dynamicImportToGlob(node, sourceString) {
let glob = expressionToGlob(node);
if (shouldIgnore(glob)) {
return null;
}
glob = glob.replace(/\*\*/g, '*');
if (glob.startsWith('*')) {
throw new VariableDynamicImportError(
`invalid import "${sourceString}". It cannot be statically analyzed. Variable dynamic imports must start with ./ and be limited to a specific directory. ${example}`
);
}
if (glob.startsWith('/')) {
throw new VariableDynamicImportError(
`invalid import "${sourceString}". Variable absolute imports are not supported, imports must start with ./ in the static part of the import. ${example}`
);
}
if (!glob.startsWith('./') && !glob.startsWith('../')) {
throw new VariableDynamicImportError(
`invalid import "${sourceString}". Variable bare imports are not supported, imports must start with ./ in the static part of the import. ${example}`
);
}
// Disallow ./*.ext
const ownDirectoryStarExtension = /^\.\/\*\.[\w]+$/;
if (ownDirectoryStarExtension.test(glob)) {
throw new VariableDynamicImportError(
`${
`invalid import "${sourceString}". Variable imports cannot import their own directory, ` +
'place imports in a separate directory or make the import filename more specific. '
}${example}`
);
}
if (require$$0$4.extname(glob) === '') {
throw new VariableDynamicImportError(
`invalid import "${sourceString}". A file extension must be included in the static part of the import. ${example}`
);
}
return glob;
}
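// Usage sketch (illustrative, never called): how a variable dynamic import specifier becomes a
// glob. The node is a minimal assumed AST shape for `./locales/${lang}.json`.
function exampleDynamicImportToGlob() {
  const node = {
    type: 'TemplateLiteral',
    expressions: [{ type: 'Identifier', name: 'lang' }],
    quasis: [{ value: { raw: './locales/' } }, { value: { raw: '.json' } }]
  }
  return dynamicImportToGlob(node, './locales/${lang}.json')
  // -> './locales/*.json' (each expression becomes '*'; runs of '**' are collapsed to '*')
}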
const dynamicImportHelperId = "\0vite/dynamic-import-helper.js";
const relativePathRE = /^\.{1,2}\//;
const hasDynamicImportRE = /\bimport\s*[(/]/;
const dynamicImportHelper = (glob, path, segs) => {
const v = glob[path];
if (v) {
return typeof v === "function" ? v() : Promise.resolve(v);
}
return new Promise((_, reject) => {
(typeof queueMicrotask === "function" ? queueMicrotask : setTimeout)(
reject.bind(
null,
new Error(
"Unknown variable dynamic import: " + path + (path.split("/").length !== segs ? ". Note that variables only represent file names one level deep." : "")
)
)
);
});
};
function parseDynamicImportPattern(strings) {
const filename = strings.slice(1, -1);
const ast = parseAst(strings).body[0].expression;
const userPatternQuery = dynamicImportToGlob(ast, filename);
if (!userPatternQuery) {
return null;
}
const [userPattern] = userPatternQuery.split(
// ? is escaped on posix OS
requestQueryMaybeEscapedSplitRE,
2
);
let [rawPattern, search] = filename.split(requestQuerySplitRE, 2);
let globParams = null;
if (search) {
search = "?" + search;
if (workerOrSharedWorkerRE.test(search) || urlRE.test(search) || rawRE.test(search)) {
globParams = {
query: search,
import: "*"
};
} else {
globParams = {
query: search
};
}
}
return {
globParams,
userPattern,
rawPattern
};
}
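// Rewrites one variable dynamic import source into an import.meta.glob
// expression. A rough sketch, assuming a template-literal import:
//   importSource: "`./locales/${lang}.js`"          (backticks included)
//   result: {
//     glob:       '(import.meta.glob("./locales/*.js"))',
//     pattern:    "./locales/*.js",
//     rawPattern: "./locales/${lang}.js"  // kept as a template so it can be
//   }                                     // evaluated again at runtime
// Bare specifiers are resolved first and turned into a path relative to the
// importer before the pattern is extracted.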
async function transformDynamicImport(importSource, importer, resolve, root) {
if (importSource[1] !== "." && importSource[1] !== "/") {
const resolvedFileName = await resolve(importSource.slice(1, -1), importer);
if (!resolvedFileName) {
return null;
}
const relativeFileName = normalizePath$3(
posix$1.relative(
posix$1.dirname(normalizePath$3(importer)),
normalizePath$3(resolvedFileName)
)
);
importSource = "`" + (relativeFileName[0] === "." ? "" : "./") + relativeFileName + "`";
}
const dynamicImportPattern = parseDynamicImportPattern(importSource);
if (!dynamicImportPattern) {
return null;
}
const { globParams, rawPattern, userPattern } = dynamicImportPattern;
const params = globParams ? `, ${JSON.stringify(globParams)}` : "";
let newRawPattern = posix$1.relative(
posix$1.dirname(importer),
await toAbsoluteGlob(rawPattern, root, importer, resolve)
);
if (!relativePathRE.test(newRawPattern)) {
newRawPattern = `./${newRawPattern}`;
}
const exp = `(import.meta.glob(${JSON.stringify(userPattern)}${params}))`;
return {
rawPattern: newRawPattern,
pattern: userPattern,
glob: exp
};
}
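// Plugin that replaces variable dynamic imports with a call to the runtime
// helper served from dynamicImportHelperId. A before/after sketch:
//   before: const mod = await import(`./mods/${name}.js`)
//   after:  const mod = await __variableDynamicImportRuntimeHelper(
//             (import.meta.glob("./mods/*.js")), `./mods/${name}.js`, 3)
// The last argument is the segment count of the pattern; the helper only uses
// it to improve the "Unknown variable dynamic import" error message.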
function dynamicImportVarsPlugin(config) {
const resolve = config.createResolver({
preferRelative: true,
tryIndex: false,
extensions: []
});
const { include, exclude, warnOnError } = config.build.dynamicImportVarsOptions;
const filter = createFilter(include, exclude);
return {
name: "vite:dynamic-import-vars",
resolveId(id) {
if (id === dynamicImportHelperId) {
return id;
}
},
load(id) {
if (id === dynamicImportHelperId) {
return "export default " + dynamicImportHelper.toString();
}
},
async transform(source, importer) {
if (!filter(importer) || importer === CLIENT_ENTRY || !hasDynamicImportRE.test(source)) {
return;
}
await init;
let imports = [];
try {
imports = parse$d(source)[0];
} catch (e) {
return null;
}
if (!imports.length) {
return null;
}
let s;
let needDynamicImportHelper = false;
for (let index = 0; index < imports.length; index++) {
const {
s: start,
e: end,
ss: expStart,
se: expEnd,
d: dynamicIndex
} = imports[index];
if (dynamicIndex === -1 || source[start] !== "`") {
continue;
}
if (hasViteIgnoreRE.test(source.slice(expStart, expEnd))) {
continue;
}
s ||= new MagicString(source);
let result;
try {
result = await transformDynamicImport(
source.slice(start, end),
importer,
resolve,
config.root
);
} catch (error) {
if (warnOnError) {
this.warn(error);
} else {
this.error(error);
}
}
if (!result) {
continue;
}
const { rawPattern, glob } = result;
needDynamicImportHelper = true;
s.overwrite(
expStart,
expEnd,
`__variableDynamicImportRuntimeHelper(${glob}, \`${rawPattern}\`, ${rawPattern.split("/").length})`
);
}
if (s) {
if (needDynamicImportHelper) {
s.prepend(
`import __variableDynamicImportRuntimeHelper from "${dynamicImportHelperId}";`
);
}
return transformStableResult(s, importer, config);
}
}
};
}
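// Assembles the ordered plugin pipeline for a resolved config: internal pre
// plugins, user `pre` plugins, core resolve/css/esbuild/json/wasm/worker/asset
// plugins, user normal plugins, build-only pre plugins, user `post` plugins,
// build-only post plugins, and (in dev only) the server-side analysis plugins.
// Disabled features produce falsy entries that are filtered out at the end.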
async function resolvePlugins(config, prePlugins, normalPlugins, postPlugins) {
const isBuild = config.command === "build";
const isWorker = config.isWorker;
const buildPlugins = isBuild ? await (await Promise.resolve().then(function () { return build$1; })).resolveBuildPlugins(config) : { pre: [], post: [] };
const { modulePreload } = config.build;
const depsOptimizerEnabled = !isBuild && (isDepsOptimizerEnabled(config, false) || isDepsOptimizerEnabled(config, true));
return [
depsOptimizerEnabled ? optimizedDepsPlugin(config) : null,
isBuild ? metadataPlugin() : null,
!isWorker ? watchPackageDataPlugin(config.packageCache) : null,
preAliasPlugin(config),
alias$1({
entries: config.resolve.alias,
customResolver: viteAliasCustomResolver
}),
...prePlugins,
modulePreload !== false && modulePreload.polyfill ? modulePreloadPolyfillPlugin(config) : null,
resolvePlugin({
...config.resolve,
root: config.root,
isProduction: config.isProduction,
isBuild,
packageCache: config.packageCache,
ssrConfig: config.ssr,
asSrc: true,
fsUtils: getFsUtils(config),
getDepsOptimizer: isBuild ? void 0 : (ssr) => getDepsOptimizer(config, ssr),
shouldExternalize: isBuild && config.build.ssr ? (id, importer) => shouldExternalizeForSSR(id, importer, config) : void 0
}),
htmlInlineProxyPlugin(config),
cssPlugin(config),
config.esbuild !== false ? esbuildPlugin(config) : null,
jsonPlugin(
{
namedExports: true,
...config.json
},
isBuild
),
wasmHelperPlugin(config),
webWorkerPlugin(config),
assetPlugin(config),
...normalPlugins,
wasmFallbackPlugin(),
definePlugin(config),
cssPostPlugin(config),
isBuild && buildHtmlPlugin(config),
workerImportMetaUrlPlugin(config),
assetImportMetaUrlPlugin(config),
...buildPlugins.pre,
dynamicImportVarsPlugin(config),
importGlobPlugin(config),
...postPlugins,
...buildPlugins.post,
// internal server-only plugins are always applied after everything else
...isBuild ? [] : [
clientInjectionsPlugin(config),
cssAnalysisPlugin(config),
importAnalysisPlugin(config)
]
].filter(Boolean);
}
function createPluginHookUtils(plugins) {
const sortedPluginsCache = /* @__PURE__ */ new Map();
function getSortedPlugins(hookName) {
if (sortedPluginsCache.has(hookName))
return sortedPluginsCache.get(hookName);
const sorted = getSortedPluginsByHook(hookName, plugins);
sortedPluginsCache.set(hookName, sorted);
return sorted;
}
function getSortedPluginHooks(hookName) {
const plugins2 = getSortedPlugins(hookName);
return plugins2.map((p) => getHookHandler(p[hookName])).filter(Boolean);
}
return {
getSortedPlugins,
getSortedPluginHooks
};
}
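// Orders the plugins for a single hook using the `order` field of object-style
// hooks: "pre" hooks first, then plain hooks, then "post" hooks, keeping the
// original relative order inside each group. For example (hypothetical
// plugins) A{order:"post"}, B{handler}, C{order:"pre"} sort to [C, B, A].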
function getSortedPluginsByHook(hookName, plugins) {
const sortedPlugins = [];
let pre = 0, normal = 0, post = 0;
for (const plugin of plugins) {
const hook = plugin[hookName];
if (hook) {
if (typeof hook === "object") {
if (hook.order === "pre") {
sortedPlugins.splice(pre++, 0, plugin);
continue;
}
if (hook.order === "post") {
sortedPlugins.splice(pre + normal + post++, 0, plugin);
continue;
}
}
sortedPlugins.splice(pre + normal++, 0, plugin);
}
}
return sortedPlugins;
}
function getHookHandler(hook) {
return typeof hook === "object" ? hook.handler : hook;
}
const viteAliasCustomResolver = async function(id, importer, options) {
const resolved = await this.resolve(id, importer, options);
return resolved || { id, meta: { "vite:alias": { noResolved: true } } };
};
function ansiRegex({onlyFirst = false} = {}) {
const pattern = [
'[\\u001B\\u009B][[\\]()#;?]*(?:(?:(?:(?:;[-a-zA-Z\\d\\/#&.:=?%@~_]+)*|[a-zA-Z\\d]+(?:;[-a-zA-Z\\d\\/#&.:=?%@~_]*)*)?\\u0007)',
'(?:(?:\\d{1,4}(?:;\\d{0,4})*)?[\\dA-PR-TZcf-ntqry=><~]))'
].join('|');
return new RegExp(pattern, onlyFirst ? undefined : 'g');
}
const regex = ansiRegex();
function stripAnsi(string) {
if (typeof string !== 'string') {
throw new TypeError(`Expected a \`string\`, got \`${typeof string}\``);
}
// Even though the regex is global, we don't need to reset the `.lastIndex`
// because unlike `.exec()` and `.test()`, `.replace()` does it automatically
// and doing it manually has a performance penalty.
return string.replace(regex, '');
}
function prepareError(err) {
return {
message: stripAnsi(err.message),
stack: stripAnsi(cleanStack(err.stack || "")),
id: err.id,
frame: stripAnsi(err.frame || ""),
plugin: err.plugin,
pluginCode: err.pluginCode?.toString(),
loc: err.loc
};
}
function buildErrorMessage(err, args = [], includeStack = true) {
if (err.plugin) args.push(` Plugin: ${colors$1.magenta(err.plugin)}`);
const loc = err.loc ? `:${err.loc.line}:${err.loc.column}` : "";
if (err.id) args.push(` File: ${colors$1.cyan(err.id)}${loc}`);
if (err.frame) args.push(colors$1.yellow(pad$1(err.frame)));
if (includeStack && err.stack) args.push(pad$1(cleanStack(err.stack)));
return args.join("\n");
}
function cleanStack(stack) {
return stack.split(/\n/).filter((l) => /^\s*at/.test(l)).join("\n");
}
function logError(server, err) {
const msg = buildErrorMessage(err, [
colors$1.red(`Internal server error: ${err.message}`)
]);
server.config.logger.error(msg, {
clear: true,
timestamp: true,
error: err
});
server.hot.send({
type: "error",
err: prepareError(err)
});
}
function errorMiddleware(server, allowNext = false) {
return function viteErrorMiddleware(err, _req, res, next) {
logError(server, err);
if (allowNext) {
next();
} else {
res.statusCode = 500;
res.end(`
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8" />
<title>Error</title>
<script type="module">
const error = ${JSON.stringify(prepareError(err)).replace(
/</g,
"\\u003c"
)}
try {
const { ErrorOverlay } = await import(${JSON.stringify(path$n.posix.join(server.config.base, CLIENT_PUBLIC_PATH))})
document.body.appendChild(new ErrorOverlay(error))
} catch {
const h = (tag, text) => {
const el = document.createElement(tag)
el.textContent = text
return el
}
document.body.appendChild(h('h1', 'Internal Server Error'))
document.body.appendChild(h('h2', error.message))
document.body.appendChild(h('pre', error.stack))
document.body.appendChild(h('p', '(Error overlay failed to load)'))
}
<\/script>
</head>
<body>
</body>
</html>
`);
}
};
}
const noop$3 = () => {
};
const EMPTY_OBJECT = Object.freeze({});
const debugSourcemapCombineFilter = process.env.DEBUG_VITE_SOURCEMAP_COMBINE_FILTER;
const debugSourcemapCombine = createDebugger("vite:sourcemap-combine", {
onlyWhenFocused: true
});
const debugResolve = createDebugger("vite:resolve");
const debugPluginResolve = createDebugger("vite:plugin-resolve", {
onlyWhenFocused: "vite:plugin"
});
const debugPluginTransform = createDebugger("vite:plugin-transform", {
onlyWhenFocused: "vite:plugin"
});
const ERR_CLOSED_SERVER = "ERR_CLOSED_SERVER";
function throwClosedServerError() {
const err = new Error(
"The server is being restarted or closed. Request is outdated"
);
err.code = ERR_CLOSED_SERVER;
throw err;
}
async function createPluginContainer(config, moduleGraph, watcher) {
const container = new PluginContainer(config, moduleGraph, watcher);
await container.resolveRollupOptions();
return container;
}
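// A minimal, Rollup-compatible plugin driver for the dev server: it runs the
// options/buildStart/resolveId/load/transform/watchChange/buildEnd/closeBundle
// hooks of the configured plugins without producing a bundle. Hedged usage
// sketch (moduleGraph and watcher come from the dev server; the importer path
// is illustrative):
//   const container = await createPluginContainer(config, moduleGraph, watcher)
//   const resolved = await container.resolveId('./foo.js', '/src/main.js')
//   const loaded = resolved && await container.load(resolved.id)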
class PluginContainer {
/**
* @internal use `createPluginContainer` instead
*/
constructor(config, moduleGraph, watcher, plugins = config.plugins) {
this.config = config;
this.moduleGraph = moduleGraph;
this.watcher = watcher;
this.plugins = plugins;
this.minimalContext = {
meta: {
rollupVersion,
watchMode: true
},
debug: noop$3,
info: noop$3,
warn: noop$3,
// @ts-expect-error noop
error: noop$3
};
const utils = createPluginHookUtils(plugins);
this.getSortedPlugins = utils.getSortedPlugins;
this.getSortedPluginHooks = utils.getSortedPluginHooks;
}
_pluginContextMap = /* @__PURE__ */ new Map();
_pluginContextMapSsr = /* @__PURE__ */ new Map();
_resolvedRollupOptions;
_processing = /* @__PURE__ */ new Set();
_seenResolves = {};
_closed = false;
// `_addedImports` from the `load()` hook gets saved here so it can be reused in the `transform()` hook
_moduleNodeToLoadAddedImports = /* @__PURE__ */ new WeakMap();
getSortedPluginHooks;
getSortedPlugins;
watchFiles = /* @__PURE__ */ new Set();
minimalContext;
_updateModuleLoadAddedImports(id, addedImports) {
const module = this.moduleGraph?.getModuleById(id);
if (module) {
this._moduleNodeToLoadAddedImports.set(module, addedImports);
}
}
_getAddedImports(id) {
const module = this.moduleGraph?.getModuleById(id);
return module ? this._moduleNodeToLoadAddedImports.get(module) || null : null;
}
getModuleInfo(id) {
const module = this.moduleGraph?.getModuleById(id);
if (!module) {
return null;
}
if (!module.info) {
module.info = new Proxy(
{ id, meta: module.meta || EMPTY_OBJECT },
// throw when an unsupported ModuleInfo property is accessed,
// so that incompatible plugins fail in a non-cryptic way.
{
get(info, key) {
if (key in info) {
return info[key];
}
if (key === "then") {
return void 0;
}
throw Error(
`[vite] The "${key}" property of ModuleInfo is not supported.`
);
}
}
);
}
return module.info ?? null;
}
// keeps track of hook promises so that we can wait for them all to finish upon closing the server
handleHookPromise(maybePromise) {
if (!maybePromise?.then) {
return maybePromise;
}
const promise = maybePromise;
this._processing.add(promise);
return promise.finally(() => this._processing.delete(promise));
}
get options() {
return this._resolvedRollupOptions;
}
async resolveRollupOptions() {
if (!this._resolvedRollupOptions) {
let options = this.config.build.rollupOptions;
for (const optionsHook of this.getSortedPluginHooks("options")) {
if (this._closed) {
throwClosedServerError();
}
options = await this.handleHookPromise(
optionsHook.call(this.minimalContext, options)
) || options;
}
this._resolvedRollupOptions = options;
}
return this._resolvedRollupOptions;
}
_getPluginContext(plugin, ssr) {
const map = ssr ? this._pluginContextMapSsr : this._pluginContextMap;
if (!map.has(plugin)) {
const ctx = new PluginContext(plugin, this, ssr);
map.set(plugin, ctx);
}
return map.get(plugin);
}
// parallel, ignores returns
async hookParallel(hookName, context, args) {
const parallelPromises = [];
for (const plugin of this.getSortedPlugins(hookName)) {
const hook = plugin[hookName];
if (!hook) continue;
const handler = getHookHandler(hook);
if (hook.sequential) {
await Promise.all(parallelPromises);
parallelPromises.length = 0;
await handler.apply(context(plugin), args(plugin));
} else {
parallelPromises.push(handler.apply(context(plugin), args(plugin)));
}
}
await Promise.all(parallelPromises);
}
async buildStart(_options) {
await this.handleHookPromise(
this.hookParallel(
"buildStart",
(plugin) => this._getPluginContext(plugin, false),
() => [this.options]
)
);
}
async resolveId(rawId, importer = join$2(this.config.root, "index.html"), options) {
const skip = options?.skip;
const ssr = options?.ssr;
const scan = !!options?.scan;
const ctx = new ResolveIdContext(this, !!ssr, skip, scan);
const resolveStart = debugResolve ? performance$1.now() : 0;
let id = null;
const partial = {};
for (const plugin of this.getSortedPlugins("resolveId")) {
if (this._closed && !ssr) throwClosedServerError();
if (!plugin.resolveId) continue;
if (skip?.has(plugin)) continue;
ctx._plugin = plugin;
const pluginResolveStart = debugPluginResolve ? performance$1.now() : 0;
const handler = getHookHandler(plugin.resolveId);
const result = await this.handleHookPromise(
handler.call(ctx, rawId, importer, {
attributes: options?.attributes ?? {},
custom: options?.custom,
isEntry: !!options?.isEntry,
ssr,
scan
})
);
if (!result) continue;
if (typeof result === "string") {
id = result;
} else {
id = result.id;
Object.assign(partial, result);
}
debugPluginResolve?.(
timeFrom(pluginResolveStart),
plugin.name,
prettifyUrl(id, this.config.root)
);
break;
}
if (debugResolve && rawId !== id && !rawId.startsWith(FS_PREFIX)) {
const key = rawId + id;
if (!this._seenResolves[key]) {
this._seenResolves[key] = true;
debugResolve(
`${timeFrom(resolveStart)} ${colors$1.cyan(rawId)} -> ${colors$1.dim(
id
)}`
);
}
}
if (id) {
partial.id = isExternalUrl(id) ? id : normalizePath$3(id);
return partial;
} else {
return null;
}
}
async load(id, options) {
const ssr = options?.ssr;
const ctx = new LoadPluginContext(this, !!ssr);
for (const plugin of this.getSortedPlugins("load")) {
if (this._closed && !ssr) throwClosedServerError();
if (!plugin.load) continue;
ctx._plugin = plugin;
const handler = getHookHandler(plugin.load);
const result = await this.handleHookPromise(
handler.call(ctx, id, { ssr })
);
if (result != null) {
if (isObject$1(result)) {
ctx._updateModuleInfo(id, result);
}
this._updateModuleLoadAddedImports(id, ctx._addedImports);
return result;
}
}
this._updateModuleLoadAddedImports(id, ctx._addedImports);
return null;
}
async transform(code, id, options) {
const inMap = options?.inMap;
const ssr = options?.ssr;
const ctx = new TransformPluginContext(
this,
id,
code,
inMap,
!!ssr
);
ctx._addedImports = this._getAddedImports(id);
for (const plugin of this.getSortedPlugins("transform")) {
if (this._closed && !ssr) throwClosedServerError();
if (!plugin.transform) continue;
ctx._updateActiveInfo(plugin, id, code);
const start = debugPluginTransform ? performance$1.now() : 0;
let result;
const handler = getHookHandler(plugin.transform);
try {
result = await this.handleHookPromise(
handler.call(ctx, code, id, { ssr })
);
} catch (e) {
ctx.error(e);
}
if (!result) continue;
debugPluginTransform?.(
timeFrom(start),
plugin.name,
prettifyUrl(id, this.config.root)
);
if (isObject$1(result)) {
if (result.code !== void 0) {
code = result.code;
if (result.map) {
if (debugSourcemapCombine) {
result.map.name = plugin.name;
}
ctx.sourcemapChain.push(result.map);
}
}
ctx._updateModuleInfo(id, result);
} else {
code = result;
}
}
return {
code,
map: ctx._getCombinedSourcemap()
};
}
async watchChange(id, change) {
await this.hookParallel(
"watchChange",
(plugin) => this._getPluginContext(plugin, false),
() => [id, change]
);
}
async close() {
if (this._closed) return;
this._closed = true;
await Promise.allSettled(Array.from(this._processing));
await this.hookParallel(
"buildEnd",
(plugin) => this._getPluginContext(plugin, false),
() => []
);
await this.hookParallel(
"closeBundle",
(plugin) => this._getPluginContext(plugin, false),
() => []
);
}
}
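// Per-plugin hook context passed to resolveId/load/transform handlers. It
// implements the parts of Rollup's plugin context that make sense during dev
// (resolve, load, getModuleInfo, addWatchFile, warn/error) and warns when a
// build-only method such as emitFile() or getFileName() is called in serve
// mode.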
class PluginContext {
constructor(_plugin, _container, ssr) {
this._plugin = _plugin;
this._container = _container;
this.ssr = ssr;
this.meta = this._container.minimalContext.meta;
}
_scan = false;
_resolveSkips;
_activeId = null;
_activeCode = null;
meta;
parse(code, opts) {
return parseAst(code, opts);
}
getModuleInfo(id) {
return this._container.getModuleInfo(id);
}
async resolve(id, importer, options) {
let skip;
if (options?.skipSelf !== false && this._plugin) {
skip = new Set(this._resolveSkips);
skip.add(this._plugin);
}
let out = await this._container.resolveId(id, importer, {
attributes: options?.attributes,
custom: options?.custom,
isEntry: !!options?.isEntry,
skip,
ssr: this.ssr,
scan: this._scan
});
if (typeof out === "string") out = { id: out };
return out;
}
async load(options) {
await this._container.moduleGraph?.ensureEntryFromUrl(
unwrapId$1(options.id),
this.ssr
);
this._updateModuleInfo(options.id, options);
const loadResult = await this._container.load(options.id, {
ssr: this.ssr
});
const code = typeof loadResult === "object" ? loadResult?.code : loadResult;
if (code != null) {
await this._container.transform(code, options.id, { ssr: this.ssr });
}
const moduleInfo = this.getModuleInfo(options.id);
if (!moduleInfo) throw Error(`Failed to load module with id ${options.id}`);
return moduleInfo;
}
_updateModuleInfo(id, { meta }) {
if (meta) {
const moduleInfo = this.getModuleInfo(id);
if (moduleInfo) {
moduleInfo.meta = { ...moduleInfo.meta, ...meta };
}
}
}
getModuleIds() {
return this._container.moduleGraph ? this._container.moduleGraph.idToModuleMap.keys() : Array.prototype[Symbol.iterator]();
}
addWatchFile(id) {
this._container.watchFiles.add(id);
if (this._container.watcher)
ensureWatchedFile(
this._container.watcher,
id,
this._container.config.root
);
}
getWatchFiles() {
return [...this._container.watchFiles];
}
emitFile(assetOrFile) {
this._warnIncompatibleMethod(`emitFile`);
return "";
}
setAssetSource() {
this._warnIncompatibleMethod(`setAssetSource`);
}
getFileName() {
this._warnIncompatibleMethod(`getFileName`);
return "";
}
warn(e, position) {
const err = this._formatError(typeof e === "function" ? e() : e, position);
const msg = buildErrorMessage(
err,
[colors$1.yellow(`warning: ${err.message}`)],
false
);
this._container.config.logger.warn(msg, {
clear: true,
timestamp: true
});
}
error(e, position) {
throw this._formatError(e, position);
}
debug = noop$3;
info = noop$3;
_formatError(e, position) {
const err = typeof e === "string" ? new Error(e) : e;
if (err.pluginCode) {
return err;
}
if (this._plugin) err.plugin = this._plugin.name;
if (this._activeId && !err.id) err.id = this._activeId;
if (this._activeCode) {
err.pluginCode = this._activeCode;
const pos = position ?? err.pos ?? err.position;
if (pos != null) {
let errLocation;
try {
errLocation = numberToPos(this._activeCode, pos);
} catch (err2) {
this._container.config.logger.error(
colors$1.red(
`Error in error handler:
${err2.stack || err2.message}
`
),
// print extra newline to separate the two errors
{ error: err2 }
);
throw err;
}
err.loc = err.loc || {
file: err.id,
...errLocation
};
err.frame = err.frame || generateCodeFrame(this._activeCode, pos);
} else if (err.loc) {
if (!err.frame) {
let code = this._activeCode;
if (err.loc.file) {
err.id = normalizePath$3(err.loc.file);
try {
code = fs__default.readFileSync(err.loc.file, "utf-8");
} catch {
}
}
err.frame = generateCodeFrame(code, err.loc);
}
} else if (err.line && err.column) {
err.loc = {
file: err.id,
line: err.line,
column: err.column
};
err.frame = err.frame || generateCodeFrame(this._activeCode, err.loc);
}
if (this instanceof TransformPluginContext && typeof err.loc?.line === "number" && typeof err.loc?.column === "number") {
const rawSourceMap = this._getCombinedSourcemap();
if (rawSourceMap && "version" in rawSourceMap) {
const traced = new TraceMap(rawSourceMap);
const { source, line, column } = originalPositionFor$1(traced, {
line: Number(err.loc.line),
column: Number(err.loc.column)
});
if (source && line != null && column != null) {
err.loc = { file: source, line, column };
}
}
}
} else if (err.loc) {
if (!err.frame) {
let code = err.pluginCode;
if (err.loc.file) {
err.id = normalizePath$3(err.loc.file);
if (!code) {
try {
code = fs__default.readFileSync(err.loc.file, "utf-8");
} catch {
}
}
}
if (code) {
err.frame = generateCodeFrame(`${code}`, err.loc);
}
}
}
if (typeof err.loc?.column !== "number" && typeof err.loc?.line !== "number" && !err.loc?.file) {
delete err.loc;
}
return err;
}
_warnIncompatibleMethod(method) {
this._container.config.logger.warn(
colors$1.cyan(`[plugin:${this._plugin.name}] `) + colors$1.yellow(
`context method ${colors$1.bold(
`${method}()`
)} is not supported in serve mode. This plugin is likely not vite-compatible.`
)
);
}
}
class ResolveIdContext extends PluginContext {
constructor(container, ssr, skip, scan) {
super(null, container, ssr);
this._resolveSkips = skip;
this._scan = scan;
}
}
class LoadPluginContext extends PluginContext {
_addedImports = null;
constructor(container, ssr) {
super(null, container, ssr);
}
addWatchFile(id) {
if (!this._addedImports) {
this._addedImports = /* @__PURE__ */ new Set();
}
this._addedImports.add(id);
super.addWatchFile(id);
}
}
class TransformPluginContext extends LoadPluginContext {
filename;
originalCode;
originalSourcemap = null;
sourcemapChain = [];
combinedMap = null;
constructor(container, id, code, inMap, ssr) {
super(container, ssr);
this.filename = id;
this.originalCode = code;
if (inMap) {
if (debugSourcemapCombine) {
inMap.name = "$inMap";
}
this.sourcemapChain.push(inMap);
}
}
_getCombinedSourcemap() {
if (debugSourcemapCombine && debugSourcemapCombineFilter && this.filename.includes(debugSourcemapCombineFilter)) {
debugSourcemapCombine("----------", this.filename);
debugSourcemapCombine(this.combinedMap);
debugSourcemapCombine(this.sourcemapChain);
debugSourcemapCombine("----------");
}
let combinedMap = this.combinedMap;
if (combinedMap && !("version" in combinedMap) && combinedMap.mappings === "") {
this.sourcemapChain.length = 0;
return combinedMap;
}
for (let m of this.sourcemapChain) {
if (typeof m === "string") m = JSON.parse(m);
if (!("version" in m)) {
if (m.mappings === "") {
combinedMap = { mappings: "" };
break;
}
combinedMap = null;
break;
}
if (!combinedMap) {
const sm = m;
if (sm.sources.length === 1 && !sm.sources[0]) {
combinedMap = {
...sm,
sources: [this.filename],
sourcesContent: [this.originalCode]
};
} else {
combinedMap = sm;
}
} else {
combinedMap = combineSourcemaps(cleanUrl(this.filename), [
m,
combinedMap
]);
}
}
if (combinedMap !== this.combinedMap) {
this.combinedMap = combinedMap;
this.sourcemapChain.length = 0;
}
return this.combinedMap;
}
getCombinedSourcemap() {
const map = this._getCombinedSourcemap();
if (!map || !("version" in map) && map.mappings === "") {
return new MagicString(this.originalCode).generateMap({
includeContent: true,
hires: "boundary",
source: cleanUrl(this.filename)
});
}
return map;
}
_updateActiveInfo(plugin, id, code) {
this._plugin = plugin;
this._activeId = id;
this._activeCode = code;
}
}
const debug$9 = createDebugger("vite:deps");
const htmlTypesRE = /\.(html|vue|svelte|astro|imba)$/;
const importsRE = /(?<!\/\/.*)(?<=^|;|\*\/)\s*import(?!\s+type)(?:[\w*{}\n\r\t, ]+from)?\s*("[^"]+"|'[^']+')\s*(?=$|;|\/\/|\/\*)/gm;
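// Dependency scanning: crawls the project entries with esbuild to find bare
// imports that should be pre-bundled. Returns a cancellable handle, roughly:
//   const { result, cancel } = scanImports(config)
//   const { deps, missing } = await result
// where `deps` maps an import id to its resolved file and `missing` maps an
// unresolved id to the importer that referenced it.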
function scanImports(config) {
const start = performance$1.now();
const deps = {};
const missing = {};
let entries;
const scanContext = { cancelled: false };
const esbuildContext = computeEntries(
config
).then((computedEntries) => {
entries = computedEntries;
if (!entries.length) {
if (!config.optimizeDeps.entries && !config.optimizeDeps.include) {
config.logger.warn(
colors$1.yellow(
"(!) Could not auto-determine entry point from rollupOptions or html files and there are no explicit optimizeDeps.include patterns. Skipping dependency pre-bundling."
)
);
}
return;
}
if (scanContext.cancelled) return;
debug$9?.(
`Crawling dependencies using entries: ${entries.map((entry) => `
${colors$1.dim(entry)}`).join("")}`
);
return prepareEsbuildScanner(config, entries, deps, missing, scanContext);
});
const result = esbuildContext.then((context) => {
function disposeContext() {
return context?.dispose().catch((e) => {
config.logger.error("Failed to dispose esbuild context", { error: e });
});
}
if (!context || scanContext?.cancelled) {
disposeContext();
return { deps: {}, missing: {} };
}
return context.rebuild().then(() => {
return {
// Ensure a fixed order so hashes are stable and logs are easier to compare
deps: orderedDependencies(deps),
missing
};
}).finally(() => {
return disposeContext();
});
}).catch(async (e) => {
if (e.errors && e.message.includes("The build was canceled")) {
return { deps: {}, missing: {} };
}
const prependMessage = colors$1.red(` Failed to scan for dependencies from entries:
${entries.join("\n")}
`);
if (e.errors) {
const msgs = await formatMessages(e.errors, {
kind: "error",
color: true
});
e.message = prependMessage + msgs.join("\n");
} else {
e.message = prependMessage + e.message;
}
throw e;
}).finally(() => {
if (debug$9) {
const duration = (performance$1.now() - start).toFixed(2);
const depsStr = Object.keys(orderedDependencies(deps)).sort().map((id) => `
${colors$1.cyan(id)} -> ${colors$1.dim(deps[id])}`).join("") || colors$1.dim("no dependencies found");
debug$9(`Scan completed in ${duration}ms: ${depsStr}`);
}
});
return {
cancel: async () => {
scanContext.cancelled = true;
return esbuildContext.then((context) => context?.cancel());
},
result
};
}
async function computeEntries(config) {
let entries = [];
const explicitEntryPatterns = config.optimizeDeps.entries;
const buildInput = config.build.rollupOptions?.input;
if (explicitEntryPatterns) {
entries = await globEntries(explicitEntryPatterns, config);
} else if (buildInput) {
const resolvePath = (p) => path$n.resolve(config.root, p);
if (typeof buildInput === "string") {
entries = [resolvePath(buildInput)];
} else if (Array.isArray(buildInput)) {
entries = buildInput.map(resolvePath);
} else if (isObject$1(buildInput)) {
entries = Object.values(buildInput).map(resolvePath);
} else {
throw new Error("invalid rollupOptions.input value.");
}
} else {
entries = await globEntries("**/*.html", config);
}
entries = entries.filter(
(entry) => isScannable(entry, config.optimizeDeps.extensions) && fs__default.existsSync(entry)
);
return entries;
}
async function prepareEsbuildScanner(config, entries, deps, missing, scanContext) {
const container = await createPluginContainer(config);
if (scanContext?.cancelled) return;
const plugin = esbuildScanPlugin(config, container, deps, missing, entries);
const { plugins = [], ...esbuildOptions } = config.optimizeDeps?.esbuildOptions ?? {};
let tsconfigRaw = esbuildOptions.tsconfigRaw;
if (!tsconfigRaw && !esbuildOptions.tsconfig) {
const tsconfigResult = await loadTsconfigJsonForFile(
path$n.join(config.root, "_dummy.js")
);
if (tsconfigResult.compilerOptions?.experimentalDecorators) {
tsconfigRaw = { compilerOptions: { experimentalDecorators: true } };
}
}
return await esbuild.context({
absWorkingDir: process.cwd(),
write: false,
stdin: {
contents: entries.map((e) => `import ${JSON.stringify(e)}`).join("\n"),
loader: "js"
},
bundle: true,
format: "esm",
logLevel: "silent",
plugins: [...plugins, plugin],
...esbuildOptions,
tsconfigRaw
});
}
function orderedDependencies(deps) {
const depsList = Object.entries(deps);
depsList.sort((a, b) => a[0].localeCompare(b[0]));
return Object.fromEntries(depsList);
}
function globEntries(pattern, config) {
const resolvedPatterns = arraify(pattern);
if (resolvedPatterns.every((str) => !glob.isDynamicPattern(str))) {
return resolvedPatterns.map(
(p) => normalizePath$3(path$n.resolve(config.root, p))
);
}
return glob(pattern, {
cwd: config.root,
ignore: [
"**/node_modules/**",
`**/${config.build.outDir}/**`,
// if there aren't explicit entries, also ignore other common folders
...config.optimizeDeps.entries ? [] : [`**/__tests__/**`, `**/coverage/**`]
],
absolute: true,
suppressErrors: true
// suppress EACCES errors
});
}
const scriptRE = /(<script(?:\s+[a-z_:][-\w:]*(?:\s*=\s*(?:"[^"]*"|'[^']*'|[^"'<>=\s]+))?)*\s*>)(.*?)<\/script>/gis;
const commentRE = /<!--.*?-->/gs;
const srcRE = /\bsrc\s*=\s*(?:"([^"]+)"|'([^']+)'|([^\s'">]+))/i;
const typeRE = /\btype\s*=\s*(?:"([^"]+)"|'([^']+)'|([^\s'">]+))/i;
const langRE = /\blang\s*=\s*(?:"([^"]+)"|'([^']+)'|([^\s'">]+))/i;
const contextRE = /\bcontext\s*=\s*(?:"([^"]+)"|'([^']+)'|([^\s'">]+))/i;
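// esbuild plugin used only during the dependency scan. It externalizes
// anything that is not scannable JS (css, assets, special queries), records
// bare imports that resolve into node_modules in `depImports`, records failed
// resolutions in `missing`, and extracts <script> blocks from html/vue/svelte/
// astro/imba files into virtual "script" modules so their imports are crawled
// too.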
function esbuildScanPlugin(config, container, depImports, missing, entries) {
const seen = /* @__PURE__ */ new Map();
const resolve = async (id, importer, options) => {
const key = id + (importer && path$n.dirname(importer));
if (seen.has(key)) {
return seen.get(key);
}
const resolved = await container.resolveId(
id,
importer && normalizePath$3(importer),
{
...options,
scan: true
}
);
const res = resolved?.id;
seen.set(key, res);
return res;
};
const include = config.optimizeDeps?.include;
const exclude = [
...config.optimizeDeps?.exclude || [],
"@vite/client",
"@vite/env"
];
const isUnlessEntry = (path2) => !entries.includes(path2);
const externalUnlessEntry = ({ path: path2 }) => ({
path: path2,
external: isUnlessEntry(path2)
});
const doTransformGlobImport = async (contents, id, loader) => {
let transpiledContents;
if (loader !== "js") {
transpiledContents = (await transform$1(contents, { loader })).code;
} else {
transpiledContents = contents;
}
const result = await transformGlobImport(
transpiledContents,
id,
config.root,
resolve
);
return result?.s.toString() || transpiledContents;
};
return {
name: "vite:dep-scan",
setup(build) {
const scripts = {};
build.onResolve({ filter: externalRE }, ({ path: path2 }) => ({
path: path2,
external: true
}));
build.onResolve({ filter: dataUrlRE }, ({ path: path2 }) => ({
path: path2,
external: true
}));
build.onResolve({ filter: virtualModuleRE }, ({ path: path2 }) => {
return {
// strip prefix to get valid filesystem path so esbuild can resolve imports in the file
path: path2.replace(virtualModulePrefix, ""),
namespace: "script"
};
});
build.onLoad({ filter: /.*/, namespace: "script" }, ({ path: path2 }) => {
return scripts[path2];
});
build.onResolve({ filter: htmlTypesRE }, async ({ path: path2, importer }) => {
const resolved = await resolve(path2, importer);
if (!resolved) return;
if (isInNodeModules$1(resolved) && isOptimizable(resolved, config.optimizeDeps))
return;
return {
path: resolved,
namespace: "html"
};
});
const htmlTypeOnLoadCallback = async ({ path: p }) => {
let raw = await fsp.readFile(p, "utf-8");
raw = raw.replace(commentRE, "<!---->");
const isHtml = p.endsWith(".html");
let js = "";
let scriptId = 0;
const matches = raw.matchAll(scriptRE);
for (const [, openTag, content] of matches) {
const typeMatch = typeRE.exec(openTag);
const type = typeMatch && (typeMatch[1] || typeMatch[2] || typeMatch[3]);
const langMatch = langRE.exec(openTag);
const lang = langMatch && (langMatch[1] || langMatch[2] || langMatch[3]);
if (isHtml && type !== "module") {
continue;
}
if (type && !(type.includes("javascript") || type.includes("ecmascript") || type === "module")) {
continue;
}
let loader = "js";
if (lang === "ts" || lang === "tsx" || lang === "jsx") {
loader = lang;
} else if (p.endsWith(".astro")) {
loader = "ts";
}
const srcMatch = srcRE.exec(openTag);
if (srcMatch) {
const src = srcMatch[1] || srcMatch[2] || srcMatch[3];
js += `import ${JSON.stringify(src)}
`;
} else if (content.trim()) {
const contents = content + (loader.startsWith("ts") ? extractImportPaths(content) : "");
const key = `${p}?id=${scriptId++}`;
if (contents.includes("import.meta.glob")) {
scripts[key] = {
loader: "js",
// since it is transpiled
contents: await doTransformGlobImport(contents, p, loader),
resolveDir: normalizePath$3(path$n.dirname(p)),
pluginData: {
htmlType: { loader }
}
};
} else {
scripts[key] = {
loader,
contents,
resolveDir: normalizePath$3(path$n.dirname(p)),
pluginData: {
htmlType: { loader }
}
};
}
const virtualModulePath = JSON.stringify(virtualModulePrefix + key);
const contextMatch = contextRE.exec(openTag);
const context = contextMatch && (contextMatch[1] || contextMatch[2] || contextMatch[3]);
if (p.endsWith(".svelte") && context !== "module") {
js += `import ${virtualModulePath}
`;
} else {
js += `export * from ${virtualModulePath}
`;
}
}
}
if (!p.endsWith(".vue") || !js.includes("export default")) {
js += "\nexport default {}";
}
return {
loader: "js",
contents: js
};
};
build.onLoad(
{ filter: htmlTypesRE, namespace: "html" },
htmlTypeOnLoadCallback
);
build.onLoad(
{ filter: htmlTypesRE, namespace: "file" },
htmlTypeOnLoadCallback
);
build.onResolve(
{
// avoid matching windows volume
filter: /^[\w@][^:]/
},
async ({ path: id, importer, pluginData }) => {
if (moduleListContains(exclude, id)) {
return externalUnlessEntry({ path: id });
}
if (depImports[id]) {
return externalUnlessEntry({ path: id });
}
const resolved = await resolve(id, importer, {
custom: {
depScan: { loader: pluginData?.htmlType?.loader }
}
});
if (resolved) {
if (shouldExternalizeDep(resolved, id)) {
return externalUnlessEntry({ path: id });
}
if (isInNodeModules$1(resolved) || include?.includes(id)) {
if (isOptimizable(resolved, config.optimizeDeps)) {
depImports[id] = resolved;
}
return externalUnlessEntry({ path: id });
} else if (isScannable(resolved, config.optimizeDeps.extensions)) {
const namespace = htmlTypesRE.test(resolved) ? "html" : void 0;
return {
path: path$n.resolve(resolved),
namespace
};
} else {
return externalUnlessEntry({ path: id });
}
} else {
missing[id] = normalizePath$3(importer);
}
}
);
const setupExternalize = (filter, doExternalize) => {
build.onResolve({ filter }, ({ path: path2 }) => {
return {
path: path2,
external: doExternalize(path2)
};
});
};
setupExternalize(CSS_LANGS_RE, isUnlessEntry);
setupExternalize(/\.(json|json5|wasm)$/, isUnlessEntry);
setupExternalize(
new RegExp(`\\.(${KNOWN_ASSET_TYPES.join("|")})$`),
isUnlessEntry
);
setupExternalize(SPECIAL_QUERY_RE, () => true);
build.onResolve(
{
filter: /.*/
},
async ({ path: id, importer, pluginData }) => {
const resolved = await resolve(id, importer, {
custom: {
depScan: { loader: pluginData?.htmlType?.loader }
}
});
if (resolved) {
if (shouldExternalizeDep(resolved, id) || !isScannable(resolved, config.optimizeDeps.extensions)) {
return externalUnlessEntry({ path: id });
}
const namespace = htmlTypesRE.test(resolved) ? "html" : void 0;
return {
path: path$n.resolve(cleanUrl(resolved)),
namespace
};
} else {
return externalUnlessEntry({ path: id });
}
}
);
build.onLoad({ filter: JS_TYPES_RE }, async ({ path: id }) => {
let ext = path$n.extname(id).slice(1);
if (ext === "mjs") ext = "js";
let contents = await fsp.readFile(id, "utf-8");
if (ext.endsWith("x") && config.esbuild && config.esbuild.jsxInject) {
contents = config.esbuild.jsxInject + `
` + contents;
}
const loader = config.optimizeDeps?.esbuildOptions?.loader?.[`.${ext}`] || ext;
if (contents.includes("import.meta.glob")) {
return {
loader: "js",
// since it is transpiled,
contents: await doTransformGlobImport(contents, id, loader)
};
}
return {
loader,
contents
};
});
build.onLoad({ filter: /.*/, namespace: "file" }, () => {
return {
loader: "js",
contents: "export default {}"
};
});
}
};
}
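// esbuild may treat imports that are only referenced as types as unused and
// drop them, which would stop the scan from following those files. For ts/tsx
// script blocks the quoted specifiers are therefore re-emitted as bare
// side-effect imports, e.g. (illustrative)
//   import { Foo } from "./foo"   ->   appends `import "./foo"`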
function extractImportPaths(code) {
code = code.replace(multilineCommentsRE, "/* */").replace(singlelineCommentsRE, "");
let js = "";
let m;
importsRE.lastIndex = 0;
while ((m = importsRE.exec(code)) != null) {
js += `
import ${m[1]}`;
}
return js;
}
function shouldExternalizeDep(resolvedId, rawId) {
if (!path$n.isAbsolute(resolvedId)) {
return true;
}
if (resolvedId === rawId || resolvedId.includes("\0")) {
return true;
}
return false;
}
function isScannable(id, extensions) {
return JS_TYPES_RE.test(id) || htmlTypesRE.test(id) || extensions?.includes(path$n.extname(id)) || false;
}
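// Resolver for optimizeDeps.include entries. Plain ids resolve from the
// project root, while the "a > b" syntax resolves an id nested inside another
// package, e.g. (illustrative) "my-lib > nested-dep" resolves "nested-dep"
// from my-lib's package directory instead of the root.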
function createOptimizeDepsIncludeResolver(config, ssr) {
const resolve = config.createResolver({
asSrc: false,
scan: true,
ssrOptimizeCheck: ssr,
ssrConfig: config.ssr,
packageCache: /* @__PURE__ */ new Map()
});
return async (id) => {
const lastArrowIndex = id.lastIndexOf(">");
if (lastArrowIndex === -1) {
return await resolve(id, void 0, void 0, ssr);
}
const nestedRoot = id.substring(0, lastArrowIndex).trim();
const nestedPath = id.substring(lastArrowIndex + 1).trim();
const basedir = nestedResolveBasedir(
nestedRoot,
config.root,
config.resolve.preserveSymlinks
);
return await resolve(
nestedPath,
path$n.resolve(basedir, "package.json"),
void 0,
ssr
);
};
}
function expandGlobIds(id, config) {
const pkgName = getNpmPackageName(id);
if (!pkgName) return [];
const pkgData = resolvePackageData(
pkgName,
config.root,
config.resolve.preserveSymlinks,
config.packageCache
);
if (!pkgData) return [];
const pattern = "." + id.slice(pkgName.length);
const exports = pkgData.data.exports;
if (exports) {
if (typeof exports === "string" || Array.isArray(exports)) {
return [pkgName];
}
const possibleExportPaths = [];
for (const key in exports) {
if (key[0] === ".") {
if (key.includes("*")) {
const exportsValue = getFirstExportStringValue(exports[key]);
if (!exportsValue) continue;
const exportValuePattern = exportsValue.replace(/\*/g, "**/*");
const exportsValueGlobRe = new RegExp(
exportsValue.split("*").map(escapeRegex).join("(.*)")
);
possibleExportPaths.push(
...glob.sync(exportValuePattern, {
cwd: pkgData.dir,
ignore: ["node_modules"]
}).map((filePath) => {
if (exportsValue.startsWith("./") && !filePath.startsWith("./")) {
filePath = "./" + filePath;
}
const matched2 = exportsValueGlobRe.exec(slash$1(filePath));
if (matched2) {
let allGlobSame = matched2.length === 2;
if (!allGlobSame) {
allGlobSame = true;
for (let i = 2; i < matched2.length; i++) {
if (matched2[i] !== matched2[i - 1]) {
allGlobSame = false;
break;
}
}
}
if (allGlobSame) {
return key.replace("*", matched2[1]).slice(2);
}
}
return "";
}).filter(Boolean)
);
} else {
possibleExportPaths.push(key.slice(2));
}
}
}
const matched = micromatch$2(possibleExportPaths, pattern).map(
(match) => path$n.posix.join(pkgName, match)
);
matched.unshift(pkgName);
return matched;
} else {
const matched = glob.sync(pattern, { cwd: pkgData.dir, ignore: ["node_modules"] }).map((match) => path$n.posix.join(pkgName, slash$1(match)));
matched.unshift(pkgName);
return matched;
}
}
function getFirstExportStringValue(obj) {
if (typeof obj === "string") {
return obj;
} else if (Array.isArray(obj)) {
return obj[0];
} else {
for (const key in obj) {
return getFirstExportStringValue(obj[key]);
}
}
}
function nestedResolveBasedir(id, basedir, preserveSymlinks = false) {
const pkgs = id.split(">").map((pkg) => pkg.trim());
for (const pkg of pkgs) {
basedir = resolvePackageData(pkg, basedir, preserveSymlinks)?.dir || basedir;
}
return basedir;
}
const debug$8 = createDebugger("vite:deps");
const debounceMs = 100;
const depsOptimizerMap = /* @__PURE__ */ new WeakMap();
const devSsrDepsOptimizerMap = /* @__PURE__ */ new WeakMap();
function getDepsOptimizer(config, ssr) {
return (ssr ? devSsrDepsOptimizerMap : depsOptimizerMap).get(config);
}
async function initDepsOptimizer(config, server) {
if (!getDepsOptimizer(config, false)) {
await createDepsOptimizer(config, server);
}
}
let creatingDevSsrOptimizer;
async function initDevSsrDepsOptimizer(config, server) {
if (getDepsOptimizer(config, true)) {
return;
}
if (creatingDevSsrOptimizer) {
return creatingDevSsrOptimizer;
}
creatingDevSsrOptimizer = async function() {
const ssr = false;
if (!getDepsOptimizer(config, ssr)) {
await initDepsOptimizer(config, server);
}
await getDepsOptimizer(config, ssr).scanProcessing;
await createDevSsrDepsOptimizer(config);
creatingDevSsrOptimizer = void 0;
}();
return await creatingDevSsrOptimizer;
}
async function createDepsOptimizer(config, server) {
const { logger } = config;
const ssr = false;
const sessionTimestamp = Date.now().toString();
const cachedMetadata = await loadCachedDepOptimizationMetadata(config, ssr);
let debounceProcessingHandle;
let closed = false;
let metadata = cachedMetadata || initDepsOptimizerMetadata(config, ssr, sessionTimestamp);
const options = getDepOptimizationConfig(config, ssr);
const { noDiscovery, holdUntilCrawlEnd } = options;
const depsOptimizer = {
metadata,
registerMissingImport,
run: () => debouncedProcessing(0),
isOptimizedDepFile: createIsOptimizedDepFile(config),
isOptimizedDepUrl: createIsOptimizedDepUrl(config),
getOptimizedDepId: (depInfo) => `${depInfo.file}?v=${depInfo.browserHash}`,
close,
options
};
depsOptimizerMap.set(config, depsOptimizer);
let newDepsDiscovered = false;
let newDepsToLog = [];
let newDepsToLogHandle;
const logNewlyDiscoveredDeps = () => {
if (newDepsToLog.length) {
logger.info(
colors$1.green(
`\u2728 new dependencies optimized: ${depsLogString(newDepsToLog)}`
),
{
timestamp: true
}
);
newDepsToLog = [];
}
};
let discoveredDepsWhileScanning = [];
const logDiscoveredDepsWhileScanning = () => {
if (discoveredDepsWhileScanning.length) {
logger.info(
colors$1.green(
`\u2728 discovered while scanning: ${depsLogString(
discoveredDepsWhileScanning
)}`
),
{
timestamp: true
}
);
discoveredDepsWhileScanning = [];
}
};
let depOptimizationProcessing = promiseWithResolvers();
let depOptimizationProcessingQueue = [];
const resolveEnqueuedProcessingPromises = () => {
for (const processing of depOptimizationProcessingQueue) {
processing.resolve();
}
depOptimizationProcessingQueue = [];
};
let enqueuedRerun;
let currentlyProcessing = false;
let firstRunCalled = !!cachedMetadata;
let warnAboutMissedDependencies = false;
let waitingForCrawlEnd = false;
if (!cachedMetadata) {
server._onCrawlEnd(onCrawlEnd);
waitingForCrawlEnd = true;
}
let optimizationResult;
let discover;
async function close() {
closed = true;
await Promise.allSettled([
discover?.cancel(),
depsOptimizer.scanProcessing,
optimizationResult?.cancel()
]);
}
if (!cachedMetadata) {
currentlyProcessing = true;
const manuallyIncludedDeps = {};
await addManuallyIncludedOptimizeDeps(manuallyIncludedDeps, config, ssr);
const manuallyIncludedDepsInfo = toDiscoveredDependencies(
config,
manuallyIncludedDeps,
ssr,
sessionTimestamp
);
for (const depInfo of Object.values(manuallyIncludedDepsInfo)) {
addOptimizedDepInfo(metadata, "discovered", {
...depInfo,
processing: depOptimizationProcessing.promise
});
newDepsDiscovered = true;
}
if (noDiscovery) {
runOptimizer();
} else {
depsOptimizer.scanProcessing = new Promise((resolve) => {
(async () => {
try {
debug$8?.(colors$1.green(`scanning for dependencies...`));
discover = discoverProjectDependencies(config);
const deps = await discover.result;
discover = void 0;
const manuallyIncluded = Object.keys(manuallyIncludedDepsInfo);
discoveredDepsWhileScanning.push(
...Object.keys(metadata.discovered).filter(
(dep) => !deps[dep] && !manuallyIncluded.includes(dep)
)
);
for (const id of Object.keys(deps)) {
if (!metadata.discovered[id]) {
addMissingDep(id, deps[id]);
}
}
const knownDeps = prepareKnownDeps();
startNextDiscoveredBatch();
optimizationResult = runOptimizeDeps(config, knownDeps, ssr);
if (!holdUntilCrawlEnd) {
optimizationResult.result.then((result) => {
if (!waitingForCrawlEnd) return;
optimizationResult = void 0;
runOptimizer(result);
});
}
} catch (e) {
logger.error(e.stack || e.message);
} finally {
resolve();
depsOptimizer.scanProcessing = void 0;
}
})();
});
}
}
function startNextDiscoveredBatch() {
newDepsDiscovered = false;
depOptimizationProcessingQueue.push(depOptimizationProcessing);
depOptimizationProcessing = promiseWithResolvers();
}
function prepareKnownDeps() {
const knownDeps = {};
for (const dep of Object.keys(metadata.optimized)) {
knownDeps[dep] = { ...metadata.optimized[dep] };
}
for (const dep of Object.keys(metadata.discovered)) {
const { processing, ...info } = metadata.discovered[dep];
knownDeps[dep] = info;
}
return knownDeps;
}
async function runOptimizer(preRunResult) {
const isRerun = firstRunCalled;
firstRunCalled = true;
enqueuedRerun = void 0;
if (debounceProcessingHandle) clearTimeout(debounceProcessingHandle);
if (closed) {
currentlyProcessing = false;
return;
}
currentlyProcessing = true;
try {
let processingResult;
if (preRunResult) {
processingResult = preRunResult;
} else {
const knownDeps = prepareKnownDeps();
startNextDiscoveredBatch();
optimizationResult = runOptimizeDeps(config, knownDeps, ssr);
processingResult = await optimizationResult.result;
optimizationResult = void 0;
}
if (closed) {
currentlyProcessing = false;
processingResult.cancel();
resolveEnqueuedProcessingPromises();
return;
}
const newData = processingResult.metadata;
const needsInteropMismatch = findInteropMismatches(
metadata.discovered,
newData.optimized
);
const needsReload = needsInteropMismatch.length > 0 || metadata.hash !== newData.hash || Object.keys(metadata.optimized).some((dep) => {
return metadata.optimized[dep].fileHash !== newData.optimized[dep].fileHash;
});
const commitProcessing = async () => {
await processingResult.commit();
for (const id in metadata.discovered) {
if (!newData.optimized[id]) {
addOptimizedDepInfo(newData, "discovered", metadata.discovered[id]);
}
}
if (!needsReload) {
newData.browserHash = metadata.browserHash;
for (const dep in newData.chunks) {
newData.chunks[dep].browserHash = metadata.browserHash;
}
for (const dep in newData.optimized) {
newData.optimized[dep].browserHash = (metadata.optimized[dep] || metadata.discovered[dep]).browserHash;
}
}
for (const o in newData.optimized) {
const discovered = metadata.discovered[o];
if (discovered) {
const optimized = newData.optimized[o];
discovered.browserHash = optimized.browserHash;
discovered.fileHash = optimized.fileHash;
discovered.needsInterop = optimized.needsInterop;
discovered.processing = void 0;
}
}
if (isRerun) {
newDepsToLog.push(
...Object.keys(newData.optimized).filter(
(dep) => !metadata.optimized[dep]
)
);
}
metadata = depsOptimizer.metadata = newData;
resolveEnqueuedProcessingPromises();
};
if (!needsReload) {
await commitProcessing();
if (!debug$8) {
if (newDepsToLogHandle) clearTimeout(newDepsToLogHandle);
newDepsToLogHandle = setTimeout(() => {
newDepsToLogHandle = void 0;
logNewlyDiscoveredDeps();
if (warnAboutMissedDependencies) {
logDiscoveredDepsWhileScanning();
logger.info(
colors$1.magenta(
`\u2757 add these dependencies to optimizeDeps.include to speed up cold start`
),
{ timestamp: true }
);
warnAboutMissedDependencies = false;
}
}, 2 * debounceMs);
} else {
debug$8(
colors$1.green(
`\u2728 ${!isRerun ? `dependencies optimized` : `optimized dependencies unchanged`}`
)
);
}
} else {
if (newDepsDiscovered) {
processingResult.cancel();
debug$8?.(
colors$1.green(
`\u2728 delaying reload as new dependencies have been found...`
)
);
} else {
await commitProcessing();
if (!debug$8) {
if (newDepsToLogHandle) clearTimeout(newDepsToLogHandle);
newDepsToLogHandle = void 0;
logNewlyDiscoveredDeps();
if (warnAboutMissedDependencies) {
logDiscoveredDepsWhileScanning();
logger.info(
colors$1.magenta(
`\u2757 add these dependencies to optimizeDeps.include to avoid a full page reload during cold start`
),
{ timestamp: true }
);
warnAboutMissedDependencies = false;
}
}
logger.info(
colors$1.green(`\u2728 optimized dependencies changed. reloading`),
{
timestamp: true
}
);
if (needsInteropMismatch.length > 0) {
logger.warn(
`Mixed ESM and CJS detected in ${colors$1.yellow(
needsInteropMismatch.join(", ")
)}, add ${needsInteropMismatch.length === 1 ? "it" : "them"} to optimizeDeps.needsInterop to speed up cold start`,
{
timestamp: true
}
);
}
fullReload();
}
}
} catch (e) {
logger.error(
colors$1.red(`error while updating dependencies:
${e.stack}`),
{ timestamp: true, error: e }
);
resolveEnqueuedProcessingPromises();
metadata.discovered = {};
}
currentlyProcessing = false;
enqueuedRerun?.();
}
function fullReload() {
server.moduleGraph.invalidateAll();
server.hot.send({
type: "full-reload",
path: "*"
});
}
async function rerun() {
const deps = Object.keys(metadata.discovered);
const depsString = depsLogString(deps);
debug$8?.(colors$1.green(`new dependencies found: ${depsString}`));
runOptimizer();
}
function getDiscoveredBrowserHash(hash, deps, missing) {
return getHash(
hash + JSON.stringify(deps) + JSON.stringify(missing) + sessionTimestamp
);
}
function registerMissingImport(id, resolved) {
const optimized = metadata.optimized[id];
if (optimized) {
return optimized;
}
const chunk = metadata.chunks[id];
if (chunk) {
return chunk;
}
let missing = metadata.discovered[id];
if (missing) {
return missing;
}
missing = addMissingDep(id, resolved);
if (!waitingForCrawlEnd) {
debouncedProcessing();
}
return missing;
}
function addMissingDep(id, resolved) {
newDepsDiscovered = true;
return addOptimizedDepInfo(metadata, "discovered", {
id,
file: getOptimizedDepPath(id, config, ssr),
src: resolved,
// Adding a browserHash to this missing dependency that is unique to
// the current state of known + missing deps. If its optimizeDeps run
// doesn't alter the bundled files of previous known dependencies,
// we don't need a full reload and this browserHash will be kept
browserHash: getDiscoveredBrowserHash(
metadata.hash,
depsFromOptimizedDepInfo(metadata.optimized),
depsFromOptimizedDepInfo(metadata.discovered)
),
// loading of this pre-bundled dep needs to await for its processing
// promise to be resolved
processing: depOptimizationProcessing.promise,
exportsData: extractExportsData(resolved, config, ssr)
});
}
function debouncedProcessing(timeout = debounceMs) {
enqueuedRerun = void 0;
if (debounceProcessingHandle) clearTimeout(debounceProcessingHandle);
if (newDepsToLogHandle) clearTimeout(newDepsToLogHandle);
newDepsToLogHandle = void 0;
debounceProcessingHandle = setTimeout(() => {
debounceProcessingHandle = void 0;
enqueuedRerun = rerun;
if (!currentlyProcessing) {
enqueuedRerun();
}
}, timeout);
}
async function onCrawlEnd() {
waitingForCrawlEnd = false;
debug$8?.(colors$1.green(`\u2728 static imports crawl ended`));
if (closed) {
return;
}
await depsOptimizer.scanProcessing;
if (optimizationResult && !config.optimizeDeps.noDiscovery) {
const afterScanResult = optimizationResult.result;
optimizationResult = void 0;
const result = await afterScanResult;
currentlyProcessing = false;
const crawlDeps = Object.keys(metadata.discovered);
const scanDeps = Object.keys(result.metadata.optimized);
if (scanDeps.length === 0 && crawlDeps.length === 0) {
debug$8?.(
colors$1.green(
`\u2728 no dependencies found by the scanner or crawling static imports`
)
);
startNextDiscoveredBatch();
runOptimizer(result);
return;
}
const needsInteropMismatch = findInteropMismatches(
metadata.discovered,
result.metadata.optimized
);
const scannerMissedDeps = crawlDeps.some((dep) => !scanDeps.includes(dep));
const outdatedResult = needsInteropMismatch.length > 0 || scannerMissedDeps;
if (outdatedResult) {
result.cancel();
for (const dep of scanDeps) {
if (!crawlDeps.includes(dep)) {
addMissingDep(dep, result.metadata.optimized[dep].src);
}
}
if (scannerMissedDeps) {
debug$8?.(
colors$1.yellow(
`\u2728 new dependencies were found while crawling that weren't detected by the scanner`
)
);
}
debug$8?.(colors$1.green(`\u2728 re-running optimizer`));
debouncedProcessing(0);
} else {
debug$8?.(
colors$1.green(
`\u2728 using post-scan optimizer result, the scanner found every used dependency`
)
);
startNextDiscoveredBatch();
runOptimizer(result);
}
} else if (!holdUntilCrawlEnd) {
if (newDepsDiscovered) {
debug$8?.(
colors$1.green(
`\u2728 new dependencies were found while crawling static imports, re-running optimizer`
)
);
warnAboutMissedDependencies = true;
debouncedProcessing(0);
}
} else {
const crawlDeps = Object.keys(metadata.discovered);
currentlyProcessing = false;
if (crawlDeps.length === 0) {
debug$8?.(
colors$1.green(
`\u2728 no dependencies found while crawling the static imports`
)
);
firstRunCalled = true;
}
debouncedProcessing(0);
}
}
}
async function createDevSsrDepsOptimizer(config) {
const metadata = await optimizeServerSsrDeps(config);
const depsOptimizer = {
metadata,
isOptimizedDepFile: createIsOptimizedDepFile(config),
isOptimizedDepUrl: createIsOptimizedDepUrl(config),
getOptimizedDepId: (depInfo) => `${depInfo.file}?v=${depInfo.browserHash}`,
registerMissingImport: () => {
throw new Error(
"Vite Internal Error: registerMissingImport is not supported in dev SSR"
);
},
// noop, there is no scanning during dev SSR
// the optimizer blocks the server start
run: () => {
},
close: async () => {
},
options: config.ssr.optimizeDeps
};
devSsrDepsOptimizerMap.set(config, depsOptimizer);
}
function findInteropMismatches(discovered, optimized) {
const needsInteropMismatch = [];
for (const dep in discovered) {
const discoveredDepInfo = discovered[dep];
if (discoveredDepInfo.needsInterop === void 0) continue;
const depInfo = optimized[dep];
if (!depInfo) continue;
if (depInfo.needsInterop !== discoveredDepInfo.needsInterop) {
needsInteropMismatch.push(dep);
debug$8?.(colors$1.cyan(`\u2728 needsInterop mismatch detected for ${dep}`));
}
}
return needsInteropMismatch;
}
const debug$7 = createDebugger("vite:deps");
const jsExtensionRE = /\.js$/i;
const jsMapExtensionRE = /\.js\.map$/i;
async function optimizeDeps(config, force = config.optimizeDeps.force, asCommand = false) {
const log = asCommand ? config.logger.info : debug$7;
const ssr = false;
const cachedMetadata = await loadCachedDepOptimizationMetadata(
config,
ssr,
force,
asCommand
);
if (cachedMetadata) {
return cachedMetadata;
}
const deps = await discoverProjectDependencies(config).result;
await addManuallyIncludedOptimizeDeps(deps, config, ssr);
const depsString = depsLogString(Object.keys(deps));
log?.(colors$1.green(`Optimizing dependencies:
${depsString}`));
const depsInfo = toDiscoveredDependencies(config, deps, ssr);
const result = await runOptimizeDeps(config, depsInfo, ssr).result;
await result.commit();
return result.metadata;
}
async function optimizeServerSsrDeps(config) {
const ssr = true;
const cachedMetadata = await loadCachedDepOptimizationMetadata(
config,
ssr,
config.optimizeDeps.force,
false
);
if (cachedMetadata) {
return cachedMetadata;
}
const deps = {};
await addManuallyIncludedOptimizeDeps(deps, config, ssr);
const depsInfo = toDiscoveredDependencies(config, deps, ssr);
const result = await runOptimizeDeps(config, depsInfo, ssr).result;
await result.commit();
return result.metadata;
}
function initDepsOptimizerMetadata(config, ssr, timestamp) {
const { lockfileHash, configHash, hash } = getDepHash(config, ssr);
return {
hash,
lockfileHash,
configHash,
browserHash: getOptimizedBrowserHash(hash, {}, timestamp),
optimized: {},
chunks: {},
discovered: {},
depInfoList: []
};
}
function addOptimizedDepInfo(metadata, type, depInfo) {
metadata[type][depInfo.id] = depInfo;
metadata.depInfoList.push(depInfo);
return depInfo;
}
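// Loads the cached optimizer metadata (METADATA_FILENAME in the deps cache
// dir) and decides whether it can be reused. The cache is discarded and the
// deps cache dir removed when --force is passed, the lockfile hash changed,
// or the relevant parts of the vite config changed; otherwise the cached
// metadata is returned and pre-bundling is skipped.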
let firstLoadCachedDepOptimizationMetadata = true;
async function loadCachedDepOptimizationMetadata(config, ssr, force = config.optimizeDeps.force, asCommand = false) {
const log = asCommand ? config.logger.info : debug$7;
if (firstLoadCachedDepOptimizationMetadata) {
firstLoadCachedDepOptimizationMetadata = false;
setTimeout(() => cleanupDepsCacheStaleDirs(config), 0);
}
const depsCacheDir = getDepsCacheDir(config, ssr);
if (!force) {
let cachedMetadata;
try {
const cachedMetadataPath = path$n.join(depsCacheDir, METADATA_FILENAME);
cachedMetadata = parseDepsOptimizerMetadata(
await fsp.readFile(cachedMetadataPath, "utf-8"),
depsCacheDir
);
} catch (e) {
}
if (cachedMetadata) {
if (cachedMetadata.lockfileHash !== getLockfileHash(config)) {
config.logger.info(
"Re-optimizing dependencies because lockfile has changed"
);
} else if (cachedMetadata.configHash !== getConfigHash(config, ssr)) {
config.logger.info(
"Re-optimizing dependencies because vite config has changed"
);
} else {
log?.("Hash is consistent. Skipping. Use --force to override.");
return cachedMetadata;
}
}
} else {
config.logger.info("Forced re-optimization of dependencies");
}
debug$7?.(colors$1.green(`removing old cache dir ${depsCacheDir}`));
await fsp.rm(depsCacheDir, { recursive: true, force: true });
}
function discoverProjectDependencies(config) {
const { cancel, result } = scanImports(config);
return {
cancel,
result: result.then(({ deps, missing }) => {
const missingIds = Object.keys(missing);
if (missingIds.length) {
throw new Error(
`The following dependencies are imported but could not be resolved:
${missingIds.map(
(id) => `${colors$1.cyan(id)} ${colors$1.white(
colors$1.dim(`(imported by ${missing[id]})`)
)}`
).join(`
`)}
Are they installed?`
);
}
return deps;
})
};
}
function toDiscoveredDependencies(config, deps, ssr, timestamp) {
const browserHash = getOptimizedBrowserHash(
getDepHash(config, ssr).hash,
deps,
timestamp
);
const discovered = {};
for (const id in deps) {
const src = deps[id];
discovered[id] = {
id,
file: getOptimizedDepPath(id, config, ssr),
src,
browserHash,
exportsData: extractExportsData(src, config, ssr)
};
}
return discovered;
}
function depsLogString(qualifiedIds) {
return colors$1.yellow(qualifiedIds.join(`, `));
}
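// Bundles the given deps with esbuild into a temporary "<depsCacheDir>_temp_<hash>"
// directory. Nothing becomes visible until commit() swaps the temp dir into place
// (rename-based, with a retrying rename path on Windows); cancel() only removes the
// processing dir, and an empty dep list short-circuits to an immediately committed result.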
function runOptimizeDeps(resolvedConfig, depsInfo, ssr) {
const optimizerContext = { cancelled: false };
const config = {
...resolvedConfig,
command: "build"
};
const depsCacheDir = getDepsCacheDir(resolvedConfig, ssr);
const processingCacheDir = getProcessingDepsCacheDir(resolvedConfig, ssr);
fs__default.mkdirSync(processingCacheDir, { recursive: true });
debug$7?.(colors$1.green(`creating package.json in ${processingCacheDir}`));
fs__default.writeFileSync(
path$n.resolve(processingCacheDir, "package.json"),
`{
"type": "module"
}
`
);
const metadata = initDepsOptimizerMetadata(config, ssr);
metadata.browserHash = getOptimizedBrowserHash(
metadata.hash,
depsFromOptimizedDepInfo(depsInfo)
);
const qualifiedIds = Object.keys(depsInfo);
let cleaned = false;
let committed = false;
const cleanUp = () => {
if (!cleaned && !committed) {
cleaned = true;
debug$7?.(colors$1.green(`removing cache dir ${processingCacheDir}`));
try {
fs__default.rmSync(processingCacheDir, { recursive: true, force: true });
} catch (error) {
}
}
};
const successfulResult = {
metadata,
cancel: cleanUp,
commit: async () => {
if (cleaned) {
throw new Error(
"Can not commit a Deps Optimization run as it was cancelled"
);
}
committed = true;
const dataPath = path$n.join(processingCacheDir, METADATA_FILENAME);
debug$7?.(
colors$1.green(`creating ${METADATA_FILENAME} in ${processingCacheDir}`)
);
fs__default.writeFileSync(
dataPath,
stringifyDepsOptimizerMetadata(metadata, depsCacheDir)
);
const temporaryPath = depsCacheDir + getTempSuffix();
const depsCacheDirPresent = fs__default.existsSync(depsCacheDir);
if (isWindows$3) {
if (depsCacheDirPresent) {
debug$7?.(colors$1.green(`renaming ${depsCacheDir} to ${temporaryPath}`));
await safeRename(depsCacheDir, temporaryPath);
}
debug$7?.(
colors$1.green(`renaming ${processingCacheDir} to ${depsCacheDir}`)
);
await safeRename(processingCacheDir, depsCacheDir);
} else {
if (depsCacheDirPresent) {
debug$7?.(colors$1.green(`renaming ${depsCacheDir} to ${temporaryPath}`));
fs__default.renameSync(depsCacheDir, temporaryPath);
}
debug$7?.(
colors$1.green(`renaming ${processingCacheDir} to ${depsCacheDir}`)
);
fs__default.renameSync(processingCacheDir, depsCacheDir);
}
if (depsCacheDirPresent) {
debug$7?.(colors$1.green(`removing cache temp dir ${temporaryPath}`));
fsp.rm(temporaryPath, { recursive: true, force: true });
}
}
};
if (!qualifiedIds.length) {
return {
cancel: async () => cleanUp(),
result: Promise.resolve(successfulResult)
};
}
const cancelledResult = {
metadata,
commit: async () => cleanUp(),
cancel: cleanUp
};
const start = performance$1.now();
const preparedRun = prepareEsbuildOptimizerRun(
resolvedConfig,
depsInfo,
ssr,
processingCacheDir,
optimizerContext
);
const runResult = preparedRun.then(({ context, idToExports }) => {
function disposeContext() {
return context?.dispose().catch((e) => {
config.logger.error("Failed to dispose esbuild context", { error: e });
});
}
if (!context || optimizerContext.cancelled) {
disposeContext();
return cancelledResult;
}
return context.rebuild().then((result) => {
const meta = result.metafile;
const processingCacheDirOutputPath = path$n.relative(
process.cwd(),
processingCacheDir
);
for (const id in depsInfo) {
const output = esbuildOutputFromId(
meta.outputs,
id,
processingCacheDir
);
const { exportsData, ...info } = depsInfo[id];
addOptimizedDepInfo(metadata, "optimized", {
...info,
// We only need to hash output.imports to check for stability, but adding the hash
// and file path gives us a unique hash that may be useful for other things in the future
fileHash: getHash(
metadata.hash + depsInfo[id].file + JSON.stringify(output.imports)
),
browserHash: metadata.browserHash,
// After bundling we have more information and can warn the user about legacy packages
// that require manual configuration
needsInterop: needsInterop(
config,
ssr,
id,
idToExports[id],
output
)
});
}
for (const o of Object.keys(meta.outputs)) {
if (!jsMapExtensionRE.test(o)) {
const id = path$n.relative(processingCacheDirOutputPath, o).replace(jsExtensionRE, "");
const file = getOptimizedDepPath(id, resolvedConfig, ssr);
if (!findOptimizedDepInfoInRecord(
metadata.optimized,
(depInfo) => depInfo.file === file
)) {
addOptimizedDepInfo(metadata, "chunks", {
id,
file,
needsInterop: false,
browserHash: metadata.browserHash
});
}
}
}
debug$7?.(
`Dependencies bundled in ${(performance$1.now() - start).toFixed(2)}ms`
);
return successfulResult;
}).catch((e) => {
if (e.errors && e.message.includes("The build was canceled")) {
return cancelledResult;
}
throw e;
}).finally(() => {
return disposeContext();
});
});
runResult.catch(() => {
cleanUp();
});
return {
async cancel() {
optimizerContext.cancelled = true;
const { context } = await preparedRun;
await context?.cancel();
cleanUp();
},
result: runResult
};
}
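// Builds the esbuild context for the optimizer run: ids are flattened into entry names,
// exports data is collected per dep (switching the ".js" loader to "jsx" when needed),
// optimizeDeps.exclude entries become externals, and the output is ESM with code
// splitting, sourcemaps and a metafile written into the processing cache dir.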
async function prepareEsbuildOptimizerRun(resolvedConfig, depsInfo, ssr, processingCacheDir, optimizerContext) {
const config = {
...resolvedConfig,
command: "build"
};
const flatIdDeps = {};
const idToExports = {};
const optimizeDeps2 = getDepOptimizationConfig(config, ssr);
const { plugins: pluginsFromConfig = [], ...esbuildOptions } = optimizeDeps2?.esbuildOptions ?? {};
await Promise.all(
Object.keys(depsInfo).map(async (id) => {
const src = depsInfo[id].src;
const exportsData = await (depsInfo[id].exportsData ?? extractExportsData(src, config, ssr));
if (exportsData.jsxLoader && !esbuildOptions.loader?.[".js"]) {
esbuildOptions.loader = {
".js": "jsx",
...esbuildOptions.loader
};
}
const flatId = flattenId(id);
flatIdDeps[flatId] = src;
idToExports[id] = exportsData;
})
);
if (optimizerContext.cancelled) return { context: void 0, idToExports };
const define = {
"process.env.NODE_ENV": JSON.stringify(process.env.NODE_ENV || config.mode)
};
const platform = ssr && config.ssr?.target !== "webworker" ? "node" : "browser";
const external = [...optimizeDeps2?.exclude ?? []];
const plugins = [...pluginsFromConfig];
if (external.length) {
plugins.push(esbuildCjsExternalPlugin(external, platform));
}
plugins.push(esbuildDepPlugin(flatIdDeps, external, config, ssr));
const context = await esbuild.context({
absWorkingDir: process.cwd(),
entryPoints: Object.keys(flatIdDeps),
bundle: true,
// We can't use platform 'neutral', as esbuild has custom handling
// when the platform is 'node' or 'browser' that can't be emulated
// by using mainFields and conditions
platform,
define,
format: "esm",
// See https://github.com/evanw/esbuild/issues/1921#issuecomment-1152991694
banner: platform === "node" ? {
js: `import { createRequire } from 'module';const require = createRequire(import.meta.url);`
} : void 0,
target: ESBUILD_MODULES_TARGET,
external,
logLevel: "error",
splitting: true,
sourcemap: true,
outdir: processingCacheDir,
ignoreAnnotations: true,
metafile: true,
plugins,
charset: "utf8",
...esbuildOptions,
supported: {
...defaultEsbuildSupported,
...esbuildOptions.supported
}
});
return { context, idToExports };
}
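// Expands glob patterns in optimizeDeps.include, resolves each id and adds it to `deps`
// unless it is already present, fails to resolve, or is not optimizable; in the failure
// cases a warning points at the offending include entry.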
async function addManuallyIncludedOptimizeDeps(deps, config, ssr) {
const { logger } = config;
const optimizeDeps2 = getDepOptimizationConfig(config, ssr);
const optimizeDepsInclude = optimizeDeps2?.include ?? [];
if (optimizeDepsInclude.length) {
const unableToOptimize = (id, msg) => {
if (optimizeDepsInclude.includes(id)) {
logger.warn(
`${msg}: ${colors$1.cyan(id)}, present in '${ssr ? "ssr." : ""}optimizeDeps.include'`
);
}
};
const includes = [...optimizeDepsInclude];
for (let i = 0; i < includes.length; i++) {
const id = includes[i];
if (glob.isDynamicPattern(id)) {
const globIds = expandGlobIds(id, config);
includes.splice(i, 1, ...globIds);
i += globIds.length - 1;
}
}
const resolve = createOptimizeDepsIncludeResolver(config, ssr);
for (const id of includes) {
const normalizedId = normalizeId(id);
if (!deps[normalizedId]) {
const entry = await resolve(id);
if (entry) {
if (isOptimizable(entry, optimizeDeps2)) {
if (!entry.endsWith("?__vite_skip_optimization")) {
deps[normalizedId] = entry;
}
} else {
unableToOptimize(id, "Cannot optimize dependency");
}
} else {
unableToOptimize(id, "Failed to resolve dependency");
}
}
}
}
}
function depsFromOptimizedDepInfo(depsInfo) {
const obj = {};
for (const key in depsInfo) {
obj[key] = depsInfo[key].src;
}
return obj;
}
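// Cache layout helpers. Optimized deps live under "<cacheDir>/deps" (browser) or
// "<cacheDir>/deps_ssr" (SSR), while in-progress runs write to "<cacheDir>/deps_temp_<hash>".
// With the default cache dir that is, illustratively, node_modules/.vite/deps/<flatId>.js.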
function getOptimizedDepPath(id, config, ssr) {
return normalizePath$3(
path$n.resolve(getDepsCacheDir(config, ssr), flattenId(id) + ".js")
);
}
function getDepsCacheSuffix(ssr) {
return ssr ? "_ssr" : "";
}
function getDepsCacheDir(config, ssr) {
return getDepsCacheDirPrefix(config) + getDepsCacheSuffix(ssr);
}
function getProcessingDepsCacheDir(config, ssr) {
return getDepsCacheDirPrefix(config) + getDepsCacheSuffix(ssr) + getTempSuffix();
}
function getTempSuffix() {
return "_temp_" + getHash(
`${process.pid}:${Date.now().toString()}:${Math.random().toString(16).slice(2)}`
);
}
function getDepsCacheDirPrefix(config) {
return normalizePath$3(path$n.resolve(config.cacheDir, "deps"));
}
function createIsOptimizedDepFile(config) {
const depsCacheDirPrefix = getDepsCacheDirPrefix(config);
return (id) => id.startsWith(depsCacheDirPrefix);
}
function createIsOptimizedDepUrl(config) {
const { root } = config;
const depsCacheDir = getDepsCacheDirPrefix(config);
const depsCacheDirRelative = normalizePath$3(path$n.relative(root, depsCacheDir));
const depsCacheDirPrefix = depsCacheDirRelative.startsWith("../") ? (
// if the cache directory is outside root, the url prefix would be something
// like '/@fs/absolute/path/to/node_modules/.vite'
`/@fs/${removeLeadingSlash(normalizePath$3(depsCacheDir))}`
) : (
// if the cache directory is inside root, the url prefix would be something
// like '/node_modules/.vite'
`/${depsCacheDirRelative}`
);
return function isOptimizedDepUrl(url) {
return url.startsWith(depsCacheDirPrefix);
};
}
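// (De)serialization of _metadata.json. `file`/`src` paths are stored relative to the
// deps cache dir and resolved back to absolute paths on load; metadata in an older
// format (no chunks, or optimized entries missing fileHash) is discarded. Shape sketch:
// { "hash": "...", "configHash": "...", "lockfileHash": "...", "browserHash": "...",
//   "optimized": { "<id>": { "src": "...", "file": "...", "fileHash": "...", "needsInterop": false } },
//   "chunks": { "<chunkId>": { "file": "..." } } }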
function parseDepsOptimizerMetadata(jsonMetadata, depsCacheDir) {
const { hash, lockfileHash, configHash, browserHash, optimized, chunks } = JSON.parse(jsonMetadata, (key, value) => {
if (key === "file" || key === "src") {
return normalizePath$3(path$n.resolve(depsCacheDir, value));
}
return value;
});
if (!chunks || Object.values(optimized).some((depInfo) => !depInfo.fileHash)) {
return;
}
const metadata = {
hash,
lockfileHash,
configHash,
browserHash,
optimized: {},
discovered: {},
chunks: {},
depInfoList: []
};
for (const id of Object.keys(optimized)) {
addOptimizedDepInfo(metadata, "optimized", {
...optimized[id],
id,
browserHash
});
}
for (const id of Object.keys(chunks)) {
addOptimizedDepInfo(metadata, "chunks", {
...chunks[id],
id,
browserHash,
needsInterop: false
});
}
return metadata;
}
function stringifyDepsOptimizerMetadata(metadata, depsCacheDir) {
const { hash, configHash, lockfileHash, browserHash, optimized, chunks } = metadata;
return JSON.stringify(
{
hash,
configHash,
lockfileHash,
browserHash,
optimized: Object.fromEntries(
Object.values(optimized).map(
({ id, src, file, fileHash, needsInterop: needsInterop2 }) => [
id,
{
src,
file,
fileHash,
needsInterop: needsInterop2
}
]
)
),
chunks: Object.fromEntries(
Object.values(chunks).map(({ id, file }) => [id, { file }])
)
},
(key, value) => {
if (key === "file" || key === "src") {
return normalizePath$3(path$n.relative(depsCacheDir, value));
}
return value;
},
2
);
}
function esbuildOutputFromId(outputs, id, cacheDirOutputPath) {
const cwd = process.cwd();
const flatId = flattenId(id) + ".js";
const normalizedOutputPath = normalizePath$3(
path$n.relative(cwd, path$n.join(cacheDirOutputPath, flatId))
);
const output = outputs[normalizedOutputPath];
if (output) {
return output;
}
for (const [key, value] of Object.entries(outputs)) {
if (normalizePath$3(path$n.relative(cwd, key)) === normalizedOutputPath) {
return value;
}
}
}
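// Detects a dep's exports and module syntax with the ES module lexer; if plain parsing
// fails, the file is first transformed with esbuild (defaulting to the jsx loader) and
// parsed again, and jsxLoader is recorded so the optimizer run enables ".js": "jsx".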
async function extractExportsData(filePath, config, ssr) {
await init;
const optimizeDeps2 = getDepOptimizationConfig(config, ssr);
const esbuildOptions = optimizeDeps2?.esbuildOptions ?? {};
if (optimizeDeps2.extensions?.some((ext) => filePath.endsWith(ext))) {
const result = await build$3({
...esbuildOptions,
entryPoints: [filePath],
write: false,
format: "esm"
});
const [, exports2, , hasModuleSyntax2] = parse$d(result.outputFiles[0].text);
return {
hasModuleSyntax: hasModuleSyntax2,
exports: exports2.map((e) => e.n)
};
}
let parseResult;
let usedJsxLoader = false;
const entryContent = await fsp.readFile(filePath, "utf-8");
try {
parseResult = parse$d(entryContent);
} catch {
const loader = esbuildOptions.loader?.[path$n.extname(filePath)] || "jsx";
debug$7?.(
`Unable to parse: ${filePath}.
Trying again with a ${loader} transform.`
);
const transformed = await transformWithEsbuild(entryContent, filePath, {
loader
});
parseResult = parse$d(transformed.code);
usedJsxLoader = true;
}
const [, exports, , hasModuleSyntax] = parseResult;
const exportsData = {
hasModuleSyntax,
exports: exports.map((e) => e.n),
jsxLoader: usedJsxLoader
};
return exportsData;
}
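// CJS interop heuristic: a dep needs interop when it is explicitly listed in
// optimizeDeps.needsInterop, when its source contains no ESM syntax, or when the
// esbuild output exposes no exports (or only a default export while the source had
// more than a lone default), which would otherwise lose named exports.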
function needsInterop(config, ssr, id, exportsData, output) {
if (getDepOptimizationConfig(config, ssr)?.needsInterop?.includes(id)) {
return true;
}
const { hasModuleSyntax, exports } = exportsData;
if (!hasModuleSyntax) {
return true;
}
if (output) {
const generatedExports = output.exports;
if (!generatedExports || isSingleDefaultExport(generatedExports) && !isSingleDefaultExport(exports)) {
return true;
}
}
return false;
}
function isSingleDefaultExport(exports) {
return exports.length === 1 && exports[0] === "default";
}
const lockfileFormats = [
{ name: "package-lock.json", checkPatches: true, manager: "npm" },
{ name: "yarn.lock", checkPatches: true, manager: "yarn" },
// Included in lockfile for v2+
{ name: "pnpm-lock.yaml", checkPatches: false, manager: "pnpm" },
// Included in lockfile
{ name: "bun.lockb", checkPatches: true, manager: "bun" }
].sort((_, { manager }) => {
return process.env.npm_config_user_agent?.startsWith(manager) ? 1 : -1;
});
const lockfileNames = lockfileFormats.map((l) => l.name);
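// Three hashes guard the cache: the lockfile hash (lockfile contents, plus the patches
// dir mtime for managers whose patches live outside the lockfile), the config hash
// (mode, root, resolve, plugin names and optimizeDeps options) and their combination,
// which becomes metadata.hash; browserHash additionally mixes in the discovered deps.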
function getConfigHash(config, ssr) {
const optimizeDeps2 = getDepOptimizationConfig(config, ssr);
const content = JSON.stringify(
{
mode: process.env.NODE_ENV || config.mode,
root: config.root,
resolve: config.resolve,
assetsInclude: config.assetsInclude,
plugins: config.plugins.map((p) => p.name),
optimizeDeps: {
include: optimizeDeps2?.include ? unique(optimizeDeps2.include).sort() : void 0,
exclude: optimizeDeps2?.exclude ? unique(optimizeDeps2.exclude).sort() : void 0,
esbuildOptions: {
...optimizeDeps2?.esbuildOptions,
plugins: optimizeDeps2?.esbuildOptions?.plugins?.map((p) => p.name)
}
}
},
(_, value) => {
if (typeof value === "function" || value instanceof RegExp) {
return value.toString();
}
return value;
}
);
return getHash(content);
}
function getLockfileHash(config, ssr) {
const lockfilePath = lookupFile(config.root, lockfileNames);
let content = lockfilePath ? fs__default.readFileSync(lockfilePath, "utf-8") : "";
if (lockfilePath) {
const lockfileName = path$n.basename(lockfilePath);
const { checkPatches } = lockfileFormats.find(
(f) => f.name === lockfileName
);
if (checkPatches) {
const fullPath = path$n.join(path$n.dirname(lockfilePath), "patches");
const stat = tryStatSync(fullPath);
if (stat?.isDirectory()) {
content += stat.mtimeMs.toString();
}
}
}
return getHash(content);
}
function getDepHash(config, ssr) {
const lockfileHash = getLockfileHash(config);
const configHash = getConfigHash(config, ssr);
const hash = getHash(lockfileHash + configHash);
return {
hash,
lockfileHash,
configHash
};
}
function getOptimizedBrowserHash(hash, deps, timestamp = "") {
return getHash(hash + JSON.stringify(deps) + timestamp);
}
function optimizedDepInfoFromId(metadata, id) {
return metadata.optimized[id] || metadata.discovered[id] || metadata.chunks[id];
}
function optimizedDepInfoFromFile(metadata, file) {
return metadata.depInfoList.find((depInfo) => depInfo.file === file);
}
function findOptimizedDepInfoInRecord(dependenciesInfo, callbackFn) {
for (const o of Object.keys(dependenciesInfo)) {
const info = dependenciesInfo[o];
if (callbackFn(info, o)) {
return info;
}
}
}
async function optimizedDepNeedsInterop(metadata, file, config, ssr) {
const depInfo = optimizedDepInfoFromFile(metadata, file);
if (depInfo?.src && depInfo.needsInterop === void 0) {
depInfo.exportsData ??= extractExportsData(depInfo.src, config, ssr);
depInfo.needsInterop = needsInterop(
config,
ssr,
depInfo.id,
await depInfo.exportsData
);
}
return depInfo?.needsInterop;
}
const MAX_TEMP_DIR_AGE_MS = 24 * 60 * 60 * 1e3;
async function cleanupDepsCacheStaleDirs(config) {
try {
const cacheDir = path$n.resolve(config.cacheDir);
if (fs__default.existsSync(cacheDir)) {
const dirents = await fsp.readdir(cacheDir, { withFileTypes: true });
for (const dirent of dirents) {
if (dirent.isDirectory() && dirent.name.includes("_temp_")) {
const tempDirPath = path$n.resolve(config.cacheDir, dirent.name);
const stats = await fsp.stat(tempDirPath).catch((_) => null);
if (stats?.mtime && Date.now() - stats.mtime.getTime() > MAX_TEMP_DIR_AGE_MS) {
debug$7?.(`removing stale cache temp dir ${tempDirPath}`);
await fsp.rm(tempDirPath, { recursive: true, force: true });
}
}
}
}
} catch (err) {
config.logger.error(err);
}
}
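// rename() can fail with EACCES/EPERM on Windows while antivirus or the indexer holds
// the directory, so gracefulRename retries with a small backoff for up to 5s before
// surrendering the original error (same pattern as graceful-fs).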
const GRACEFUL_RENAME_TIMEOUT = 5e3;
const safeRename = promisify$4(function gracefulRename(from, to, cb) {
const start = Date.now();
let backoff = 0;
fs__default.rename(from, to, function CB(er) {
if (er && (er.code === "EACCES" || er.code === "EPERM") && Date.now() - start < GRACEFUL_RENAME_TIMEOUT) {
setTimeout(function() {
fs__default.stat(to, function(stater, st) {
if (stater && stater.code === "ENOENT") fs__default.rename(from, to, CB);
else CB(er);
});
}, backoff);
if (backoff < 100) backoff += 10;
return;
}
if (cb) cb(er);
});
});
var index$1 = {
__proto__: null,
addManuallyIncludedOptimizeDeps: addManuallyIncludedOptimizeDeps,
addOptimizedDepInfo: addOptimizedDepInfo,
cleanupDepsCacheStaleDirs: cleanupDepsCacheStaleDirs,
createIsOptimizedDepFile: createIsOptimizedDepFile,
createIsOptimizedDepUrl: createIsOptimizedDepUrl,
depsFromOptimizedDepInfo: depsFromOptimizedDepInfo,
depsLogString: depsLogString,
discoverProjectDependencies: discoverProjectDependencies,
extractExportsData: extractExportsData,
getDepsCacheDir: getDepsCacheDir,
getDepsOptimizer: getDepsOptimizer,
getOptimizedDepPath: getOptimizedDepPath,
initDepsOptimizer: initDepsOptimizer,
initDepsOptimizerMetadata: initDepsOptimizerMetadata,
initDevSsrDepsOptimizer: initDevSsrDepsOptimizer,
loadCachedDepOptimizationMetadata: loadCachedDepOptimizationMetadata,
optimizeDeps: optimizeDeps,
optimizeServerSsrDeps: optimizeServerSsrDeps,
optimizedDepInfoFromFile: optimizedDepInfoFromFile,
optimizedDepInfoFromId: optimizedDepInfoFromId,
optimizedDepNeedsInterop: optimizedDepNeedsInterop,
runOptimizeDeps: runOptimizeDeps,
toDiscoveredDependencies: toDiscoveredDependencies
};
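// Inlined `totalist`: synchronously walks a directory tree and calls the callback with
// (relativeName, absolutePath, stats) for every file; used by sirv below to precompute
// its FILES table when not running in dev mode.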
function totalist(dir, callback, pre='') {
dir = resolve$3('.', dir);
let arr = readdirSync(dir);
let i=0, abs, stats;
for (; i < arr.length; i++) {
abs = join$1(dir, arr[i]);
stats = statSync$1(abs);
stats.isDirectory()
? totalist(abs, callback, join$1(pre, arr[i]))
: callback(join$1(pre, arr[i]), abs, stats);
}
}
/**
* @typedef ParsedURL
* @type {import('.').ParsedURL}
*/
/**
* @typedef Request
* @property {string} url
* @property {ParsedURL} _parsedUrl
*/
/**
* @param {Request} req
* @returns {ParsedURL|void}
*/
function parse$5(req) {
let raw = req.url;
if (raw == null) return;
let prev = req._parsedUrl;
if (prev && prev.raw === raw) return prev;
let pathname=raw, search='', query;
if (raw.length > 1) {
let idx = raw.indexOf('?', 1);
if (idx !== -1) {
search = raw.substring(idx);
pathname = raw.substring(0, idx);
if (search.length > 1) {
query = qs.parse(search.substring(1));
}
}
}
return req._parsedUrl = { pathname, search, query, raw };
}
const noop$2 = () => {};
function isMatch(uri, arr) {
for (let i=0; i < arr.length; i++) {
if (arr[i].test(uri)) return true;
}
}
function toAssume(uri, extns) {
let i=0, x, len=uri.length - 1;
if (uri.charCodeAt(len) === 47) {
uri = uri.substring(0, len);
}
let arr=[], tmp=`${uri}/index`;
for (; i < extns.length; i++) {
x = extns[i] ? `.${extns[i]}` : '';
if (uri) arr.push(uri + x);
arr.push(tmp + x);
}
return arr;
}
function viaCache(cache, uri, extns) {
let i=0, data, arr=toAssume(uri, extns);
for (; i < arr.length; i++) {
if (data = cache[arr[i]]) return data;
}
}
function viaLocal(dir, isEtag, uri, extns, shouldServe) {
let i=0, arr=toAssume(uri, extns);
let abs, stats, name, headers;
for (; i < arr.length; i++) {
abs = normalize$1(join$1(dir, name=arr[i]));
if (abs.startsWith(dir) && require$$0$2.existsSync(abs)) {
stats = require$$0$2.statSync(abs);
if (stats.isDirectory()) continue;
if (shouldServe && !shouldServe(abs)) continue;
headers = toHeaders(name, stats, isEtag);
headers['Cache-Control'] = isEtag ? 'no-cache' : 'no-store';
return { abs, stats, headers };
}
}
}
function is404(req, res) {
return (res.statusCode=404,res.end());
}
function send$1(req, res, file, stats, headers) {
let code=200, tmp, opts={};
headers = { ...headers };
for (let key in headers) {
tmp = res.getHeader(key);
if (tmp) headers[key] = tmp;
}
if (tmp = res.getHeader('content-type')) {
headers['Content-Type'] = tmp;
}
if (req.headers.range) {
code = 206;
let [x, y] = req.headers.range.replace('bytes=', '').split('-');
let end = opts.end = parseInt(y, 10) || stats.size - 1;
let start = opts.start = parseInt(x, 10) || 0;
if (end >= stats.size) {
end = stats.size - 1;
}
if (start >= stats.size) {
res.setHeader('Content-Range', `bytes */${stats.size}`);
res.statusCode = 416;
return res.end();
}
headers['Content-Range'] = `bytes ${start}-${end}/${stats.size}`;
headers['Content-Length'] = (end - start + 1);
headers['Accept-Ranges'] = 'bytes';
}
res.writeHead(code, headers);
require$$0$2.createReadStream(file, opts).pipe(res);
}
const ENCODING = {
'.br': 'br',
'.gz': 'gzip',
};
function toHeaders(name, stats, isEtag) {
let enc = ENCODING[name.slice(-3)];
let ctype = lookup(name.slice(0, enc && -3)) || '';
if (ctype === 'text/html') ctype += ';charset=utf-8';
let headers = {
'Content-Length': stats.size,
'Content-Type': ctype,
'Last-Modified': stats.mtime.toUTCString(),
};
if (enc) headers['Content-Encoding'] = enc;
if (isEtag) headers['ETag'] = `W/"${stats.size}-${stats.mtime.getTime()}"`;
return headers;
}
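// Inlined `sirv` static file middleware. Outside dev mode every file under `dir` is
// stat'ed up front into FILES; dev mode resolves candidates lazily via viaLocal. It
// handles precompressed .gz/.br variants, weak ETags, Range requests and an optional
// SPA fallback. Usage sketch (illustrative only, not part of this bundle):
//   const serve = sirv('./public', { etag: true });
//   http.createServer((req, res) => serve(req, res, () => { res.statusCode = 404; res.end(); }));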
function sirv (dir, opts={}) {
dir = resolve$3(dir || '.');
let isNotFound = opts.onNoMatch || is404;
let setHeaders = opts.setHeaders || noop$2;
let extensions = opts.extensions || ['html', 'htm'];
let gzips = opts.gzip && extensions.map(x => `${x}.gz`).concat('gz');
let brots = opts.brotli && extensions.map(x => `${x}.br`).concat('br');
const FILES = {};
let fallback = '/';
let isEtag = !!opts.etag;
let isSPA = !!opts.single;
if (typeof opts.single === 'string') {
let idx = opts.single.lastIndexOf('.');
fallback += !!~idx ? opts.single.substring(0, idx) : opts.single;
}
let ignores = [];
if (opts.ignores !== false) {
ignores.push(/[/]([A-Za-z\s\d~$._-]+\.\w+){1,}$/); // any extn
if (opts.dotfiles) ignores.push(/\/\.\w/);
else ignores.push(/\/\.well-known/);
[].concat(opts.ignores || []).forEach(x => {
ignores.push(new RegExp(x, 'i'));
});
}
let cc = opts.maxAge != null && `public,max-age=${opts.maxAge}`;
if (cc && opts.immutable) cc += ',immutable';
else if (cc && opts.maxAge === 0) cc += ',must-revalidate';
if (!opts.dev) {
totalist(dir, (name, abs, stats) => {
if (/\.well-known[\\+\/]/.test(name)) ; // keep
else if (!opts.dotfiles && /(^\.|[\\+|\/+]\.)/.test(name)) return;
let headers = toHeaders(name, stats, isEtag);
if (cc) headers['Cache-Control'] = cc;
FILES['/' + name.normalize().replace(/\\+/g, '/')] = { abs, stats, headers };
});
}
let lookup = opts.dev ? viaLocal.bind(0, dir, isEtag) : viaCache.bind(0, FILES);
return function (req, res, next) {
let extns = [''];
let pathname = parse$5(req).pathname;
let val = req.headers['accept-encoding'] || '';
if (gzips && val.includes('gzip')) extns.unshift(...gzips);
if (brots && /(br|brotli)/i.test(val)) extns.unshift(...brots);
extns.push(...extensions); // [...br, ...gz, orig, ...exts]
if (pathname.indexOf('%') !== -1) {
try { pathname = decodeURI(pathname); }
catch (err) { /* malform uri */ }
}
let data = lookup(pathname, extns, opts.shouldServe) || isSPA && !isMatch(pathname, ignores) && lookup(fallback, extns, opts.shouldServe);
if (!data) return next ? next() : isNotFound(req, res);
if (isEtag && req.headers['if-none-match'] === data.headers['ETag']) {
res.writeHead(304);
return res.end();
}
if (gzips || brots) {
res.setHeader('Vary', 'Accept-Encoding');
}
setHeaders(res, pathname, data.stats);
send$1(req, res, data.abs, data.stats, data.headers);
};
}
const knownJavascriptExtensionRE = /\.[tj]sx?$/;
const sirvOptions = ({
getHeaders
}) => {
return {
dev: true,
etag: true,
extensions: [],
setHeaders(res, pathname) {
if (knownJavascriptExtensionRE.test(pathname)) {
res.setHeader("Content-Type", "text/javascript");
}
const headers = getHeaders();
if (headers) {
for (const name in headers) {
res.setHeader(name, headers[name]);
}
}
}
};
};
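// Serves files straight from config.publicDir. Requests that are not known public
// files, or that look like import/internal requests, fall through to the next
// middleware so source modules are never shadowed by public assets.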
function servePublicMiddleware(server, publicFiles) {
const dir = server.config.publicDir;
const serve = sirv(
dir,
sirvOptions({
getHeaders: () => server.config.server.headers
})
);
const toFilePath = (url) => {
let filePath = cleanUrl(url);
if (filePath.indexOf("%") !== -1) {
try {
filePath = decodeURI(filePath);
} catch (err) {
}
}
return normalizePath$3(filePath);
};
return function viteServePublicMiddleware(req, res, next) {
if (publicFiles && !publicFiles.has(toFilePath(req.url)) || isImportRequest(req.url) || isInternalRequest(req.url)) {
return next();
}
serve(req, res, next);
};
}
function serveStaticMiddleware(server) {
const dir = server.config.root;
const serve = sirv(
dir,
sirvOptions({
getHeaders: () => server.config.server.headers
})
);
return function viteServeStaticMiddleware(req, res, next) {
const cleanedUrl = cleanUrl(req.url);
if (cleanedUrl[cleanedUrl.length - 1] === "/" || path$n.extname(cleanedUrl) === ".html" || isInternalRequest(req.url)) {
return next();
}
const url = new URL(req.url.replace(/^\/{2,}/, "/"), "http://example.com");
const pathname = decodeURI(url.pathname);
let redirectedPathname;
for (const { find, replacement } of server.config.resolve.alias) {
const matches = typeof find === "string" ? pathname.startsWith(find) : find.test(pathname);
if (matches) {
redirectedPathname = pathname.replace(find, replacement);
break;
}
}
if (redirectedPathname) {
if (redirectedPathname.startsWith(withTrailingSlash(dir))) {
redirectedPathname = redirectedPathname.slice(dir.length);
}
}
const resolvedPathname = redirectedPathname || pathname;
let fileUrl = path$n.resolve(dir, removeLeadingSlash(resolvedPathname));
if (resolvedPathname[resolvedPathname.length - 1] === "/" && fileUrl[fileUrl.length - 1] !== "/") {
fileUrl = withTrailingSlash(fileUrl);
}
if (!ensureServingAccess(fileUrl, server, res, next)) {
return;
}
if (redirectedPathname) {
url.pathname = encodeURI(redirectedPathname);
req.url = url.href.slice(url.origin.length);
}
serve(req, res, next);
};
}
function serveRawFsMiddleware(server) {
const serveFromRoot = sirv(
"/",
sirvOptions({ getHeaders: () => server.config.server.headers })
);
return function viteServeRawFsMiddleware(req, res, next) {
const url = new URL(req.url.replace(/^\/{2,}/, "/"), "http://example.com");
if (url.pathname.startsWith(FS_PREFIX)) {
const pathname = decodeURI(url.pathname);
if (!ensureServingAccess(
slash$1(path$n.resolve(fsPathFromId(pathname))),
server,
res,
next
)) {
return;
}
let newPathname = pathname.slice(FS_PREFIX.length);
if (isWindows$3) newPathname = newPathname.replace(/^[A-Z]:/i, "");
url.pathname = encodeURI(newPathname);
req.url = url.href.slice(url.origin.length);
serveFromRoot(req, res, next);
} else {
next();
}
};
}
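// server.fs allow-list check: with fs.strict enabled, a file may be served only when it
// is not matched by the deny globs and either sits in moduleGraph.safeModulesPath or
// under one of the server.fs.allow roots. ensureServingAccess responds with 403 and a
// hint when a readable file is outside the allow list, and defers to next() otherwise.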
function isFileServingAllowed(url, server) {
if (!server.config.server.fs.strict) return true;
const file = fsPathFromUrl(url);
if (server._fsDenyGlob(file)) return false;
if (server.moduleGraph.safeModulesPath.has(file)) return true;
if (server.config.server.fs.allow.some(
(uri) => isSameFileUri(uri, file) || isParentDirectory(uri, file)
))
return true;
return false;
}
function ensureServingAccess(url, server, res, next) {
if (isFileServingAllowed(url, server)) {
return true;
}
if (isFileReadable(cleanUrl(url))) {
const urlMessage = `The request url "${url}" is outside of Vite serving allow list.`;
const hintMessage = `
${server.config.server.fs.allow.map((i) => `- ${i}`).join("\n")}
Refer to docs https://vitejs.dev/config/server-options.html#server-fs-allow for configurations and more details.`;
server.config.logger.error(urlMessage);
server.config.logger.warnOnce(hintMessage + "\n");
res.statusCode = 403;
res.write(renderRestrictedErrorHTML(urlMessage + "\n" + hintMessage));
res.end();
} else {
next();
}
return false;
}
function renderRestrictedErrorHTML(msg) {
const html = String.raw;
return html`
<body>
<h1>403 Restricted</h1>
<p>${escapeHtml$2(msg).replace(/\n/g, "<br/>")}</p>
<style>
body {
padding: 1em 2em;
}
</style>
</body>
`;
}
const ERR_LOAD_URL = "ERR_LOAD_URL";
const ERR_LOAD_PUBLIC_URL = "ERR_LOAD_PUBLIC_URL";
const debugLoad = createDebugger("vite:load");
const debugTransform = createDebugger("vite:transform");
const debugCache$1 = createDebugger("vite:cache");
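// De-duplicates concurrent transform requests per url (cache key prefixed with "ssr:"
// or "html:"): a pending request is reused unless the module was invalidated after the
// pending request started, in which case the stale entry is aborted and re-issued.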
function transformRequest(url, server, options = {}) {
if (server._restartPromise && !options.ssr) throwClosedServerError();
const cacheKey = (options.ssr ? "ssr:" : options.html ? "html:" : "") + url;
const timestamp = Date.now();
const pending = server._pendingRequests.get(cacheKey);
if (pending) {
return server.moduleGraph.getModuleByUrl(removeTimestampQuery(url), options.ssr).then((module) => {
if (!module || pending.timestamp > module.lastInvalidationTimestamp) {
return pending.request;
} else {
pending.abort();
return transformRequest(url, server, options);
}
});
}
const request = doTransform(url, server, options, timestamp);
let cleared = false;
const clearCache = () => {
if (!cleared) {
server._pendingRequests.delete(cacheKey);
cleared = true;
}
};
server._pendingRequests.set(cacheKey, {
request,
timestamp,
abort: clearCache
});
return request.finally(clearCache);
}
async function doTransform(url, server, options, timestamp) {
url = removeTimestampQuery(url);
const { config, pluginContainer } = server;
const ssr = !!options.ssr;
if (ssr && isDepsOptimizerEnabled(config, true)) {
await initDevSsrDepsOptimizer(config, server);
}
let module = await server.moduleGraph.getModuleByUrl(url, ssr);
if (module) {
const cached = await getCachedTransformResult(
url,
module,
server,
ssr,
timestamp
);
if (cached) return cached;
}
const resolved = module ? void 0 : await pluginContainer.resolveId(url, void 0, { ssr }) ?? void 0;
const id = module?.id ?? resolved?.id ?? url;
module ??= server.moduleGraph.getModuleById(id);
if (module) {
await server.moduleGraph._ensureEntryFromUrl(url, ssr, void 0, resolved);
const cached = await getCachedTransformResult(
url,
module,
server,
ssr,
timestamp
);
if (cached) return cached;
}
const result = loadAndTransform(
id,
url,
server,
options,
timestamp,
module,
resolved
);
if (!ssr) {
const depsOptimizer = getDepsOptimizer(config, ssr);
if (!depsOptimizer?.isOptimizedDepFile(id)) {
server._registerRequestProcessing(id, () => result);
}
}
return result;
}
async function getCachedTransformResult(url, module, server, ssr, timestamp) {
const prettyUrl = debugCache$1 ? prettifyUrl(url, server.config.root) : "";
const softInvalidatedTransformResult = module && await handleModuleSoftInvalidation(module, ssr, timestamp, server);
if (softInvalidatedTransformResult) {
debugCache$1?.(`[memory-hmr] ${prettyUrl}`);
return softInvalidatedTransformResult;
}
const cached = module && (ssr ? module.ssrTransformResult : module.transformResult);
if (cached) {
debugCache$1?.(`[memory] ${prettyUrl}`);
return cached;
}
}
async function loadAndTransform(id, url, server, options, timestamp, mod, resolved) {
const { config, pluginContainer, moduleGraph } = server;
const { logger } = config;
const prettyUrl = debugLoad || debugTransform ? prettifyUrl(url, config.root) : "";
const ssr = !!options.ssr;
const file = cleanUrl(id);
let code = null;
let map = null;
const loadStart = debugLoad ? performance$1.now() : 0;
const loadResult = await pluginContainer.load(id, { ssr });
if (loadResult == null) {
if (options.html && !id.endsWith(".html")) {
return null;
}
if (options.ssr || isFileServingAllowed(file, server)) {
try {
code = await fsp.readFile(file, "utf-8");
debugLoad?.(`${timeFrom(loadStart)} [fs] ${prettyUrl}`);
} catch (e) {
if (e.code !== "ENOENT") {
if (e.code === "EISDIR") {
e.message = `${e.message} ${file}`;
}
throw e;
}
}
if (code != null) {
ensureWatchedFile(server.watcher, file, config.root);
}
}
if (code) {
try {
const extracted = await extractSourcemapFromFile(code, file);
if (extracted) {
code = extracted.code;
map = extracted.map;
}
} catch (e) {
logger.warn(`Failed to load source map for ${file}.
${e}`, {
timestamp: true
});
}
}
} else {
debugLoad?.(`${timeFrom(loadStart)} [plugin] ${prettyUrl}`);
if (isObject$1(loadResult)) {
code = loadResult.code;
map = loadResult.map;
} else {
code = loadResult;
}
}
if (code == null) {
const isPublicFile = checkPublicFile(url, config);
let publicDirName = path$n.relative(config.root, config.publicDir);
if (publicDirName[0] !== ".") publicDirName = "/" + publicDirName;
const msg = isPublicFile ? `This file is in ${publicDirName} and will be copied as-is during build without going through the plugin transforms, and therefore should not be imported from source code. It can only be referenced via HTML tags.` : `Does the file exist?`;
const importerMod = server.moduleGraph.idToModuleMap.get(id)?.importers.values().next().value;
const importer = importerMod?.file || importerMod?.url;
const err = new Error(
`Failed to load url ${url} (resolved id: ${id})${importer ? ` in ${importer}` : ""}. ${msg}`
);
err.code = isPublicFile ? ERR_LOAD_PUBLIC_URL : ERR_LOAD_URL;
throw err;
}
if (server._restartPromise && !ssr) throwClosedServerError();
mod ??= await moduleGraph._ensureEntryFromUrl(url, ssr, void 0, resolved);
const transformStart = debugTransform ? performance$1.now() : 0;
const transformResult = await pluginContainer.transform(code, id, {
inMap: map,
ssr
});
const originalCode = code;
if (transformResult == null || isObject$1(transformResult) && transformResult.code == null) {
debugTransform?.(
timeFrom(transformStart) + colors$1.dim(` [skipped] ${prettyUrl}`)
);
} else {
debugTransform?.(`${timeFrom(transformStart)} ${prettyUrl}`);
code = transformResult.code;
map = transformResult.map;
}
let normalizedMap;
if (typeof map === "string") {
normalizedMap = JSON.parse(map);
} else if (map) {
normalizedMap = map;
} else {
normalizedMap = null;
}
if (normalizedMap && "version" in normalizedMap && mod.file) {
if (normalizedMap.mappings) {
await injectSourcesContent(normalizedMap, mod.file, logger);
}
const sourcemapPath = `${mod.file}.map`;
applySourcemapIgnoreList(
normalizedMap,
sourcemapPath,
config.server.sourcemapIgnoreList,
logger
);
if (path$n.isAbsolute(mod.file)) {
let modDirname;
for (let sourcesIndex = 0; sourcesIndex < normalizedMap.sources.length; ++sourcesIndex) {
const sourcePath = normalizedMap.sources[sourcesIndex];
if (sourcePath) {
if (path$n.isAbsolute(sourcePath)) {
modDirname ??= path$n.dirname(mod.file);
normalizedMap.sources[sourcesIndex] = path$n.relative(
modDirname,
sourcePath
);
}
}
}
}
}
if (server._restartPromise && !ssr) throwClosedServerError();
const result = ssr && !server.config.experimental.skipSsrTransform ? await server.ssrTransform(code, normalizedMap, url, originalCode) : {
code,
map: normalizedMap,
etag: getEtag(code, { weak: true })
};
if (timestamp > mod.lastInvalidationTimestamp)
moduleGraph.updateModuleTransformResult(mod, result, ssr);
return result;
}
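// Soft invalidation fast path: instead of re-running load/transform plugins, the
// previous transform result is reused and only the `t=` timestamp queries of imports
// whose targets were hot-updated are rewritten with magic-string, yielding a new etag.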
async function handleModuleSoftInvalidation(mod, ssr, timestamp, server) {
const transformResult = ssr ? mod.ssrInvalidationState : mod.invalidationState;
if (ssr) mod.ssrInvalidationState = void 0;
else mod.invalidationState = void 0;
if (!transformResult || transformResult === "HARD_INVALIDATED") return;
if (ssr ? mod.ssrTransformResult : mod.transformResult) {
throw new Error(
`Internal server error: Soft-invalidated module "${mod.url}" should not have existing transform result`
);
}
let result;
if (ssr) {
result = transformResult;
} else {
await init;
const source = transformResult.code;
const s = new MagicString(source);
const [imports] = parse$d(source, mod.id || void 0);
for (const imp of imports) {
let rawUrl = source.slice(imp.s, imp.e);
if (rawUrl === "import.meta") continue;
const hasQuotes = rawUrl[0] === '"' || rawUrl[0] === "'";
if (hasQuotes) {
rawUrl = rawUrl.slice(1, -1);
}
const urlWithoutTimestamp = removeTimestampQuery(rawUrl);
const hmrUrl = unwrapId$1(
stripBase(removeImportQuery(urlWithoutTimestamp), server.config.base)
);
for (const importedMod of mod.clientImportedModules) {
if (importedMod.url !== hmrUrl) continue;
if (importedMod.lastHMRTimestamp > 0) {
const replacedUrl = injectQuery(
urlWithoutTimestamp,
`t=${importedMod.lastHMRTimestamp}`
);
const start = hasQuotes ? imp.s + 1 : imp.s;
const end = hasQuotes ? imp.e - 1 : imp.e;
s.overwrite(start, end, replacedUrl);
}
if (imp.d === -1 && server.config.server.preTransformRequests) {
server.warmupRequest(hmrUrl, { ssr });
}
break;
}
}
const code = s.toString();
result = {
...transformResult,
code,
etag: getEtag(code, { weak: true })
};
}
if (timestamp > mod.lastInvalidationTimestamp)
server.moduleGraph.updateModuleTransformResult(mod, result, ssr);
return result;
}
function analyzeImportedModDifference(mod, rawId, moduleType, metadata) {
if (metadata?.isDynamicImport) return;
if (metadata?.importedNames?.length) {
const missingBindings = metadata.importedNames.filter((s) => !(s in mod));
if (missingBindings.length) {
const lastBinding = missingBindings[missingBindings.length - 1];
if (moduleType === "module") {
throw new SyntaxError(
`[vite] The requested module '${rawId}' does not provide an export named '${lastBinding}'`
);
} else {
throw new SyntaxError(`[vite] Named export '${lastBinding}' not found. The requested module '${rawId}' is a CommonJS module, which may not support all module.exports as named exports.
CommonJS modules can always be imported via the default export, for example using:
import pkg from '${rawId}';
const {${missingBindings.join(", ")}} = pkg;
`);
}
}
}
}
/**
* @param {import('estree').Node} param
* @returns {string[]}
*/
function extract_names(param) {
return extract_identifiers(param).map((node) => node.name);
}
/**
* @param {import('estree').Node} param
* @param {import('estree').Identifier[]} nodes
* @returns {import('estree').Identifier[]}
*/
function extract_identifiers(param, nodes = []) {
switch (param.type) {
case 'Identifier':
nodes.push(param);
break;
case 'MemberExpression':
let object = param;
while (object.type === 'MemberExpression') {
object = /** @type {any} */ (object.object);
}
nodes.push(/** @type {any} */ (object));
break;
case 'ObjectPattern':
for (const prop of param.properties) {
if (prop.type === 'RestElement') {
extract_identifiers(prop.argument, nodes);
} else {
extract_identifiers(prop.value, nodes);
}
}
break;
case 'ArrayPattern':
for (const element of param.elements) {
if (element) extract_identifiers(element, nodes);
}
break;
case 'RestElement':
extract_identifiers(param.argument, nodes);
break;
case 'AssignmentPattern':
extract_identifiers(param.left, nodes);
break;
}
return nodes;
}
/**
* @typedef { import('estree').Node} Node
* @typedef {{
* skip: () => void;
* remove: () => void;
* replace: (node: Node) => void;
* }} WalkerContext
*/
class WalkerBase {
constructor() {
/** @type {boolean} */
this.should_skip = false;
/** @type {boolean} */
this.should_remove = false;
/** @type {Node | null} */
this.replacement = null;
/** @type {WalkerContext} */
this.context = {
skip: () => (this.should_skip = true),
remove: () => (this.should_remove = true),
replace: (node) => (this.replacement = node)
};
}
/**
* @template {Node} Parent
* @param {Parent | null | undefined} parent
* @param {keyof Parent | null | undefined} prop
* @param {number | null | undefined} index
* @param {Node} node
*/
replace(parent, prop, index, node) {
if (parent && prop) {
if (index != null) {
/** @type {Array<Node>} */ (parent[prop])[index] = node;
} else {
/** @type {Node} */ (parent[prop]) = node;
}
}
}
/**
* @template {Node} Parent
* @param {Parent | null | undefined} parent
* @param {keyof Parent | null | undefined} prop
* @param {number | null | undefined} index
*/
remove(parent, prop, index) {
if (parent && prop) {
if (index !== null && index !== undefined) {
/** @type {Array<Node>} */ (parent[prop]).splice(index, 1);
} else {
delete parent[prop];
}
}
}
}
/**
* @typedef { import('estree').Node} Node
* @typedef { import('./walker.js').WalkerContext} WalkerContext
* @typedef {(
* this: WalkerContext,
* node: Node,
* parent: Node | null,
* key: string | number | symbol | null | undefined,
* index: number | null | undefined
* ) => void} SyncHandler
*/
class SyncWalker extends WalkerBase {
/**
*
* @param {SyncHandler} [enter]
* @param {SyncHandler} [leave]
*/
constructor(enter, leave) {
super();
/** @type {boolean} */
this.should_skip = false;
/** @type {boolean} */
this.should_remove = false;
/** @type {Node | null} */
this.replacement = null;
/** @type {WalkerContext} */
this.context = {
skip: () => (this.should_skip = true),
remove: () => (this.should_remove = true),
replace: (node) => (this.replacement = node)
};
/** @type {SyncHandler | undefined} */
this.enter = enter;
/** @type {SyncHandler | undefined} */
this.leave = leave;
}
/**
* @template {Node} Parent
* @param {Node} node
* @param {Parent | null} parent
* @param {keyof Parent} [prop]
* @param {number | null} [index]
* @returns {Node | null}
*/
visit(node, parent, prop, index) {
if (node) {
if (this.enter) {
const _should_skip = this.should_skip;
const _should_remove = this.should_remove;
const _replacement = this.replacement;
this.should_skip = false;
this.should_remove = false;
this.replacement = null;
this.enter.call(this.context, node, parent, prop, index);
if (this.replacement) {
node = this.replacement;
this.replace(parent, prop, index, node);
}
if (this.should_remove) {
this.remove(parent, prop, index);
}
const skipped = this.should_skip;
const removed = this.should_remove;
this.should_skip = _should_skip;
this.should_remove = _should_remove;
this.replacement = _replacement;
if (skipped) return node;
if (removed) return null;
}
/** @type {keyof Node} */
let key;
for (key in node) {
/** @type {unknown} */
const value = node[key];
if (value && typeof value === 'object') {
if (Array.isArray(value)) {
const nodes = /** @type {Array<unknown>} */ (value);
for (let i = 0; i < nodes.length; i += 1) {
const item = nodes[i];
if (isNode(item)) {
if (!this.visit(item, node, key, i)) {
// removed
i--;
}
}
}
} else if (isNode(value)) {
this.visit(value, node, key, null);
}
}
}
if (this.leave) {
const _replacement = this.replacement;
const _should_remove = this.should_remove;
this.replacement = null;
this.should_remove = false;
this.leave.call(this.context, node, parent, prop, index);
if (this.replacement) {
node = this.replacement;
this.replace(parent, prop, index, node);
}
if (this.should_remove) {
this.remove(parent, prop, index);
}
const removed = this.should_remove;
this.replacement = _replacement;
this.should_remove = _should_remove;
if (removed) return null;
}
}
return node;
}
}
/**
* Ducktype a node.
*
* @param {unknown} value
* @returns {value is Node}
*/
function isNode(value) {
return (
value !== null && typeof value === 'object' && 'type' in value && typeof value.type === 'string'
);
}
/**
* @typedef {import('estree').Node} Node
* @typedef {import('./sync.js').SyncHandler} SyncHandler
* @typedef {import('./async.js').AsyncHandler} AsyncHandler
*/
/**
* @param {Node} ast
* @param {{
* enter?: SyncHandler
* leave?: SyncHandler
* }} walker
* @returns {Node | null}
*/
function walk$1(ast, { enter, leave }) {
const instance = new SyncWalker(enter, leave);
return instance.visit(ast, null);
}
const ssrModuleExportsKey = `__vite_ssr_exports__`;
const ssrImportKey = `__vite_ssr_import__`;
const ssrDynamicImportKey = `__vite_ssr_dynamic_import__`;
const ssrExportAllKey = `__vite_ssr_exportAll__`;
const ssrImportMetaKey = `__vite_ssr_import_meta__`;
const hashbangRE = /^#!.*\n/;
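// SSR module transform: rewrites ESM syntax into calls against the __vite_ssr_* runtime
// (imports become awaited __vite_ssr_import__ calls hoisted to the top of the module,
// exports become defineProperty accessors on __vite_ssr_exports__, and import.meta and
// dynamic import are rewritten) while collecting static and dynamic dependency ids.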
async function ssrTransform(code, inMap, url, originalCode, options) {
if (options?.json?.stringify && isJSONRequest(url)) {
return ssrTransformJSON(code, inMap);
}
return ssrTransformScript(code, inMap, url, originalCode);
}
async function ssrTransformJSON(code, inMap) {
return {
code: code.replace("export default", `${ssrModuleExportsKey}.default =`),
map: inMap,
deps: [],
dynamicDeps: []
};
}
async function ssrTransformScript(code, inMap, url, originalCode) {
const s = new MagicString(code);
let ast;
try {
ast = await parseAstAsync(code);
} catch (err) {
if (!err.loc || !err.loc.line) throw err;
const line = err.loc.line;
throw new Error(
`Parse failure: ${err.message}
At file: ${url}
Contents of line ${line}: ${code.split("\n")[line - 1]}`
);
}
let uid = 0;
const deps = /* @__PURE__ */ new Set();
const dynamicDeps = /* @__PURE__ */ new Set();
const idToImportMap = /* @__PURE__ */ new Map();
const declaredConst = /* @__PURE__ */ new Set();
const hoistIndex = hashbangRE.exec(code)?.[0].length ?? 0;
function defineImport(index, source, metadata) {
deps.add(source);
const importId = `__vite_ssr_import_${uid++}__`;
if (metadata && (metadata.importedNames == null || metadata.importedNames.length === 0)) {
metadata = void 0;
}
const metadataStr = metadata ? `, ${JSON.stringify(metadata)}` : "";
s.appendLeft(
index,
`const ${importId} = await ${ssrImportKey}(${JSON.stringify(
source
)}${metadataStr});
`
);
return importId;
}
function defineExport(position, name, local = name) {
s.appendLeft(
position,
`
Object.defineProperty(${ssrModuleExportsKey}, "${name}", { enumerable: true, configurable: true, get(){ return ${local} }});`
);
}
const imports = [];
const exports = [];
for (const node of ast.body) {
if (node.type === "ImportDeclaration") {
imports.push(node);
} else if (node.type === "ExportNamedDeclaration" || node.type === "ExportDefaultDeclaration" || node.type === "ExportAllDeclaration") {
exports.push(node);
}
}
for (const node of imports) {
const importId = defineImport(hoistIndex, node.source.value, {
importedNames: node.specifiers.map((s2) => {
if (s2.type === "ImportSpecifier")
return s2.imported.type === "Identifier" ? s2.imported.name : (
// @ts-expect-error TODO: Estree types don't consider arbitrary module namespace specifiers yet
s2.imported.value
);
else if (s2.type === "ImportDefaultSpecifier") return "default";
}).filter(isDefined)
});
s.remove(node.start, node.end);
for (const spec of node.specifiers) {
if (spec.type === "ImportSpecifier") {
if (spec.imported.type === "Identifier") {
idToImportMap.set(
spec.local.name,
`${importId}.${spec.imported.name}`
);
} else {
idToImportMap.set(
spec.local.name,
`${importId}[${// @ts-expect-error TODO: Estree types don't consider arbitrary module namespace specifiers yet
JSON.stringify(spec.imported.value)}]`
);
}
} else if (spec.type === "ImportDefaultSpecifier") {
idToImportMap.set(spec.local.name, `${importId}.default`);
} else {
idToImportMap.set(spec.local.name, importId);
}
}
}
for (const node of exports) {
if (node.type === "ExportNamedDeclaration") {
if (node.declaration) {
if (node.declaration.type === "FunctionDeclaration" || node.declaration.type === "ClassDeclaration") {
defineExport(node.end, node.declaration.id.name);
} else {
for (const declaration of node.declaration.declarations) {
const names = extract_names(declaration.id);
for (const name of names) {
defineExport(node.end, name);
}
}
}
s.remove(node.start, node.declaration.start);
} else {
s.remove(node.start, node.end);
if (node.source) {
const importId = defineImport(
node.start,
node.source.value,
{
importedNames: node.specifiers.map((s2) => s2.local.name)
}
);
for (const spec of node.specifiers) {
const exportedAs = spec.exported.type === "Identifier" ? spec.exported.name : (
// @ts-expect-error TODO: Estree types don't consider arbitrary module namespace specifiers yet
spec.exported.value
);
defineExport(
node.start,
exportedAs,
`${importId}.${spec.local.name}`
);
}
} else {
for (const spec of node.specifiers) {
const local = spec.local.name;
const binding = idToImportMap.get(local);
const exportedAs = spec.exported.type === "Identifier" ? spec.exported.name : (
// @ts-expect-error TODO: Estree types don't consider arbitrary module namespace specifiers yet
spec.exported.value
);
defineExport(node.end, exportedAs, binding || local);
}
}
}
}
if (node.type === "ExportDefaultDeclaration") {
const expressionTypes = ["FunctionExpression", "ClassExpression"];
if ("id" in node.declaration && node.declaration.id && !expressionTypes.includes(node.declaration.type)) {
const { name } = node.declaration.id;
s.remove(
node.start,
node.start + 15
/* 'export default '.length */
);
s.append(
`
Object.defineProperty(${ssrModuleExportsKey}, "default", { enumerable: true, configurable: true, value: ${name} });`
);
} else {
s.update(
node.start,
node.start + 14,
`${ssrModuleExportsKey}.default =`
);
}
}
if (node.type === "ExportAllDeclaration") {
s.remove(node.start, node.end);
const importId = defineImport(node.start, node.source.value);
if (node.exported) {
defineExport(node.start, node.exported.name, `${importId}`);
} else {
s.appendLeft(node.start, `${ssrExportAllKey}(${importId});
`);
}
}
}
walk(ast, {
onIdentifier(id, parent, parentStack) {
const grandparent = parentStack[1];
const binding = idToImportMap.get(id.name);
if (!binding) {
return;
}
if (isStaticProperty(parent) && parent.shorthand) {
if (!isNodeInPattern(parent) || isInDestructuringAssignment(parent, parentStack)) {
s.appendLeft(id.end, `: ${binding}`);
}
} else if (parent.type === "PropertyDefinition" && grandparent?.type === "ClassBody" || parent.type === "ClassDeclaration" && id === parent.superClass) {
if (!declaredConst.has(id.name)) {
declaredConst.add(id.name);
const topNode = parentStack[parentStack.length - 2];
s.prependRight(topNode.start, `const ${id.name} = ${binding};
`);
}
} else if (
// don't transform class name identifier
!(parent.type === "ClassExpression" && id === parent.id)
) {
s.update(id.start, id.end, binding);
}
},
onImportMeta(node) {
s.update(node.start, node.end, ssrImportMetaKey);
},
onDynamicImport(node) {
s.update(node.start, node.start + 6, ssrDynamicImportKey);
if (node.type === "ImportExpression" && node.source.type === "Literal") {
dynamicDeps.add(node.source.value);
}
}
});
let map = s.generateMap({ hires: "boundary" });
if (inMap && inMap.mappings && "sources" in inMap && inMap.sources.length > 0) {
map = combineSourcemaps(url, [
{
...map,
sources: inMap.sources,
sourcesContent: inMap.sourcesContent
},
inMap
]);
} else {
map.sources = [path$n.basename(url)];
map.sourcesContent = [originalCode];
}
return {
code: s.toString(),
map,
deps: [...deps],
dynamicDeps: [...dynamicDeps]
};
}
const isNodeInPatternWeakSet = /* @__PURE__ */ new WeakSet();
const setIsNodeInPattern = (node) => isNodeInPatternWeakSet.add(node);
const isNodeInPattern = (node) => isNodeInPatternWeakSet.has(node);
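// Scope-aware identifier walk used by ssrTransformScript: declared names are tracked
// per function/block scope so only free references to imported bindings get rewritten,
// while import.meta and dynamic import nodes are dispatched to the provided callbacks.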
function walk(root, { onIdentifier, onImportMeta, onDynamicImport }) {
const parentStack = [];
const varKindStack = [];
const scopeMap = /* @__PURE__ */ new WeakMap();
const identifiers = [];
const setScope = (node, name) => {
let scopeIds = scopeMap.get(node);
if (scopeIds && scopeIds.has(name)) {
return;
}
if (!scopeIds) {
scopeIds = /* @__PURE__ */ new Set();
scopeMap.set(node, scopeIds);
}
scopeIds.add(name);
};
function isInScope(name, parents) {
return parents.some((node) => node && scopeMap.get(node)?.has(name));
}
function handlePattern(p, parentScope) {
if (p.type === "Identifier") {
setScope(parentScope, p.name);
} else if (p.type === "RestElement") {
handlePattern(p.argument, parentScope);
} else if (p.type === "ObjectPattern") {
p.properties.forEach((property) => {
if (property.type === "RestElement") {
setScope(parentScope, property.argument.name);
} else {
handlePattern(property.value, parentScope);
}
});
} else if (p.type === "ArrayPattern") {
p.elements.forEach((element) => {
if (element) {
handlePattern(element, parentScope);
}
});
} else if (p.type === "AssignmentPattern") {
handlePattern(p.left, parentScope);
} else {
setScope(parentScope, p.name);
}
}
walk$1(root, {
enter(node, parent) {
if (node.type === "ImportDeclaration") {
return this.skip();
}
if (parent && !(parent.type === "IfStatement" && node === parent.alternate)) {
parentStack.unshift(parent);
}
if (node.type === "VariableDeclaration") {
varKindStack.unshift(node.kind);
}
if (node.type === "MetaProperty" && node.meta.name === "import") {
onImportMeta(node);
} else if (node.type === "ImportExpression") {
onDynamicImport(node);
}
if (node.type === "Identifier") {
if (!isInScope(node.name, parentStack) && isRefIdentifier(node, parent, parentStack)) {
identifiers.push([node, parentStack.slice(0)]);
}
} else if (isFunction$1(node)) {
if (node.type === "FunctionDeclaration") {
const parentScope = findParentScope(parentStack);
if (parentScope) {
setScope(parentScope, node.id.name);
}
}
if (node.type === "FunctionExpression" && node.id) {
setScope(node, node.id.name);
}
node.params.forEach((p) => {
if (p.type === "ObjectPattern" || p.type === "ArrayPattern") {
handlePattern(p, node);
return;
}
walk$1(p.type === "AssignmentPattern" ? p.left : p, {
enter(child, parent2) {
if (parent2?.type === "AssignmentPattern" && parent2?.right === child) {
return this.skip();
}
if (child.type !== "Identifier") return;
if (isStaticPropertyKey(child, parent2)) return;
if (parent2?.type === "TemplateLiteral" && parent2?.expressions.includes(child) || parent2?.type === "CallExpression" && parent2?.callee === child) {
return;
}
setScope(node, child.name);
}
});
});
} else if (node.type === "ClassDeclaration") {
const parentScope = findParentScope(parentStack);
if (parentScope) {
setScope(parentScope, node.id.name);
}
} else if (node.type === "ClassExpression" && node.id) {
setScope(node, node.id.name);
} else if (node.type === "Property" && parent.type === "ObjectPattern") {
setIsNodeInPattern(node);
} else if (node.type === "VariableDeclarator") {
const parentFunction = findParentScope(
parentStack,
varKindStack[0] === "var"
);
if (parentFunction) {
handlePattern(node.id, parentFunction);
}
} else if (node.type === "CatchClause" && node.param) {
handlePattern(node.param, node);
}
},
leave(node, parent) {
if (parent && !(parent.type === "IfStatement" && node === parent.alternate)) {
parentStack.shift();
}
if (node.type === "VariableDeclaration") {
varKindStack.shift();
}
}
});
identifiers.forEach(([node, stack]) => {
if (!isInScope(node.name, stack)) onIdentifier(node, stack[0], stack);
});
}
function isRefIdentifier(id, parent, parentStack) {
if (parent.type === "CatchClause" || (parent.type === "VariableDeclarator" || parent.type === "ClassDeclaration") && parent.id === id) {
return false;
}
if (isFunction$1(parent)) {
if (parent.id === id) {
return false;
}
if (parent.params.includes(id)) {
return false;
}
}
if (parent.type === "MethodDefinition" && !parent.computed) {
return false;
}
if (isStaticPropertyKey(id, parent)) {
return false;
}
if (isNodeInPattern(parent) && parent.value === id) {
return false;
}
if (parent.type === "ArrayPattern" && !isInDestructuringAssignment(parent, parentStack)) {
return false;
}
if (parent.type === "MemberExpression" && parent.property === id && !parent.computed) {
return false;
}
if (parent.type === "ExportSpecifier") {
return false;
}
if (id.name === "arguments") {
return false;
}
return true;
}
const isStaticProperty = (node) => node && node.type === "Property" && !node.computed;
const isStaticPropertyKey = (node, parent) => isStaticProperty(parent) && parent.key === node;
const functionNodeTypeRE = /Function(?:Expression|Declaration)$|Method$/;
function isFunction$1(node) {
return functionNodeTypeRE.test(node.type);
}
const blockNodeTypeRE = /^BlockStatement$|^For(?:In|Of)?Statement$/;
function isBlock(node) {
return blockNodeTypeRE.test(node.type);
}
function findParentScope(parentStack, isVar = false) {
return parentStack.find(isVar ? isFunction$1 : isBlock);
}
function isInDestructuringAssignment(parent, parentStack) {
if (parent && (parent.type === "Property" || parent.type === "ArrayPattern")) {
return parentStack.some((i) => i.type === "AssignmentExpression");
}
return false;
}
let offset;
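// `new Function(...)` (and the AsyncFunction wrapper used for SSR modules)
// prepends a few generated lines to the wrapped source, and the exact count
// varies across Node.js versions. Throwing from a freshly constructed function
// and reading the line number off its stack measures that padding once, so
// rewritten stack traces can be shifted back to the original source lines.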
function calculateOffsetOnce() {
if (offset !== void 0) {
return;
}
try {
new Function("throw new Error(1)")();
} catch (e) {
const match = /:(\d+):\d+\)$/.exec(e.stack.split("\n")[1]);
offset = match ? +match[1] - 1 : 0;
}
}
function ssrRewriteStacktrace(stack, moduleGraph) {
calculateOffsetOnce();
return stack.split("\n").map((line) => {
return line.replace(
/^ {4}at (?:(\S.*?)\s\()?(.+?):(\d+)(?::(\d+))?\)?/,
(input, varName, id, line2, column) => {
if (!id) return input;
const mod = moduleGraph.idToModuleMap.get(id);
const rawSourceMap = mod?.ssrTransformResult?.map;
if (!rawSourceMap) {
return input;
}
const traced = new TraceMap(rawSourceMap);
const pos = originalPositionFor$1(traced, {
line: Number(line2) - offset,
// stacktrace's column is 1-indexed, but sourcemap's one is 0-indexed
column: Number(column) - 1
});
if (!pos.source || pos.line == null || pos.column == null) {
return input;
}
const trimmedVarName = varName.trim();
const sourceFile = path$n.resolve(path$n.dirname(id), pos.source);
const source = `${sourceFile}:${pos.line}:${pos.column + 1}`;
if (!trimmedVarName || trimmedVarName === "eval") {
return ` at ${source}`;
} else {
return ` at ${trimmedVarName} (${source})`;
}
}
);
}).join("\n");
}
function rebindErrorStacktrace(e, stacktrace) {
const { configurable, writable } = Object.getOwnPropertyDescriptor(
e,
"stack"
);
if (configurable) {
Object.defineProperty(e, "stack", {
value: stacktrace,
enumerable: true,
configurable: true,
writable: true
});
} else if (writable) {
e.stack = stacktrace;
}
}
const rewroteStacktraces = /* @__PURE__ */ new WeakSet();
function ssrFixStacktrace(e, moduleGraph) {
if (!e.stack) return;
if (rewroteStacktraces.has(e)) return;
const stacktrace = ssrRewriteStacktrace(e.stack, moduleGraph);
rebindErrorStacktrace(e, stacktrace);
rewroteStacktraces.add(e);
}
const pendingModules = /* @__PURE__ */ new Map();
const pendingModuleDependencyGraph = /* @__PURE__ */ new Map();
const importErrors = /* @__PURE__ */ new WeakMap();
async function ssrLoadModule(url, server, fixStacktrace) {
url = unwrapId$1(url);
const pending = pendingModules.get(url);
if (pending) {
return pending;
}
const modulePromise = instantiateModule(url, server, fixStacktrace);
pendingModules.set(url, modulePromise);
modulePromise.catch(() => {
}).finally(() => {
pendingModules.delete(url);
});
return modulePromise;
}
async function instantiateModule(url, server, fixStacktrace) {
const { moduleGraph } = server;
const mod = await moduleGraph.ensureEntryFromUrl(url, true);
if (mod.ssrError) {
throw mod.ssrError;
}
if (mod.ssrModule) {
return mod.ssrModule;
}
const result = mod.ssrTransformResult || await transformRequest(url, server, { ssr: true });
if (!result) {
throw new Error(`failed to load module for ssr: ${url}`);
}
const ssrModule = {
[Symbol.toStringTag]: "Module"
};
Object.defineProperty(ssrModule, "__esModule", { value: true });
mod.ssrModule = ssrModule;
const osNormalizedFilename = isWindows$3 ? path$n.resolve(mod.file) : mod.file;
const ssrImportMeta = {
dirname: path$n.dirname(osNormalizedFilename),
filename: osNormalizedFilename,
// The filesystem URL, matching native Node.js modules
url: pathToFileURL(mod.file).toString()
};
const {
isProduction,
resolve: { dedupe, preserveSymlinks },
root,
ssr
} = server.config;
const overrideConditions = ssr.resolve?.externalConditions || [];
const resolveOptions = {
mainFields: ["main"],
conditions: [],
overrideConditions: [...overrideConditions, "production", "development"],
extensions: [".js", ".cjs", ".json"],
dedupe,
preserveSymlinks,
isBuild: false,
isProduction,
root,
ssrConfig: ssr,
legacyProxySsrExternalModules: server.config.legacy?.proxySsrExternalModules,
packageCache: server.config.packageCache
};
const ssrImport = async (dep, metadata) => {
try {
if (dep[0] !== "." && dep[0] !== "/") {
return await nodeImport(dep, mod.file, resolveOptions, metadata);
}
dep = unwrapId$1(dep);
if (!metadata?.isDynamicImport) {
addPendingModuleDependency(url, dep);
if (checkModuleDependencyExists(dep, url)) {
const depSsrModule = moduleGraph.urlToModuleMap.get(dep)?.ssrModule;
if (!depSsrModule) {
throw new Error(
"[vite] The dependency module is not yet fully initialized due to circular dependency. This is a bug in Vite SSR"
);
}
return depSsrModule;
}
}
return ssrLoadModule(dep, server, fixStacktrace);
} catch (err) {
importErrors.set(err, { importee: dep });
throw err;
}
};
const ssrDynamicImport = (dep) => {
if (dep[0] === ".") {
dep = path$n.posix.resolve(path$n.dirname(url), dep);
}
return ssrImport(dep, { isDynamicImport: true });
};
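// Mirrors `export * from` semantics: every named binding of the source module
// (except `default` and `__esModule`) is re-exported as a live getter, so
// later reassignments in the source module remain visible to importers.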
function ssrExportAll(sourceModule) {
for (const key in sourceModule) {
if (key !== "default" && key !== "__esModule") {
Object.defineProperty(ssrModule, key, {
enumerable: true,
configurable: true,
get() {
return sourceModule[key];
}
});
}
}
}
let sourceMapSuffix = "";
if (result.map && "version" in result.map) {
const moduleSourceMap = Object.assign({}, result.map, {
mappings: ";".repeat(asyncFunctionDeclarationPaddingLineCount) + result.map.mappings
});
sourceMapSuffix = `
//# ${SOURCEMAPPING_URL}=${genSourceMapUrl(moduleSourceMap)}`;
}
try {
const initModule = new AsyncFunction(
ssrModuleExportsKey,
ssrImportMetaKey,
ssrImportKey,
ssrDynamicImportKey,
ssrExportAllKey,
'"use strict";' + result.code + `
//# sourceURL=${mod.id}${sourceMapSuffix}`
);
await initModule(
ssrModule,
ssrImportMeta,
ssrImport,
ssrDynamicImport,
ssrExportAll
);
} catch (e) {
mod.ssrError = e;
const errorData = importErrors.get(e);
if (e.stack && fixStacktrace) {
ssrFixStacktrace(e, moduleGraph);
}
server.config.logger.error(
colors$1.red(
`Error when evaluating SSR module ${url}:` + (errorData?.importee ? ` failed to import "${errorData.importee}"` : "") + `
|- ${e.stack}
`
),
{
timestamp: true,
clear: server.config.clearScreen,
error: e
}
);
throw e;
} finally {
pendingModuleDependencyGraph.delete(url);
}
return Object.freeze(ssrModule);
}
function addPendingModuleDependency(originUrl, depUrl) {
if (pendingModuleDependencyGraph.has(originUrl)) {
pendingModuleDependencyGraph.get(originUrl).add(depUrl);
} else {
pendingModuleDependencyGraph.set(originUrl, /* @__PURE__ */ new Set([depUrl]));
}
}
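// Iterative depth-first search over the pending-module dependency graph:
// returns true when `targetUrl` is transitively reachable from `originUrl`,
// i.e. the two URLs participate in a circular import that is still being
// instantiated.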
function checkModuleDependencyExists(originUrl, targetUrl) {
const visited = /* @__PURE__ */ new Set();
const stack = [originUrl];
while (stack.length) {
const currentUrl = stack.pop();
if (currentUrl === targetUrl) {
return true;
}
if (!visited.has(currentUrl)) {
visited.add(currentUrl);
const dependencies = pendingModuleDependencyGraph.get(currentUrl);
if (dependencies) {
for (const depUrl of dependencies) {
if (!visited.has(depUrl)) {
stack.push(depUrl);
}
}
}
}
}
return false;
}
async function nodeImport(id, importer, resolveOptions, metadata) {
let url;
let filePath;
if (id.startsWith("data:") || isExternalUrl(id) || isBuiltin(id)) {
url = id;
} else {
const resolved = tryNodeResolve(
id,
importer,
{ ...resolveOptions, tryEsmOnly: true },
false,
void 0,
true
);
if (!resolved) {
const err = new Error(
`Cannot find module '${id}' imported from '${importer}'`
);
err.code = "ERR_MODULE_NOT_FOUND";
throw err;
}
filePath = resolved.id;
url = pathToFileURL(resolved.id).toString();
}
const mod = await import(url);
if (resolveOptions.legacyProxySsrExternalModules) {
return proxyESM(mod);
} else if (filePath) {
analyzeImportedModDifference(
mod,
id,
isFilePathESM(filePath, resolveOptions.packageCache) ? "module" : void 0,
metadata
);
return mod;
} else {
return mod;
}
}
function proxyESM(mod) {
if (isPrimitive(mod)) return { default: mod };
let defaultExport = "default" in mod ? mod.default : mod;
if (!isPrimitive(defaultExport) && "__esModule" in defaultExport) {
mod = defaultExport;
if ("default" in defaultExport) {
defaultExport = defaultExport.default;
}
}
return new Proxy(mod, {
get(mod2, prop) {
if (prop === "default") return defaultExport;
return mod2[prop] ?? defaultExport?.[prop];
}
});
}
function isPrimitive(value) {
return !value || typeof value !== "object" && typeof value !== "function";
}
var isWsl$2 = {exports: {}};
const fs$3 = require$$0__default;
let isDocker$2;
function hasDockerEnv() {
try {
fs$3.statSync('/.dockerenv');
return true;
} catch (_) {
return false;
}
}
function hasDockerCGroup() {
try {
return fs$3.readFileSync('/proc/self/cgroup', 'utf8').includes('docker');
} catch (_) {
return false;
}
}
var isDocker_1 = () => {
if (isDocker$2 === undefined) {
isDocker$2 = hasDockerEnv() || hasDockerCGroup();
}
return isDocker$2;
};
const os = require$$2;
const fs$2 = require$$0__default;
const isDocker$1 = isDocker_1;
const isWsl$1 = () => {
if (process.platform !== 'linux') {
return false;
}
if (os.release().toLowerCase().includes('microsoft')) {
if (isDocker$1()) {
return false;
}
return true;
}
try {
return fs$2.readFileSync('/proc/version', 'utf8').toLowerCase().includes('microsoft') ?
!isDocker$1() : false;
} catch (_) {
return false;
}
};
if (process.env.__IS_WSL_TEST__) {
isWsl$2.exports = isWsl$1;
} else {
isWsl$2.exports = isWsl$1();
}
var isWslExports = isWsl$2.exports;
var defineLazyProp = (object, propertyName, fn) => {
const define = value => Object.defineProperty(object, propertyName, {value, enumerable: true, writable: true});
Object.defineProperty(object, propertyName, {
configurable: true,
enumerable: true,
get() {
const result = fn();
define(result);
return result;
},
set(value) {
define(value);
}
});
return object;
};
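// For example (hypothetical usage; `loadSettings` is illustrative only):
//
//   const obj = defineLazyProp({}, 'settings', () => loadSettings());
//   obj.settings; // runs loadSettings() once, then caches the value
//   obj.settings; // returns the cached value without calling loadSettings()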
const path$3 = require$$0$4;
const childProcess = require$$2$1;
const {promises: fs$1, constants: fsConstants} = require$$0__default;
const isWsl = isWslExports;
const isDocker = isDocker_1;
const defineLazyProperty = defineLazyProp;
// Path to included `xdg-open`.
const localXdgOpenPath = path$3.join(__dirname, 'xdg-open');
const {platform, arch} = process;
// Podman detection
const hasContainerEnv = () => {

try {
// `fs$1` is `fs.promises`, which has no `statSync`; use the sync API from the
// default `fs` export so the check can actually succeed.
require$$0__default.statSync('/run/.containerenv');
return true;
} catch {
return false;
}
};
let cachedResult;
function isInsideContainer() {
if (cachedResult === undefined) {
cachedResult = hasContainerEnv() || isDocker();
}
return cachedResult;
}
/**
Get the mount point for fixed drives in WSL.
@inner
@returns {string} The mount point.
*/
const getWslDrivesMountPoint = (() => {
// Default value for "root" param
// according to https://docs.microsoft.com/en-us/windows/wsl/wsl-config
const defaultMountPoint = '/mnt/';
let mountPoint;
return async function () {
if (mountPoint) {
// Return memoized mount point value
return mountPoint;
}
const configFilePath = '/etc/wsl.conf';
let isConfigFileExists = false;
try {
await fs$1.access(configFilePath, fsConstants.F_OK);
isConfigFileExists = true;
} catch {}
if (!isConfigFileExists) {
return defaultMountPoint;
}
const configContent = await fs$1.readFile(configFilePath, {encoding: 'utf8'});
const configMountPoint = /(?<!#.*)root\s*=\s*(?<mountPoint>.*)/g.exec(configContent);
if (!configMountPoint) {
return defaultMountPoint;
}
mountPoint = configMountPoint.groups.mountPoint.trim();
mountPoint = mountPoint.endsWith('/') ? mountPoint : `${mountPoint}/`;
return mountPoint;
};
})();
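// For example, an /etc/wsl.conf containing `root = /windir/` makes Windows
// drives show up under /windir/c, /windir/d, ... instead of the default /mnt/.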
const pTryEach = async (array, mapper) => {
let latestError;
for (const item of array) {
try {
return await mapper(item); // eslint-disable-line no-await-in-loop
} catch (error) {
latestError = error;
}
}
throw latestError;
};
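// For example (hypothetical values): try each app in order and resolve with
// the first one that opens successfully; if every attempt throws, the last
// error is rethrown.
//
//   await pTryEach(['google-chrome', 'chromium'], name =>
//     baseOpen({target: 'https://example.com', app: {name, arguments: []}})
//   );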
const baseOpen = async options => {
options = {
wait: false,
background: false,
newInstance: false,
allowNonzeroExitCode: false,
...options
};
if (Array.isArray(options.app)) {
return pTryEach(options.app, singleApp => baseOpen({
...options,
app: singleApp
}));
}
let {name: app, arguments: appArguments = []} = options.app || {};
appArguments = [...appArguments];
if (Array.isArray(app)) {
return pTryEach(app, appName => baseOpen({
...options,
app: {
name: appName,
arguments: appArguments
}
}));
}
let command;
const cliArguments = [];
const childProcessOptions = {};
if (platform === 'darwin') {
command = 'open';
if (options.wait) {
cliArguments.push('--wait-apps');
}
if (options.background) {
cliArguments.push('--background');
}
if (options.newInstance) {
cliArguments.push('--new');
}
if (app) {
cliArguments.push('-a', app);
}
} else if (platform === 'win32' || (isWsl && !isInsideContainer() && !app)) {
const mountPoint = await getWslDrivesMountPoint();
command = isWsl ?
`${mountPoint}c/Windows/System32/WindowsPowerShell/v1.0/powershell.exe` :
`${process.env.SYSTEMROOT}\\System32\\WindowsPowerShell\\v1.0\\powershell`;
cliArguments.push(
'-NoProfile',
'-NonInteractive',
'-ExecutionPolicy',
'Bypass',
'-EncodedCommand'
);
if (!isWsl) {
childProcessOptions.windowsVerbatimArguments = true;
}
const encodedArguments = ['Start'];
if (options.wait) {
encodedArguments.push('-Wait');
}
if (app) {
// Double quote with double quotes to ensure the inner quotes are passed through.
// Inner quotes are delimited for PowerShell interpretation with backticks.
encodedArguments.push(`"\`"${app}\`""`, '-ArgumentList');
if (options.target) {
appArguments.unshift(options.target);
}
} else if (options.target) {
encodedArguments.push(`"${options.target}"`);
}
if (appArguments.length > 0) {
appArguments = appArguments.map(arg => `"\`"${arg}\`""`);
encodedArguments.push(appArguments.join(','));
}
// Using Base64-encoded command, accepted by PowerShell, to allow special characters.
options.target = Buffer.from(encodedArguments.join(' '), 'utf16le').toString('base64');
} else {
if (app) {
command = app;
} else {
// When bundled by Webpack, there's no actual package file path and no local `xdg-open`.
const isBundled = !__dirname || __dirname === '/';
// Check if local `xdg-open` exists and is executable.
let exeLocalXdgOpen = false;
try {
await fs$1.access(localXdgOpenPath, fsConstants.X_OK);
exeLocalXdgOpen = true;
} catch {}
const useSystemXdgOpen = process.versions.electron ||
platform === 'android' || isBundled || !exeLocalXdgOpen;
command = useSystemXdgOpen ? 'xdg-open' : localXdgOpenPath;
}
if (appArguments.length > 0) {
cliArguments.push(...appArguments);
}
if (!options.wait) {
// `xdg-open` will block the process unless stdio is ignored
// and it's detached from the parent even if it's unref'd.
childProcessOptions.stdio = 'ignore';
childProcessOptions.detached = true;
}
}
if (options.target) {
cliArguments.push(options.target);
}
if (platform === 'darwin' && appArguments.length > 0) {
cliArguments.push('--args', ...appArguments);
}
const subprocess = childProcess.spawn(command, cliArguments, childProcessOptions);
if (options.wait) {
return new Promise((resolve, reject) => {
subprocess.once('error', reject);
subprocess.once('close', exitCode => {
if (!options.allowNonzeroExitCode && exitCode > 0) {
reject(new Error(`Exited with code ${exitCode}`));
return;
}
resolve(subprocess);
});
});
}
subprocess.unref();
return subprocess;
};
const open = (target, options) => {
if (typeof target !== 'string') {
throw new TypeError('Expected a `target`');
}
return baseOpen({
...options,
target
});
};
const openApp = (name, options) => {
if (typeof name !== 'string') {
throw new TypeError('Expected a `name`');
}
const {arguments: appArguments = []} = options || {};
if (appArguments !== undefined && appArguments !== null && !Array.isArray(appArguments)) {
throw new TypeError('Expected `appArguments` as Array type');
}
return baseOpen({
...options,
app: {
name,
arguments: appArguments
}
});
};
function detectArchBinary(binary) {
if (typeof binary === 'string' || Array.isArray(binary)) {
return binary;
}
const {[arch]: archBinary} = binary;
if (!archBinary) {
throw new Error(`${arch} is not supported`);
}
return archBinary;
}
function detectPlatformBinary({[platform]: platformBinary}, {wsl}) {
if (wsl && isWsl) {
return detectArchBinary(wsl);
}
if (!platformBinary) {
throw new Error(`${platform} is not supported`);
}
return detectArchBinary(platformBinary);
}
const apps = {};
defineLazyProperty(apps, 'chrome', () => detectPlatformBinary({
darwin: 'google chrome',
win32: 'chrome',
linux: ['google-chrome', 'google-chrome-stable', 'chromium']
}, {
wsl: {
ia32: '/mnt/c/Program Files (x86)/Google/Chrome/Application/chrome.exe',
x64: ['/mnt/c/Program Files/Google/Chrome/Application/chrome.exe', '/mnt/c/Program Files (x86)/Google/Chrome/Application/chrome.exe']
}
}));
defineLazyProperty(apps, 'firefox', () => detectPlatformBinary({
darwin: 'firefox',
win32: 'C:\\Program Files\\Mozilla Firefox\\firefox.exe',
linux: 'firefox'
}, {
wsl: '/mnt/c/Program Files/Mozilla Firefox/firefox.exe'
}));
defineLazyProperty(apps, 'edge', () => detectPlatformBinary({
darwin: 'microsoft edge',
win32: 'msedge',
linux: ['microsoft-edge', 'microsoft-edge-dev']
}, {
wsl: '/mnt/c/Program Files (x86)/Microsoft/Edge/Application/msedge.exe'
}));
open.apps = apps;
open.openApp = openApp;
var open_1 = open;
var open$1 = /*@__PURE__*/getDefaultExportFromCjs(open_1);
var crossSpawn = {exports: {}};
var windows;
var hasRequiredWindows;
function requireWindows () {
if (hasRequiredWindows) return windows;
hasRequiredWindows = 1;
windows = isexe;
isexe.sync = sync;
var fs = require$$0__default;
function checkPathExt (path, options) {
var pathext = options.pathExt !== undefined ?
options.pathExt : process.env.PATHEXT;
if (!pathext) {
return true
}
pathext = pathext.split(';');
if (pathext.indexOf('') !== -1) {
return true
}
for (var i = 0; i < pathext.length; i++) {
var p = pathext[i].toLowerCase();
if (p && path.substr(-p.length).toLowerCase() === p) {
return true
}
}
return false
}
function checkStat (stat, path, options) {
if (!stat.isSymbolicLink() && !stat.isFile()) {
return false
}
return checkPathExt(path, options)
}
function isexe (path, options, cb) {
fs.stat(path, function (er, stat) {
cb(er, er ? false : checkStat(stat, path, options));
});
}
function sync (path, options) {
return checkStat(fs.statSync(path), path, options)
}
return windows;
}
var mode;
var hasRequiredMode;
function requireMode () {
if (hasRequiredMode) return mode;
hasRequiredMode = 1;
mode = isexe;
isexe.sync = sync;
var fs = require$$0__default;
function isexe (path, options, cb) {
fs.stat(path, function (er, stat) {
cb(er, er ? false : checkStat(stat, options));
});
}
function sync (path, options) {
return checkStat(fs.statSync(path), options)
}
function checkStat (stat, options) {
return stat.isFile() && checkMode(stat, options)
}
function checkMode (stat, options) {
var mod = stat.mode;
var uid = stat.uid;
var gid = stat.gid;
var myUid = options.uid !== undefined ?
options.uid : process.getuid && process.getuid();
var myGid = options.gid !== undefined ?
options.gid : process.getgid && process.getgid();
var u = parseInt('100', 8);
var g = parseInt('010', 8);
var o = parseInt('001', 8);
var ug = u | g;
var ret = (mod & o) ||
(mod & g) && gid === myGid ||
(mod & u) && uid === myUid ||
(mod & ug) && myUid === 0;
return ret
}
return mode;
}
var core;
if (process.platform === 'win32' || commonjsGlobal.TESTING_WINDOWS) {
core = requireWindows();
} else {
core = requireMode();
}
var isexe_1 = isexe$1;
isexe$1.sync = sync;
function isexe$1 (path, options, cb) {
if (typeof options === 'function') {
cb = options;
options = {};
}
if (!cb) {
if (typeof Promise !== 'function') {
throw new TypeError('callback not provided')
}
return new Promise(function (resolve, reject) {
isexe$1(path, options || {}, function (er, is) {
if (er) {
reject(er);
} else {
resolve(is);
}
});
})
}
core(path, options || {}, function (er, is) {
// ignore EACCES because that just means we aren't allowed to run it
if (er) {
if (er.code === 'EACCES' || options && options.ignoreErrors) {
er = null;
is = false;
}
}
cb(er, is);
});
}
function sync (path, options) {
// my kingdom for a filtered catch
try {
return core.sync(path, options || {})
} catch (er) {
if (options && options.ignoreErrors || er.code === 'EACCES') {
return false
} else {
throw er
}
}
}
const isWindows = process.platform === 'win32' ||
process.env.OSTYPE === 'cygwin' ||
process.env.OSTYPE === 'msys';
const path$2 = require$$0$4;
const COLON = isWindows ? ';' : ':';
const isexe = isexe_1;
const getNotFoundError = (cmd) =>
Object.assign(new Error(`not found: ${cmd}`), { code: 'ENOENT' });
const getPathInfo = (cmd, opt) => {
const colon = opt.colon || COLON;
// If it has a slash, then we don't bother searching the pathenv.
// just check the file itself, and that's it.
const pathEnv = cmd.match(/\//) || isWindows && cmd.match(/\\/) ? ['']
: (
[
// windows always checks the cwd first
...(isWindows ? [process.cwd()] : []),
...(opt.path || process.env.PATH ||
/* istanbul ignore next: very unusual */ '').split(colon),
]
);
const pathExtExe = isWindows
? opt.pathExt || process.env.PATHEXT || '.EXE;.CMD;.BAT;.COM'
: '';
const pathExt = isWindows ? pathExtExe.split(colon) : [''];
if (isWindows) {
if (cmd.indexOf('.') !== -1 && pathExt[0] !== '')
pathExt.unshift('');
}
return {
pathEnv,
pathExt,
pathExtExe,
}
};
const which$1 = (cmd, opt, cb) => {
if (typeof opt === 'function') {
cb = opt;
opt = {};
}
if (!opt)
opt = {};
const { pathEnv, pathExt, pathExtExe } = getPathInfo(cmd, opt);
const found = [];
const step = i => new Promise((resolve, reject) => {
if (i === pathEnv.length)
return opt.all && found.length ? resolve(found)
: reject(getNotFoundError(cmd))
const ppRaw = pathEnv[i];
const pathPart = /^".*"$/.test(ppRaw) ? ppRaw.slice(1, -1) : ppRaw;
const pCmd = path$2.join(pathPart, cmd);
const p = !pathPart && /^\.[\\\/]/.test(cmd) ? cmd.slice(0, 2) + pCmd
: pCmd;
resolve(subStep(p, i, 0));
});
const subStep = (p, i, ii) => new Promise((resolve, reject) => {
if (ii === pathExt.length)
return resolve(step(i + 1))
const ext = pathExt[ii];
isexe(p + ext, { pathExt: pathExtExe }, (er, is) => {
if (!er && is) {
if (opt.all)
found.push(p + ext);
else
return resolve(p + ext)
}
return resolve(subStep(p, i, ii + 1))
});
});
return cb ? step(0).then(res => cb(null, res), cb) : step(0)
};
const whichSync = (cmd, opt) => {
opt = opt || {};
const { pathEnv, pathExt, pathExtExe } = getPathInfo(cmd, opt);
const found = [];
for (let i = 0; i < pathEnv.length; i ++) {
const ppRaw = pathEnv[i];
const pathPart = /^".*"$/.test(ppRaw) ? ppRaw.slice(1, -1) : ppRaw;
const pCmd = path$2.join(pathPart, cmd);
const p = !pathPart && /^\.[\\\/]/.test(cmd) ? cmd.slice(0, 2) + pCmd
: pCmd;
for (let j = 0; j < pathExt.length; j ++) {
const cur = p + pathExt[j];
try {
const is = isexe.sync(cur, { pathExt: pathExtExe });
if (is) {
if (opt.all)
found.push(cur);
else
return cur
}
} catch (ex) {}
}
}
if (opt.all && found.length)
return found
if (opt.nothrow)
return null
throw getNotFoundError(cmd)
};
var which_1 = which$1;
which$1.sync = whichSync;
var pathKey$1 = {exports: {}};
const pathKey = (options = {}) => {
const environment = options.env || process.env;
const platform = options.platform || process.platform;
if (platform !== 'win32') {
return 'PATH';
}
return Object.keys(environment).reverse().find(key => key.toUpperCase() === 'PATH') || 'Path';
};
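// For example: on Windows the variable may be stored under any casing, so the
// matching key is returned as-is; elsewhere the result is always 'PATH'.
//
//   pathKey({platform: 'linux'});                         // 'PATH'
//   pathKey({platform: 'win32', env: {Path: 'C:\\bin'}}); // 'Path'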
pathKey$1.exports = pathKey;
// TODO: Remove this for the next major release
pathKey$1.exports.default = pathKey;
var pathKeyExports = pathKey$1.exports;
const path$1 = require$$0$4;
const which = which_1;
const getPathKey = pathKeyExports;
function resolveCommandAttempt(parsed, withoutPathExt) {
const env = parsed.options.env || process.env;
const cwd = process.cwd();
const hasCustomCwd = parsed.options.cwd != null;
// Worker threads do not have process.chdir()
const shouldSwitchCwd = hasCustomCwd && process.chdir !== undefined && !process.chdir.disabled;
// If a custom `cwd` was specified, we need to change the process cwd
// because `which` will do stat calls but does not support a custom cwd
if (shouldSwitchCwd) {
try {
process.chdir(parsed.options.cwd);
} catch (err) {
/* Empty */
}
}
let resolved;
try {
resolved = which.sync(parsed.command, {
path: env[getPathKey({ env })],
pathExt: withoutPathExt ? path$1.delimiter : undefined,
});
} catch (e) {
/* Empty */
} finally {
if (shouldSwitchCwd) {
process.chdir(cwd);
}
}
// If we successfully resolved, ensure that an absolute path is returned
// Note that when a custom `cwd` was used, we need to resolve to an absolute path based on it
if (resolved) {
resolved = path$1.resolve(hasCustomCwd ? parsed.options.cwd : '', resolved);
}
return resolved;
}
function resolveCommand$1(parsed) {
return resolveCommandAttempt(parsed) || resolveCommandAttempt(parsed, true);
}
var resolveCommand_1 = resolveCommand$1;
var _escape = {};
// See http://www.robvanderwoude.com/escapechars.php
const metaCharsRegExp = /([()\][%!^"`<>&|;, *?])/g;
function escapeCommand(arg) {
// Escape meta chars
arg = arg.replace(metaCharsRegExp, '^$1');
return arg;
}
function escapeArgument(arg, doubleEscapeMetaChars) {
// Convert to string
arg = `${arg}`;
// Algorithm below is based on https://qntm.org/cmd
// Sequence of backslashes followed by a double quote:
// double up all the backslashes and escape the double quote
arg = arg.replace(/(\\*)"/g, '$1$1\\"');
// Sequence of backslashes followed by the end of the string
// (which will become a double quote later):
// double up all the backslashes
arg = arg.replace(/(\\*)$/, '$1$1');
// All other backslashes occur literally
// Quote the whole thing:
arg = `"${arg}"`;
// Escape meta chars
arg = arg.replace(metaCharsRegExp, '^$1');
// Double escape meta chars if necessary
if (doubleEscapeMetaChars) {
arg = arg.replace(metaCharsRegExp, '^$1');
}
return arg;
}
_escape.command = escapeCommand;
_escape.argument = escapeArgument;
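// For example: arguments are wrapped in double quotes and every cmd.exe meta
// character (including spaces and quotes) is escaped with `^`.
//
//   escapeCommand('dir /b');    // 'dir^ /b'
//   escapeArgument('foo bar');  // '^"foo^ bar^"'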
var shebangRegex$1 = /^#!(.*)/;
const shebangRegex = shebangRegex$1;
var shebangCommand$1 = (string = '') => {
const match = string.match(shebangRegex);
if (!match) {
return null;
}
const [path, argument] = match[0].replace(/#! ?/, '').split(' ');
const binary = path.split('/').pop();
if (binary === 'env') {
return argument;
}
return argument ? `${binary} ${argument}` : binary;
};
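// For example:
//
//   shebangCommand$1('#!/usr/bin/env node'); // 'node'
//   shebangCommand$1('#!/bin/bash -e');      // 'bash -e'
//   shebangCommand$1('no shebang here');     // null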
const fs = require$$0__default;
const shebangCommand = shebangCommand$1;
function readShebang$1(command) {
// Read the first 150 bytes from the file
const size = 150;
const buffer = Buffer.alloc(size);
let fd;
try {
fd = fs.openSync(command, 'r');
fs.readSync(fd, buffer, 0, size, 0);
fs.closeSync(fd);
} catch (e) { /* Empty */ }
// Attempt to extract shebang (null is returned if not a shebang)
return shebangCommand(buffer.toString());
}
var readShebang_1 = readShebang$1;
const path = require$$0$4;
const resolveCommand = resolveCommand_1;
const escape$1 = _escape;
const readShebang = readShebang_1;
const isWin$1 = process.platform === 'win32';
const isExecutableRegExp = /\.(?:com|exe)$/i;
const isCmdShimRegExp = /node_modules[\\/].bin[\\/][^\\/]+\.cmd$/i;
function detectShebang(parsed) {
parsed.file = resolveCommand(parsed);
const shebang = parsed.file && readShebang(parsed.file);
if (shebang) {
parsed.args.unshift(parsed.file);
parsed.command = shebang;
return resolveCommand(parsed);
}
return parsed.file;
}
function parseNonShell(parsed) {
if (!isWin$1) {
return parsed;
}
// Detect & add support for shebangs
const commandFile = detectShebang(parsed);
// We don't need a shell if the command filename is an executable
const needsShell = !isExecutableRegExp.test(commandFile);
// If a shell is required, use cmd.exe and take care of escaping everything correctly
// Note that `forceShell` is a hidden option used only in tests
if (parsed.options.forceShell || needsShell) {
// Need to double escape meta chars if the command is a cmd-shim located in `node_modules/.bin/`
// The cmd-shim simply executes the package bin file with Node.js, proxying any arguments
// Because the escape of metachars with ^ gets interpreted when the cmd.exe is first called,
// we need to double escape them
const needsDoubleEscapeMetaChars = isCmdShimRegExp.test(commandFile);
// Normalize posix paths into OS compatible paths (e.g.: foo/bar -> foo\bar)
// This is necessary otherwise it will always fail with ENOENT in those cases
parsed.command = path.normalize(parsed.command);
// Escape command & arguments
parsed.command = escape$1.command(parsed.command);
parsed.args = parsed.args.map((arg) => escape$1.argument(arg, needsDoubleEscapeMetaChars));
const shellCommand = [parsed.command].concat(parsed.args).join(' ');
parsed.args = ['/d', '/s', '/c', `"${shellCommand}"`];
parsed.command = process.env.comspec || 'cmd.exe';
parsed.options.windowsVerbatimArguments = true; // Tell node's spawn that the arguments are already escaped
}
return parsed;
}
function parse$4(command, args, options) {
// Normalize arguments, similar to nodejs
if (args && !Array.isArray(args)) {
options = args;
args = null;
}
args = args ? args.slice(0) : []; // Clone array to avoid changing the original
options = Object.assign({}, options); // Clone object to avoid changing the original
// Build our parsed object
const parsed = {
command,
args,
options,
file: undefined,
original: {
command,
args,
},
};
// Delegate further parsing to shell or non-shell
return options.shell ? parsed : parseNonShell(parsed);
}
var parse_1 = parse$4;
const isWin = process.platform === 'win32';
function notFoundError(original, syscall) {
return Object.assign(new Error(`${syscall} ${original.command} ENOENT`), {
code: 'ENOENT',
errno: 'ENOENT',
syscall: `${syscall} ${original.command}`,
path: original.command,
spawnargs: original.args,
});
}
function hookChildProcess(cp, parsed) {
if (!isWin) {
return;
}
const originalEmit = cp.emit;
cp.emit = function (name, arg1) {
// If emitting "exit" event and exit code is 1, we need to check if
// the command exists and emit an "error" instead
// See https://github.com/IndigoUnited/node-cross-spawn/issues/16
if (name === 'exit') {
const err = verifyENOENT(arg1, parsed);
if (err) {
return originalEmit.call(cp, 'error', err);
}
}
return originalEmit.apply(cp, arguments); // eslint-disable-line prefer-rest-params
};
}
function verifyENOENT(status, parsed) {
if (isWin && status === 1 && !parsed.file) {
return notFoundError(parsed.original, 'spawn');
}
return null;
}
function verifyENOENTSync(status, parsed) {
if (isWin && status === 1 && !parsed.file) {
return notFoundError(parsed.original, 'spawnSync');
}
return null;
}
var enoent$1 = {
hookChildProcess,
verifyENOENT,
verifyENOENTSync,
notFoundError,
};
const cp = require$$2$1;
const parse$3 = parse_1;
const enoent = enoent$1;
function spawn(command, args, options) {
// Parse the arguments
const parsed = parse$3(command, args, options);
// Spawn the child process
const spawned = cp.spawn(parsed.command, parsed.args, parsed.options);
// Hook into child process "exit" event to emit an error if the command
// does not exist, see: https://github.com/IndigoUnited/node-cross-spawn/issues/16
enoent.hookChildProcess(spawned, parsed);
return spawned;
}
function spawnSync(command, args, options) {
// Parse the arguments
const parsed = parse$3(command, args, options);
// Spawn the child process
const result = cp.spawnSync(parsed.command, parsed.args, parsed.options);
// Check if the command does not exist and synthesize an ENOENT error, see: https://github.com/IndigoUnited/node-cross-spawn/issues/16
result.error = result.error || enoent.verifyENOENTSync(result.status, parsed);
return result;
}
crossSpawn.exports = spawn;
crossSpawn.exports.spawn = spawn;
crossSpawn.exports.sync = spawnSync;
crossSpawn.exports._parse = parse$3;
crossSpawn.exports._enoent = enoent;
var crossSpawnExports = crossSpawn.exports;
var spawn$1 = /*@__PURE__*/getDefaultExportFromCjs(crossSpawnExports);
function openBrowser(url, opt, logger) {
const browser = process.env.BROWSER || "";
if (browser.toLowerCase().endsWith(".js")) {
executeNodeScript(browser, url, logger);
} else if (browser.toLowerCase() !== "none") {
const browserArgs = process.env.BROWSER_ARGS ? process.env.BROWSER_ARGS.split(" ") : [];
startBrowserProcess(browser, browserArgs, url, logger);
}
}
function executeNodeScript(scriptPath, url, logger) {
const extraArgs = process.argv.slice(2);
const child = spawn$1(process.execPath, [scriptPath, ...extraArgs, url], {
stdio: "inherit"
});
child.on("close", (code) => {
if (code !== 0) {
logger.error(
colors$1.red(
`
The script specified as BROWSER environment variable failed.
${colors$1.cyan(
scriptPath
)} exited with code ${code}.`
),
{ error: null }
);
}
});
}
const supportedChromiumBrowsers = [
"Google Chrome Canary",
"Google Chrome Dev",
"Google Chrome Beta",
"Google Chrome",
"Microsoft Edge",
"Brave Browser",
"Vivaldi",
"Chromium"
];
async function startBrowserProcess(browser, browserArgs, url, logger) {
const preferredOSXBrowser = browser === "google chrome" ? "Google Chrome" : browser;
const shouldTryOpenChromeWithAppleScript = process.platform === "darwin" && (!preferredOSXBrowser || supportedChromiumBrowsers.includes(preferredOSXBrowser));
if (shouldTryOpenChromeWithAppleScript) {
try {
const ps = await execAsync("ps cax");
const openedBrowser = preferredOSXBrowser && ps.includes(preferredOSXBrowser) ? preferredOSXBrowser : supportedChromiumBrowsers.find((b) => ps.includes(b));
if (openedBrowser) {
await execAsync(
`osascript openChrome.applescript "${encodeURI(
url
)}" "${openedBrowser}"`,
{
cwd: join$2(VITE_PACKAGE_DIR, "bin")
}
);
return true;
}
} catch (err) {
}
}
if (process.platform === "darwin" && browser === "open") {
browser = void 0;
}
try {
const options = browser ? { app: { name: browser, arguments: browserArgs } } : {};
new Promise((_, reject) => {
open$1(url, options).then((subprocess) => {
subprocess.on("error", reject);
}).catch(reject);
}).catch((err) => {
logger.error(err.stack || err.message);
});
return true;
} catch (err) {
return false;
}
}
function execAsync(command, options) {
return new Promise((resolve, reject) => {
exec(command, options, (error, stdout) => {
if (error) {
reject(error);
} else {
resolve(stdout.toString());
}
});
});
}
function bindCLIShortcuts(server, opts) {
if (!server.httpServer || !process.stdin.isTTY || process.env.CI) {
return;
}
const isDev = isDevServer(server);
if (isDev) {
server._shortcutsOptions = opts;
}
if (opts?.print) {
server.config.logger.info(
colors$1.dim(colors$1.green(" \u279C")) + colors$1.dim(" press ") + colors$1.bold("h + enter") + colors$1.dim(" to show help")
);
}
const shortcuts = (opts?.customShortcuts ?? []).concat(
isDev ? BASE_DEV_SHORTCUTS : BASE_PREVIEW_SHORTCUTS
);
let actionRunning = false;
const onInput = async (input) => {
if (actionRunning) return;
if (input === "h") {
const loggedKeys = /* @__PURE__ */ new Set();
server.config.logger.info("\n Shortcuts");
for (const shortcut2 of shortcuts) {
if (loggedKeys.has(shortcut2.key)) continue;
loggedKeys.add(shortcut2.key);
if (shortcut2.action == null) continue;
server.config.logger.info(
colors$1.dim(" press ") + colors$1.bold(`${shortcut2.key} + enter`) + colors$1.dim(` to ${shortcut2.description}`)
);
}
return;
}
const shortcut = shortcuts.find((shortcut2) => shortcut2.key === input);
if (!shortcut || shortcut.action == null) return;
actionRunning = true;
await shortcut.action(server);
actionRunning = false;
};
const rl = readline.createInterface({ input: process.stdin });
rl.on("line", onInput);
server.httpServer.on("close", () => rl.close());
}
const BASE_DEV_SHORTCUTS = [
{
key: "r",
description: "restart the server",
async action(server) {
await restartServerWithUrls(server);
}
},
{
key: "u",
description: "show server url",
action(server) {
server.config.logger.info("");
server.printUrls();
}
},
{
key: "o",
description: "open in browser",
action(server) {
server.openBrowser();
}
},
{
key: "c",
description: "clear console",
action(server) {
server.config.logger.clearScreen("error");
}
},
{
key: "q",
description: "quit",
async action(server) {
try {
await server.close();
} finally {
process.exit();
}
}
}
];
const BASE_PREVIEW_SHORTCUTS = [
{
key: "o",
description: "open in browser",
action(server) {
const url = server.resolvedUrls?.local[0] ?? server.resolvedUrls?.network[0];
if (url) {
openBrowser(url, true, server.config.logger);
} else {
server.config.logger.warn("No URL available to open in browser");
}
}
},
{
key: "q",
description: "quit",
async action(server) {
try {
await server.close();
} finally {
process.exit();
}
}
}
];
function getResolvedOutDirs(root, outDir, outputOptions) {
const resolvedOutDir = path$n.resolve(root, outDir);
if (!outputOptions) return /* @__PURE__ */ new Set([resolvedOutDir]);
return new Set(
arraify(outputOptions).map(
({ dir }) => dir ? path$n.resolve(root, dir) : resolvedOutDir
)
);
}
function resolveEmptyOutDir(emptyOutDir, root, outDirs, logger) {
if (emptyOutDir != null) return emptyOutDir;
for (const outDir of outDirs) {
if (!normalizePath$3(outDir).startsWith(withTrailingSlash(root))) {
logger?.warn(
colors$1.yellow(
`
${colors$1.bold(`(!)`)} outDir ${colors$1.white(
colors$1.dim(outDir)
)} is not inside project root and will not be emptied.
Use --emptyOutDir to override.
`
)
);
return false;
}
}
return true;
}
function resolveChokidarOptions(config, options, resolvedOutDirs, emptyOutDir) {
const { ignored: ignoredList, ...otherOptions } = options ?? {};
const ignored = [
"**/.git/**",
"**/node_modules/**",
"**/test-results/**",
// Playwright
glob.escapePath(config.cacheDir) + "/**",
...arraify(ignoredList || [])
];
if (emptyOutDir) {
ignored.push(
...[...resolvedOutDirs].map((outDir) => glob.escapePath(outDir) + "/**")
);
}
const resolvedWatchOptions = {
ignored,
ignoreInitial: true,
ignorePermissionErrors: true,
...otherOptions
};
return resolvedWatchOptions;
}
class NoopWatcher extends EventEmitter$4 {
constructor(options) {
super();
this.options = options;
}
add() {
return this;
}
unwatch() {
return this;
}
getWatched() {
return {};
}
ref() {
return this;
}
unref() {
return this;
}
async close() {
}
}
function createNoopWatcher(options) {
return new NoopWatcher(options);
}
async function fetchModule(server, url, importer, options = {}) {
if (url.startsWith("data:") || isBuiltin(url)) {
return { externalize: url, type: "builtin" };
}
if (isExternalUrl(url)) {
return { externalize: url, type: "network" };
}
if (url[0] !== "." && url[0] !== "/") {
const {
isProduction,
resolve: { dedupe, preserveSymlinks },
root,
ssr
} = server.config;
const overrideConditions = ssr.resolve?.externalConditions || [];
const resolveOptions = {
mainFields: ["main"],
conditions: [],
overrideConditions: [...overrideConditions, "production", "development"],
extensions: [".js", ".cjs", ".json"],
dedupe,
preserveSymlinks,
isBuild: false,
isProduction,
root,
ssrConfig: ssr,
packageCache: server.config.packageCache
};
const resolved = tryNodeResolve(
url,
importer,
{ ...resolveOptions, tryEsmOnly: true },
false,
void 0,
true
);
if (!resolved) {
const err = new Error(
`Cannot find module '${url}' imported from '${importer}'`
);
err.code = "ERR_MODULE_NOT_FOUND";
throw err;
}
const file = pathToFileURL(resolved.id).toString();
const type = isFilePathESM(resolved.id, server.config.packageCache) ? "module" : "commonjs";
return { externalize: file, type };
}
url = unwrapId$1(url);
let result = await server.transformRequest(url, { ssr: true });
if (!result) {
throw new Error(
`[vite] transform failed for module '${url}'${importer ? ` imported from '${importer}'` : ""}.`
);
}
const mod = await server.moduleGraph.getModuleByUrl(url, true);
if (!mod) {
throw new Error(
`[vite] cannot find module '${url}' ${importer ? ` imported from '${importer}'` : ""}.`
);
}
if (options.inlineSourceMap !== false) {
result = inlineSourceMap(mod, result, options.processSourceMap);
}
if (result.code[0] === "#")
result.code = result.code.replace(/^#!.*/, (s) => " ".repeat(s.length));
return { code: result.code, file: mod.file };
}
const OTHER_SOURCE_MAP_REGEXP = new RegExp(
`//# ${SOURCEMAPPING_URL}=data:application/json[^,]+base64,([A-Za-z0-9+/=]+)$`,
"gm"
);
function inlineSourceMap(mod, result, processSourceMap) {
const map = result.map;
let code = result.code;
if (!map || !("version" in map) || code.includes(VITE_RUNTIME_SOURCEMAPPING_SOURCE))
return result;
OTHER_SOURCE_MAP_REGEXP.lastIndex = 0;
if (OTHER_SOURCE_MAP_REGEXP.test(code))
code = code.replace(OTHER_SOURCE_MAP_REGEXP, "");
const sourceMap = processSourceMap?.(map) || map;
result.code = `${code.trimEnd()}
//# sourceURL=${mod.id}
${VITE_RUNTIME_SOURCEMAPPING_SOURCE}
//# ${SOURCEMAPPING_URL}=${genSourceMapUrl(sourceMap)}
`;
return result;
}
function ssrFetchModule(server, id, importer) {
return fetchModule(server, id, importer, {
processSourceMap(map) {
return Object.assign({}, map, {
mappings: ";".repeat(asyncFunctionDeclarationPaddingLineCount) + map.mappings
});
}
});
}
var bufferUtil$1 = {exports: {}};
const BINARY_TYPES$2 = ['nodebuffer', 'arraybuffer', 'fragments'];
const hasBlob$1 = typeof Blob !== 'undefined';
if (hasBlob$1) BINARY_TYPES$2.push('blob');
var constants = {
BINARY_TYPES: BINARY_TYPES$2,
EMPTY_BUFFER: Buffer.alloc(0),
GUID: '258EAFA5-E914-47DA-95CA-C5AB0DC85B11',
hasBlob: hasBlob$1,
kForOnEventAttribute: Symbol('kIsForOnEventAttribute'),
kListener: Symbol('kListener'),
kStatusCode: Symbol('status-code'),
kWebSocket: Symbol('websocket'),
NOOP: () => {}
};
const { EMPTY_BUFFER: EMPTY_BUFFER$3 } = constants;
const FastBuffer$2 = Buffer[Symbol.species];
/**
* Merges an array of buffers into a new buffer.
*
* @param {Buffer[]} list The array of buffers to concat
* @param {Number} totalLength The total length of buffers in the list
* @return {Buffer} The resulting buffer
* @public
*/
function concat$1(list, totalLength) {
if (list.length === 0) return EMPTY_BUFFER$3;
if (list.length === 1) return list[0];
const target = Buffer.allocUnsafe(totalLength);
let offset = 0;
for (let i = 0; i < list.length; i++) {
const buf = list[i];
target.set(buf, offset);
offset += buf.length;
}
if (offset < totalLength) {
return new FastBuffer$2(target.buffer, target.byteOffset, offset);
}
return target;
}
/**
* Masks a buffer using the given mask.
*
* @param {Buffer} source The buffer to mask
* @param {Buffer} mask The mask to use
* @param {Buffer} output The buffer where to store the result
* @param {Number} offset The offset at which to start writing
* @param {Number} length The number of bytes to mask.
* @public
*/
function _mask(source, mask, output, offset, length) {
for (let i = 0; i < length; i++) {
output[offset + i] = source[i] ^ mask[i & 3];
}
}
/**
* Unmasks a buffer using the given mask.
*
* @param {Buffer} buffer The buffer to unmask
* @param {Buffer} mask The mask to use
* @public
*/
function _unmask(buffer, mask) {
for (let i = 0; i < buffer.length; i++) {
buffer[i] ^= mask[i & 3];
}
}
/**
* Converts a buffer to an `ArrayBuffer`.
*
* @param {Buffer} buf The buffer to convert
* @return {ArrayBuffer} Converted buffer
* @public
*/
function toArrayBuffer$1(buf) {
if (buf.length === buf.buffer.byteLength) {
return buf.buffer;
}
return buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.length);
}
/**
* Converts `data` to a `Buffer`.
*
* @param {*} data The data to convert
* @return {Buffer} The buffer
* @throws {TypeError}
* @public
*/
function toBuffer$2(data) {
toBuffer$2.readOnly = true;
if (Buffer.isBuffer(data)) return data;
let buf;
if (data instanceof ArrayBuffer) {
buf = new FastBuffer$2(data);
} else if (ArrayBuffer.isView(data)) {
buf = new FastBuffer$2(data.buffer, data.byteOffset, data.byteLength);
} else {
buf = Buffer.from(data);
toBuffer$2.readOnly = false;
}
return buf;
}
bufferUtil$1.exports = {
concat: concat$1,
mask: _mask,
toArrayBuffer: toArrayBuffer$1,
toBuffer: toBuffer$2,
unmask: _unmask
};
/* istanbul ignore else */
if (!process.env.WS_NO_BUFFER_UTIL) {
try {
const bufferUtil = require('bufferutil');
bufferUtil$1.exports.mask = function (source, mask, output, offset, length) {
if (length < 48) _mask(source, mask, output, offset, length);
else bufferUtil.mask(source, mask, output, offset, length);
};
bufferUtil$1.exports.unmask = function (buffer, mask) {
if (buffer.length < 32) _unmask(buffer, mask);
else bufferUtil.unmask(buffer, mask);
};
} catch (e) {
// Continue regardless of the error.
}
}
var bufferUtilExports = bufferUtil$1.exports;
const kDone = Symbol('kDone');
const kRun = Symbol('kRun');
/**
* A very simple job queue with adjustable concurrency. Adapted from
* https://github.com/STRML/async-limiter
*/
let Limiter$1 = class Limiter {
/**
* Creates a new `Limiter`.
*
* @param {Number} [concurrency=Infinity] The maximum number of jobs allowed
* to run concurrently
*/
constructor(concurrency) {
this[kDone] = () => {
this.pending--;
this[kRun]();
};
this.concurrency = concurrency || Infinity;
this.jobs = [];
this.pending = 0;
}
/**
* Adds a job to the queue.
*
* @param {Function} job The job to run
* @public
*/
add(job) {
this.jobs.push(job);
this[kRun]();
}
/**
* Removes a job from the queue and runs it if possible.
*
* @private
*/
[kRun]() {
if (this.pending === this.concurrency) return;
if (this.jobs.length) {
const job = this.jobs.shift();
this.pending++;
job(this[kDone]);
}
}
};
var limiter = Limiter$1;
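// For example (hypothetical usage): with a concurrency of 2, the third job is
// queued until one of the first two invokes its `done` callback.
//
//   const limit = new Limiter$1(2);
//   limit.add((done) => setTimeout(done, 100));
//   limit.add((done) => setTimeout(done, 100));
//   limit.add((done) => setTimeout(done, 100)); // starts ~100ms later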
const zlib = zlib$1;
const bufferUtil = bufferUtilExports;
const Limiter = limiter;
const { kStatusCode: kStatusCode$2 } = constants;
const FastBuffer$1 = Buffer[Symbol.species];
const TRAILER = Buffer.from([0x00, 0x00, 0xff, 0xff]);
const kPerMessageDeflate = Symbol('permessage-deflate');
const kTotalLength = Symbol('total-length');
const kCallback = Symbol('callback');
const kBuffers = Symbol('buffers');
const kError$1 = Symbol('error');
//
// We limit zlib concurrency, which prevents severe memory fragmentation
// as documented in https://github.com/nodejs/node/issues/8871#issuecomment-250915913
// and https://github.com/websockets/ws/issues/1202
//
// Intentionally global; it's the global thread pool that's an issue.
//
let zlibLimiter;
/**
* permessage-deflate implementation.
*/
let PerMessageDeflate$4 = class PerMessageDeflate {
/**
* Creates a PerMessageDeflate instance.
*
* @param {Object} [options] Configuration options
* @param {(Boolean|Number)} [options.clientMaxWindowBits] Advertise support
* for, or request, a custom client window size
* @param {Boolean} [options.clientNoContextTakeover=false] Advertise/
* acknowledge disabling of client context takeover
* @param {Number} [options.concurrencyLimit=10] The number of concurrent
* calls to zlib
* @param {(Boolean|Number)} [options.serverMaxWindowBits] Request/confirm the
* use of a custom server window size
* @param {Boolean} [options.serverNoContextTakeover=false] Request/accept
* disabling of server context takeover
* @param {Number} [options.threshold=1024] Size (in bytes) below which
* messages should not be compressed if context takeover is disabled
* @param {Object} [options.zlibDeflateOptions] Options to pass to zlib on
* deflate
* @param {Object} [options.zlibInflateOptions] Options to pass to zlib on
* inflate
* @param {Boolean} [isServer=false] Create the instance in either server or
* client mode
* @param {Number} [maxPayload=0] The maximum allowed message length
*/
constructor(options, isServer, maxPayload) {
this._maxPayload = maxPayload | 0;
this._options = options || {};
this._threshold =
this._options.threshold !== undefined ? this._options.threshold : 1024;
this._isServer = !!isServer;
this._deflate = null;
this._inflate = null;
this.params = null;
if (!zlibLimiter) {
const concurrency =
this._options.concurrencyLimit !== undefined
? this._options.concurrencyLimit
: 10;
zlibLimiter = new Limiter(concurrency);
}
}
/**
* @type {String}
*/
static get extensionName() {
return 'permessage-deflate';
}
/**
* Create an extension negotiation offer.
*
* @return {Object} Extension parameters
* @public
*/
offer() {
const params = {};
if (this._options.serverNoContextTakeover) {
params.server_no_context_takeover = true;
}
if (this._options.clientNoContextTakeover) {
params.client_no_context_takeover = true;
}
if (this._options.serverMaxWindowBits) {
params.server_max_window_bits = this._options.serverMaxWindowBits;
}
if (this._options.clientMaxWindowBits) {
params.client_max_window_bits = this._options.clientMaxWindowBits;
} else if (this._options.clientMaxWindowBits == null) {
params.client_max_window_bits = true;
}
return params;
}
/**
* Accept an extension negotiation offer/response.
*
* @param {Array} configurations The extension negotiation offers/response
* @return {Object} Accepted configuration
* @public
*/
accept(configurations) {
configurations = this.normalizeParams(configurations);
this.params = this._isServer
? this.acceptAsServer(configurations)
: this.acceptAsClient(configurations);
return this.params;
}
/**
* Releases all resources used by the extension.
*
* @public
*/
cleanup() {
if (this._inflate) {
this._inflate.close();
this._inflate = null;
}
if (this._deflate) {
const callback = this._deflate[kCallback];
this._deflate.close();
this._deflate = null;
if (callback) {
callback(
new Error(
'The deflate stream was closed while data was being processed'
)
);
}
}
}
/**
* Accept an extension negotiation offer.
*
* @param {Array} offers The extension negotiation offers
* @return {Object} Accepted configuration
* @private
*/
acceptAsServer(offers) {
const opts = this._options;
const accepted = offers.find((params) => {
if (
(opts.serverNoContextTakeover === false &&
params.server_no_context_takeover) ||
(params.server_max_window_bits &&
(opts.serverMaxWindowBits === false ||
(typeof opts.serverMaxWindowBits === 'number' &&
opts.serverMaxWindowBits > params.server_max_window_bits))) ||
(typeof opts.clientMaxWindowBits === 'number' &&
!params.client_max_window_bits)
) {
return false;
}
return true;
});
if (!accepted) {
throw new Error('None of the extension offers can be accepted');
}
if (opts.serverNoContextTakeover) {
accepted.server_no_context_takeover = true;
}
if (opts.clientNoContextTakeover) {
accepted.client_no_context_takeover = true;
}
if (typeof opts.serverMaxWindowBits === 'number') {
accepted.server_max_window_bits = opts.serverMaxWindowBits;
}
if (typeof opts.clientMaxWindowBits === 'number') {
accepted.client_max_window_bits = opts.clientMaxWindowBits;
} else if (
accepted.client_max_window_bits === true ||
opts.clientMaxWindowBits === false
) {
delete accepted.client_max_window_bits;
}
return accepted;
}
/**
* Accept the extension negotiation response.
*
* @param {Array} response The extension negotiation response
* @return {Object} Accepted configuration
* @private
*/
acceptAsClient(response) {
const params = response[0];
if (
this._options.clientNoContextTakeover === false &&
params.client_no_context_takeover
) {
throw new Error('Unexpected parameter "client_no_context_takeover"');
}
if (!params.client_max_window_bits) {
if (typeof this._options.clientMaxWindowBits === 'number') {
params.client_max_window_bits = this._options.clientMaxWindowBits;
}
} else if (
this._options.clientMaxWindowBits === false ||
(typeof this._options.clientMaxWindowBits === 'number' &&
params.client_max_window_bits > this._options.clientMaxWindowBits)
) {
throw new Error(
'Unexpected or invalid parameter "client_max_window_bits"'
);
}
return params;
}
/**
* Normalize parameters.
*
* @param {Array} configurations The extension negotiation offers/response
* @return {Array} The offers/response with normalized parameters
* @private
*/
normalizeParams(configurations) {
configurations.forEach((params) => {
Object.keys(params).forEach((key) => {
let value = params[key];
if (value.length > 1) {
throw new Error(`Parameter "${key}" must have only a single value`);
}
value = value[0];
if (key === 'client_max_window_bits') {
if (value !== true) {
const num = +value;
if (!Number.isInteger(num) || num < 8 || num > 15) {
throw new TypeError(
`Invalid value for parameter "${key}": ${value}`
);
}
value = num;
} else if (!this._isServer) {
throw new TypeError(
`Invalid value for parameter "${key}": ${value}`
);
}
} else if (key === 'server_max_window_bits') {
const num = +value;
if (!Number.isInteger(num) || num < 8 || num > 15) {
throw new TypeError(
`Invalid value for parameter "${key}": ${value}`
);
}
value = num;
} else if (
key === 'client_no_context_takeover' ||
key === 'server_no_context_takeover'
) {
if (value !== true) {
throw new TypeError(
`Invalid value for parameter "${key}": ${value}`
);
}
} else {
throw new Error(`Unknown parameter "${key}"`);
}
params[key] = value;
});
});
return configurations;
}
/**
* Decompress data. Concurrency limited.
*
* @param {Buffer} data Compressed data
* @param {Boolean} fin Specifies whether or not this is the last fragment
* @param {Function} callback Callback
* @public
*/
decompress(data, fin, callback) {
zlibLimiter.add((done) => {
this._decompress(data, fin, (err, result) => {
done();
callback(err, result);
});
});
}
/**
* Compress data. Concurrency limited.
*
* @param {(Buffer|String)} data Data to compress
* @param {Boolean} fin Specifies whether or not this is the last fragment
* @param {Function} callback Callback
* @public
*/
compress(data, fin, callback) {
zlibLimiter.add((done) => {
this._compress(data, fin, (err, result) => {
done();
callback(err, result);
});
});
}
/**
* Decompress data.
*
* @param {Buffer} data Compressed data
* @param {Boolean} fin Specifies whether or not this is the last fragment
* @param {Function} callback Callback
* @private
*/
_decompress(data, fin, callback) {
const endpoint = this._isServer ? 'client' : 'server';
if (!this._inflate) {
const key = `${endpoint}_max_window_bits`;
const windowBits =
typeof this.params[key] !== 'number'
? zlib.Z_DEFAULT_WINDOWBITS
: this.params[key];
this._inflate = zlib.createInflateRaw({
...this._options.zlibInflateOptions,
windowBits
});
this._inflate[kPerMessageDeflate] = this;
this._inflate[kTotalLength] = 0;
this._inflate[kBuffers] = [];
this._inflate.on('error', inflateOnError);
this._inflate.on('data', inflateOnData);
}
this._inflate[kCallback] = callback;
this._inflate.write(data);
if (fin) this._inflate.write(TRAILER);
this._inflate.flush(() => {
const err = this._inflate[kError$1];
if (err) {
this._inflate.close();
this._inflate = null;
callback(err);
return;
}
const data = bufferUtil.concat(
this._inflate[kBuffers],
this._inflate[kTotalLength]
);
if (this._inflate._readableState.endEmitted) {
this._inflate.close();
this._inflate = null;
} else {
this._inflate[kTotalLength] = 0;
this._inflate[kBuffers] = [];
if (fin && this.params[`${endpoint}_no_context_takeover`]) {
this._inflate.reset();
}
}
callback(null, data);
});
}
/**
* Compress data.
*
* @param {(Buffer|String)} data Data to compress
* @param {Boolean} fin Specifies whether or not this is the last fragment
* @param {Function} callback Callback
* @private
*/
_compress(data, fin, callback) {
const endpoint = this._isServer ? 'server' : 'client';
if (!this._deflate) {
const key = `${endpoint}_max_window_bits`;
const windowBits =
typeof this.params[key] !== 'number'
? zlib.Z_DEFAULT_WINDOWBITS
: this.params[key];
this._deflate = zlib.createDeflateRaw({
...this._options.zlibDeflateOptions,
windowBits
});
this._deflate[kTotalLength] = 0;
this._deflate[kBuffers] = [];
this._deflate.on('data', deflateOnData);
}
this._deflate[kCallback] = callback;
this._deflate.write(data);
this._deflate.flush(zlib.Z_SYNC_FLUSH, () => {
if (!this._deflate) {
//
// The deflate stream was closed while data was being processed.
//
return;
}
let data = bufferUtil.concat(
this._deflate[kBuffers],
this._deflate[kTotalLength]
);
if (fin) {
data = new FastBuffer$1(data.buffer, data.byteOffset, data.length - 4);
}
//
// Ensure that the callback will not be called again in
// `PerMessageDeflate#cleanup()`.
//
this._deflate[kCallback] = null;
this._deflate[kTotalLength] = 0;
this._deflate[kBuffers] = [];
if (fin && this.params[`${endpoint}_no_context_takeover`]) {
this._deflate.reset();
}
callback(null, data);
});
}
};
var permessageDeflate = PerMessageDeflate$4;
/**
* The listener of the `zlib.DeflateRaw` stream `'data'` event.
*
* @param {Buffer} chunk A chunk of data
* @private
*/
function deflateOnData(chunk) {
this[kBuffers].push(chunk);
this[kTotalLength] += chunk.length;
}
/**
* The listener of the `zlib.InflateRaw` stream `'data'` event.
*
* @param {Buffer} chunk A chunk of data
* @private
*/
function inflateOnData(chunk) {
this[kTotalLength] += chunk.length;
if (
this[kPerMessageDeflate]._maxPayload < 1 ||
this[kTotalLength] <= this[kPerMessageDeflate]._maxPayload
) {
this[kBuffers].push(chunk);
return;
}
this[kError$1] = new RangeError('Max payload size exceeded');
this[kError$1].code = 'WS_ERR_UNSUPPORTED_MESSAGE_LENGTH';
this[kError$1][kStatusCode$2] = 1009;
this.removeListener('data', inflateOnData);
this.reset();
}
/**
* The listener of the `zlib.InflateRaw` stream `'error'` event.
*
* @param {Error} err The emitted error
* @private
*/
function inflateOnError(err) {
//
// There is no need to call `Zlib#close()` as the handle is automatically
// closed when an error is emitted.
//
this[kPerMessageDeflate]._inflate = null;
err[kStatusCode$2] = 1007;
this[kCallback](err);
}
var validation = {exports: {}};
const { isUtf8 } = require$$0$a;
const { hasBlob } = constants;
//
// Allowed token characters:
//
// '!', '#', '$', '%', '&', ''', '*', '+', '-',
// '.', 0-9, A-Z, '^', '_', '`', a-z, '|', '~'
//
// tokenChars[32] === 0 // ' '
// tokenChars[33] === 1 // '!'
// tokenChars[34] === 0 // '"'
// ...
//
// prettier-ignore
const tokenChars$2 = [
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // 0 - 15
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // 16 - 31
0, 1, 0, 1, 1, 1, 1, 1, 0, 0, 1, 1, 0, 1, 1, 0, // 32 - 47
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, // 48 - 63
0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, // 64 - 79
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 1, 1, // 80 - 95
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, // 96 - 111
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 0, 1, 0 // 112 - 127
];
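//
// Illustrative lookups: tokenChars['a'.charCodeAt(0)] === 1,
// tokenChars['~'.charCodeAt(0)] === 1, tokenChars[' '.charCodeAt(0)] === 0 and
// tokenChars['"'.charCodeAt(0)] === 0.
//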
/**
* Checks if a status code is allowed in a close frame.
*
* @param {Number} code The status code
* @return {Boolean} `true` if the status code is valid, else `false`
* @public
*/
function isValidStatusCode$2(code) {
return (
(code >= 1000 &&
code <= 1014 &&
code !== 1004 &&
code !== 1005 &&
code !== 1006) ||
(code >= 3000 && code <= 4999)
);
}
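//
// Editorial note: per RFC 6455 section 7.4.1, 1004 is reserved and 1005
// ("no status received") and 1006 ("abnormal closure") must never appear in a
// close frame, while 3000-3999 and 4000-4999 are the registered and
// private-use ranges. For example, isValidStatusCode$2(1000) and
// isValidStatusCode$2(4001) are `true`, isValidStatusCode$2(1005) is `false`.
//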
/**
* Checks if a given buffer contains only correct UTF-8.
* Ported from https://www.cl.cam.ac.uk/%7Emgk25/ucs/utf8_check.c by
* Markus Kuhn.
*
* @param {Buffer} buf The buffer to check
* @return {Boolean} `true` if `buf` contains only correct UTF-8, else `false`
* @public
*/
function _isValidUTF8(buf) {
const len = buf.length;
let i = 0;
while (i < len) {
if ((buf[i] & 0x80) === 0) {
// 0xxxxxxx
i++;
} else if ((buf[i] & 0xe0) === 0xc0) {
// 110xxxxx 10xxxxxx
if (
i + 1 === len ||
(buf[i + 1] & 0xc0) !== 0x80 ||
(buf[i] & 0xfe) === 0xc0 // Overlong
) {
return false;
}
i += 2;
} else if ((buf[i] & 0xf0) === 0xe0) {
// 1110xxxx 10xxxxxx 10xxxxxx
if (
i + 2 >= len ||
(buf[i + 1] & 0xc0) !== 0x80 ||
(buf[i + 2] & 0xc0) !== 0x80 ||
(buf[i] === 0xe0 && (buf[i + 1] & 0xe0) === 0x80) || // Overlong
(buf[i] === 0xed && (buf[i + 1] & 0xe0) === 0xa0) // Surrogate (U+D800 - U+DFFF)
) {
return false;
}
i += 3;
} else if ((buf[i] & 0xf8) === 0xf0) {
// 11110xxx 10xxxxxx 10xxxxxx 10xxxxxx
if (
i + 3 >= len ||
(buf[i + 1] & 0xc0) !== 0x80 ||
(buf[i + 2] & 0xc0) !== 0x80 ||
(buf[i + 3] & 0xc0) !== 0x80 ||
(buf[i] === 0xf0 && (buf[i + 1] & 0xf0) === 0x80) || // Overlong
(buf[i] === 0xf4 && buf[i + 1] > 0x8f) ||
buf[i] > 0xf4 // > U+10FFFF
) {
return false;
}
i += 4;
} else {
return false;
}
}
return true;
}
/**
* Determines whether a value is a `Blob`.
*
* @param {*} value The value to be tested
* @return {Boolean} `true` if `value` is a `Blob`, else `false`
* @private
*/
function isBlob$2(value) {
return (
hasBlob &&
typeof value === 'object' &&
typeof value.arrayBuffer === 'function' &&
typeof value.type === 'string' &&
typeof value.stream === 'function' &&
(value[Symbol.toStringTag] === 'Blob' ||
value[Symbol.toStringTag] === 'File')
);
}
validation.exports = {
isBlob: isBlob$2,
isValidStatusCode: isValidStatusCode$2,
isValidUTF8: _isValidUTF8,
tokenChars: tokenChars$2
};
if (isUtf8) {
validation.exports.isValidUTF8 = function (buf) {
return buf.length < 24 ? _isValidUTF8(buf) : isUtf8(buf);
};
} /* istanbul ignore else */ else if (!process.env.WS_NO_UTF_8_VALIDATE) {
try {
const isValidUTF8 = require('utf-8-validate');
validation.exports.isValidUTF8 = function (buf) {
return buf.length < 32 ? _isValidUTF8(buf) : isValidUTF8(buf);
};
} catch (e) {
// Continue regardless of the error.
}
}
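//
// Editorial note: UTF-8 validation is tiered. Short buffers always use the
// pure-JS `_isValidUTF8` because call overhead dominates; larger buffers
// prefer `buffer.isUtf8` when the running Node.js version exposes it, or the
// optional `utf-8-validate` native addon if it is installed. The 24 and 32
// byte thresholds are the crossover points chosen upstream.
//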
var validationExports = validation.exports;
const { Writable: Writable$1 } = require$$0$6;
const PerMessageDeflate$3 = permessageDeflate;
const {
BINARY_TYPES: BINARY_TYPES$1,
EMPTY_BUFFER: EMPTY_BUFFER$2,
kStatusCode: kStatusCode$1,
kWebSocket: kWebSocket$3
} = constants;
const { concat, toArrayBuffer, unmask } = bufferUtilExports;
const { isValidStatusCode: isValidStatusCode$1, isValidUTF8 } = validationExports;
const FastBuffer = Buffer[Symbol.species];
const GET_INFO = 0;
const GET_PAYLOAD_LENGTH_16 = 1;
const GET_PAYLOAD_LENGTH_64 = 2;
const GET_MASK = 3;
const GET_DATA = 4;
const INFLATING = 5;
const DEFER_EVENT = 6;
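//
// Editorial note: the constants above are the receiver's parser states. Each
// frame is consumed as GET_INFO, then optionally GET_PAYLOAD_LENGTH_16 or
// GET_PAYLOAD_LENGTH_64, then GET_MASK if the frame is masked, then GET_DATA.
// INFLATING and DEFER_EVENT pause `startLoop()` until decompression finishes
// or the deferred event has been emitted.
//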
/**
* HyBi Receiver implementation.
*
* @extends Writable
*/
let Receiver$1 = class Receiver extends Writable$1 {
/**
* Creates a Receiver instance.
*
* @param {Object} [options] Options object
* @param {Boolean} [options.allowSynchronousEvents=true] Specifies whether
* any of the `'message'`, `'ping'`, and `'pong'` events can be emitted
* multiple times in the same tick
* @param {String} [options.binaryType=nodebuffer] The type for binary data
* @param {Object} [options.extensions] An object containing the negotiated
* extensions
* @param {Boolean} [options.isServer=false] Specifies whether to operate in
* client or server mode
* @param {Number} [options.maxPayload=0] The maximum allowed message length
* @param {Boolean} [options.skipUTF8Validation=false] Specifies whether or
* not to skip UTF-8 validation for text and close messages
*/
constructor(options = {}) {
super();
this._allowSynchronousEvents =
options.allowSynchronousEvents !== undefined
? options.allowSynchronousEvents
: true;
this._binaryType = options.binaryType || BINARY_TYPES$1[0];
this._extensions = options.extensions || {};
this._isServer = !!options.isServer;
this._maxPayload = options.maxPayload | 0;
this._skipUTF8Validation = !!options.skipUTF8Validation;
this[kWebSocket$3] = undefined;
this._bufferedBytes = 0;
this._buffers = [];
this._compressed = false;
this._payloadLength = 0;
this._mask = undefined;
this._fragmented = 0;
this._masked = false;
this._fin = false;
this._opcode = 0;
this._totalPayloadLength = 0;
this._messageLength = 0;
this._fragments = [];
this._errored = false;
this._loop = false;
this._state = GET_INFO;
}
/**
* Implements `Writable.prototype._write()`.
*
* @param {Buffer} chunk The chunk of data to write
* @param {String} encoding The character encoding of `chunk`
* @param {Function} cb Callback
* @private
*/
_write(chunk, encoding, cb) {
if (this._opcode === 0x08 && this._state === GET_INFO) return cb();
this._bufferedBytes += chunk.length;
this._buffers.push(chunk);
this.startLoop(cb);
}
/**
* Consumes `n` bytes from the buffered data.
*
* @param {Number} n The number of bytes to consume
* @return {Buffer} The consumed bytes
* @private
*/
consume(n) {
this._bufferedBytes -= n;
if (n === this._buffers[0].length) return this._buffers.shift();
if (n < this._buffers[0].length) {
const buf = this._buffers[0];
this._buffers[0] = new FastBuffer(
buf.buffer,
buf.byteOffset + n,
buf.length - n
);
return new FastBuffer(buf.buffer, buf.byteOffset, n);
}
const dst = Buffer.allocUnsafe(n);
do {
const buf = this._buffers[0];
const offset = dst.length - n;
if (n >= buf.length) {
dst.set(this._buffers.shift(), offset);
} else {
dst.set(new Uint8Array(buf.buffer, buf.byteOffset, n), offset);
this._buffers[0] = new FastBuffer(
buf.buffer,
buf.byteOffset + n,
buf.length - n
);
}
n -= buf.length;
} while (n > 0);
return dst;
}
/**
* Starts the parsing loop.
*
* @param {Function} cb Callback
* @private
*/
startLoop(cb) {
this._loop = true;
do {
switch (this._state) {
case GET_INFO:
this.getInfo(cb);
break;
case GET_PAYLOAD_LENGTH_16:
this.getPayloadLength16(cb);
break;
case GET_PAYLOAD_LENGTH_64:
this.getPayloadLength64(cb);
break;
case GET_MASK:
this.getMask();
break;
case GET_DATA:
this.getData(cb);
break;
case INFLATING:
case DEFER_EVENT:
this._loop = false;
return;
}
} while (this._loop);
if (!this._errored) cb();
}
/**
* Reads the first two bytes of a frame.
*
* @param {Function} cb Callback
* @private
*/
getInfo(cb) {
if (this._bufferedBytes < 2) {
this._loop = false;
return;
}
const buf = this.consume(2);
if ((buf[0] & 0x30) !== 0x00) {
const error = this.createError(
RangeError,
'RSV2 and RSV3 must be clear',
true,
1002,
'WS_ERR_UNEXPECTED_RSV_2_3'
);
cb(error);
return;
}
const compressed = (buf[0] & 0x40) === 0x40;
if (compressed && !this._extensions[PerMessageDeflate$3.extensionName]) {
const error = this.createError(
RangeError,
'RSV1 must be clear',
true,
1002,
'WS_ERR_UNEXPECTED_RSV_1'
);
cb(error);
return;
}
this._fin = (buf[0] & 0x80) === 0x80;
this._opcode = buf[0] & 0x0f;
this._payloadLength = buf[1] & 0x7f;
if (this._opcode === 0x00) {
if (compressed) {
const error = this.createError(
RangeError,
'RSV1 must be clear',
true,
1002,
'WS_ERR_UNEXPECTED_RSV_1'
);
cb(error);
return;
}
if (!this._fragmented) {
const error = this.createError(
RangeError,
'invalid opcode 0',
true,
1002,
'WS_ERR_INVALID_OPCODE'
);
cb(error);
return;
}
this._opcode = this._fragmented;
} else if (this._opcode === 0x01 || this._opcode === 0x02) {
if (this._fragmented) {
const error = this.createError(
RangeError,
`invalid opcode ${this._opcode}`,
true,
1002,
'WS_ERR_INVALID_OPCODE'
);
cb(error);
return;
}
this._compressed = compressed;
} else if (this._opcode > 0x07 && this._opcode < 0x0b) {
if (!this._fin) {
const error = this.createError(
RangeError,
'FIN must be set',
true,
1002,
'WS_ERR_EXPECTED_FIN'
);
cb(error);
return;
}
if (compressed) {
const error = this.createError(
RangeError,
'RSV1 must be clear',
true,
1002,
'WS_ERR_UNEXPECTED_RSV_1'
);
cb(error);
return;
}
if (
this._payloadLength > 0x7d ||
(this._opcode === 0x08 && this._payloadLength === 1)
) {
const error = this.createError(
RangeError,
`invalid payload length ${this._payloadLength}`,
true,
1002,
'WS_ERR_INVALID_CONTROL_PAYLOAD_LENGTH'
);
cb(error);
return;
}
} else {
const error = this.createError(
RangeError,
`invalid opcode ${this._opcode}`,
true,
1002,
'WS_ERR_INVALID_OPCODE'
);
cb(error);
return;
}
if (!this._fin && !this._fragmented) this._fragmented = this._opcode;
this._masked = (buf[1] & 0x80) === 0x80;
if (this._isServer) {
if (!this._masked) {
const error = this.createError(
RangeError,
'MASK must be set',
true,
1002,
'WS_ERR_EXPECTED_MASK'
);
cb(error);
return;
}
} else if (this._masked) {
const error = this.createError(
RangeError,
'MASK must be clear',
true,
1002,
'WS_ERR_UNEXPECTED_MASK'
);
cb(error);
return;
}
if (this._payloadLength === 126) this._state = GET_PAYLOAD_LENGTH_16;
else if (this._payloadLength === 127) this._state = GET_PAYLOAD_LENGTH_64;
else this.haveLength(cb);
}
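//
// Editorial reference: `getInfo()` parses the first two bytes of the RFC 6455
// base frame header:
//
//   byte 0: FIN (0x80) | RSV1 (0x40) | RSV2 (0x20) | RSV3 (0x10) | opcode (0x0f)
//   byte 1: MASK (0x80) | payload length (0x7f)
//
// A 7-bit length of 126 means a 16-bit extended length follows, 127 means a
// 64-bit extended length follows, and a set MASK bit means a 4-byte masking
// key precedes the payload.
//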
/**
* Gets extended payload length (7+16).
*
* @param {Function} cb Callback
* @private
*/
getPayloadLength16(cb) {
if (this._bufferedBytes < 2) {
this._loop = false;
return;
}
this._payloadLength = this.consume(2).readUInt16BE(0);
this.haveLength(cb);
}
/**
* Gets extended payload length (7+64).
*
* @param {Function} cb Callback
* @private
*/
getPayloadLength64(cb) {
if (this._bufferedBytes < 8) {
this._loop = false;
return;
}
const buf = this.consume(8);
const num = buf.readUInt32BE(0);
//
// The maximum safe integer in JavaScript is 2^53 - 1. An error is returned
// if payload length is greater than this number.
//
if (num > Math.pow(2, 53 - 32) - 1) {
const error = this.createError(
RangeError,
'Unsupported WebSocket frame: payload length > 2^53 - 1',
false,
1009,
'WS_ERR_UNSUPPORTED_DATA_PAYLOAD_LENGTH'
);
cb(error);
return;
}
this._payloadLength = num * Math.pow(2, 32) + buf.readUInt32BE(4);
this.haveLength(cb);
}
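//
// Illustrative example: the high 32 bits of the 64-bit extended length may not
// exceed 2^21 - 1 (2097151) so that the final value stays within
// Number.MAX_SAFE_INTEGER. A length buffer of
// [0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00] therefore yields
// 1 * 2^32 + 0 = 4294967296.
//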
/**
* Payload length has been read.
*
* @param {Function} cb Callback
* @private
*/
haveLength(cb) {
if (this._payloadLength && this._opcode < 0x08) {
this._totalPayloadLength += this._payloadLength;
if (this._totalPayloadLength > this._maxPayload && this._maxPayload > 0) {
const error = this.createError(
RangeError,
'Max payload size exceeded',
false,
1009,
'WS_ERR_UNSUPPORTED_MESSAGE_LENGTH'
);
cb(error);
return;
}
}
if (this._masked) this._state = GET_MASK;
else this._state = GET_DATA;
}
/**
* Reads mask bytes.
*
* @private
*/
getMask() {
if (this._bufferedBytes < 4) {
this._loop = false;
return;
}
this._mask = this.consume(4);
this._state = GET_DATA;
}
/**
* Reads data bytes.
*
* @param {Function} cb Callback
* @private
*/
getData(cb) {
let data = EMPTY_BUFFER$2;
if (this._payloadLength) {
if (this._bufferedBytes < this._payloadLength) {
this._loop = false;
return;
}
data = this.consume(this._payloadLength);
if (
this._masked &&
(this._mask[0] | this._mask[1] | this._mask[2] | this._mask[3]) !== 0
) {
unmask(data, this._mask);
}
}
if (this._opcode > 0x07) {
this.controlMessage(data, cb);
return;
}
if (this._compressed) {
this._state = INFLATING;
this.decompress(data, cb);
return;
}
if (data.length) {
//
// This message is not compressed so its length is the sum of the payload
// length of all fragments.
//
this._messageLength = this._totalPayloadLength;
this._fragments.push(data);
}
this.dataMessage(cb);
}
/**
* Decompresses data.
*
* @param {Buffer} data Compressed data
* @param {Function} cb Callback
* @private
*/
decompress(data, cb) {
const perMessageDeflate = this._extensions[PerMessageDeflate$3.extensionName];
perMessageDeflate.decompress(data, this._fin, (err, buf) => {
if (err) return cb(err);
if (buf.length) {
this._messageLength += buf.length;
if (this._messageLength > this._maxPayload && this._maxPayload > 0) {
const error = this.createError(
RangeError,
'Max payload size exceeded',
false,
1009,
'WS_ERR_UNSUPPORTED_MESSAGE_LENGTH'
);
cb(error);
return;
}
this._fragments.push(buf);
}
this.dataMessage(cb);
if (this._state === GET_INFO) this.startLoop(cb);
});
}
/**
* Handles a data message.
*
* @param {Function} cb Callback
* @private
*/
dataMessage(cb) {
if (!this._fin) {
this._state = GET_INFO;
return;
}
const messageLength = this._messageLength;
const fragments = this._fragments;
this._totalPayloadLength = 0;
this._messageLength = 0;
this._fragmented = 0;
this._fragments = [];
if (this._opcode === 2) {
let data;
if (this._binaryType === 'nodebuffer') {
data = concat(fragments, messageLength);
} else if (this._binaryType === 'arraybuffer') {
data = toArrayBuffer(concat(fragments, messageLength));
} else if (this._binaryType === 'blob') {
data = new Blob(fragments);
} else {
data = fragments;
}
if (this._allowSynchronousEvents) {
this.emit('message', data, true);
this._state = GET_INFO;
} else {
this._state = DEFER_EVENT;
setImmediate(() => {
this.emit('message', data, true);
this._state = GET_INFO;
this.startLoop(cb);
});
}
} else {
const buf = concat(fragments, messageLength);
if (!this._skipUTF8Validation && !isValidUTF8(buf)) {
const error = this.createError(
Error,
'invalid UTF-8 sequence',
true,
1007,
'WS_ERR_INVALID_UTF8'
);
cb(error);
return;
}
if (this._state === INFLATING || this._allowSynchronousEvents) {
this.emit('message', buf, false);
this._state = GET_INFO;
} else {
this._state = DEFER_EVENT;
setImmediate(() => {
this.emit('message', buf, false);
this._state = GET_INFO;
this.startLoop(cb);
});
}
}
}
/**
* Handles a control message.
*
* @param {Buffer} data Data to handle
* @param {Function} cb Callback
* @private
*/
controlMessage(data, cb) {
if (this._opcode === 0x08) {
if (data.length === 0) {
this._loop = false;
this.emit('conclude', 1005, EMPTY_BUFFER$2);
this.end();
} else {
const code = data.readUInt16BE(0);
if (!isValidStatusCode$1(code)) {
const error = this.createError(
RangeError,
`invalid status code ${code}`,
true,
1002,
'WS_ERR_INVALID_CLOSE_CODE'
);
cb(error);
return;
}
const buf = new FastBuffer(
data.buffer,
data.byteOffset + 2,
data.length - 2
);
if (!this._skipUTF8Validation && !isValidUTF8(buf)) {
const error = this.createError(
Error,
'invalid UTF-8 sequence',
true,
1007,
'WS_ERR_INVALID_UTF8'
);
cb(error);
return;
}
this._loop = false;
this.emit('conclude', code, buf);
this.end();
}
this._state = GET_INFO;
return;
}
if (this._allowSynchronousEvents) {
this.emit(this._opcode === 0x09 ? 'ping' : 'pong', data);
this._state = GET_INFO;
} else {
this._state = DEFER_EVENT;
setImmediate(() => {
this.emit(this._opcode === 0x09 ? 'ping' : 'pong', data);
this._state = GET_INFO;
this.startLoop(cb);
});
}
}
/**
* Builds an error object.
*
* @param {function(new:Error|RangeError)} ErrorCtor The error constructor
* @param {String} message The error message
* @param {Boolean} prefix Specifies whether or not to add a default prefix to
* `message`
* @param {Number} statusCode The status code
* @param {String} errorCode The exposed error code
* @return {(Error|RangeError)} The error
* @private
*/
createError(ErrorCtor, message, prefix, statusCode, errorCode) {
this._loop = false;
this._errored = true;
const err = new ErrorCtor(
prefix ? `Invalid WebSocket frame: ${message}` : message
);
Error.captureStackTrace(err, this.createError);
err.code = errorCode;
err[kStatusCode$1] = statusCode;
return err;
}
};
var receiver = Receiver$1;
/* eslint no-unused-vars: ["error", { "varsIgnorePattern": "^Duplex" }] */
const { randomFillSync } = require$$3$1;
const PerMessageDeflate$2 = permessageDeflate;
const { EMPTY_BUFFER: EMPTY_BUFFER$1, kWebSocket: kWebSocket$2, NOOP: NOOP$2 } = constants;
const { isBlob: isBlob$1, isValidStatusCode } = validationExports;
const { mask: applyMask, toBuffer: toBuffer$1 } = bufferUtilExports;
const kByteLength = Symbol('kByteLength');
const maskBuffer = Buffer.alloc(4);
const RANDOM_POOL_SIZE = 8 * 1024;
let randomPool;
let randomPoolPointer = RANDOM_POOL_SIZE;
const DEFAULT = 0;
const DEFLATING = 1;
const GET_BLOB_DATA = 2;
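//
// Editorial note: DEFAULT, DEFLATING and GET_BLOB_DATA are the sender states.
// While a frame is being compressed or a Blob is being read, subsequent send
// operations are pushed onto `_queue` via `enqueue()` and replayed by
// `dequeue()` once the sender returns to DEFAULT.
//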
/**
* HyBi Sender implementation.
*/
let Sender$1 = class Sender {
/**
* Creates a Sender instance.
*
* @param {Duplex} socket The connection socket
* @param {Object} [extensions] An object containing the negotiated extensions
* @param {Function} [generateMask] The function used to generate the masking
* key
*/
constructor(socket, extensions, generateMask) {
this._extensions = extensions || {};
if (generateMask) {
this._generateMask = generateMask;
this._maskBuffer = Buffer.alloc(4);
}
this._socket = socket;
this._firstFragment = true;
this._compress = false;
this._bufferedBytes = 0;
this._queue = [];
this._state = DEFAULT;
this.onerror = NOOP$2;
this[kWebSocket$2] = undefined;
}
/**
* Frames a piece of data according to the HyBi WebSocket protocol.
*
* @param {(Buffer|String)} data The data to frame
* @param {Object} options Options object
* @param {Boolean} [options.fin=false] Specifies whether or not to set the
* FIN bit
* @param {Function} [options.generateMask] The function used to generate the
* masking key
* @param {Boolean} [options.mask=false] Specifies whether or not to mask
* `data`
* @param {Buffer} [options.maskBuffer] The buffer used to store the masking
* key
* @param {Number} options.opcode The opcode
* @param {Boolean} [options.readOnly=false] Specifies whether `data` can be
* modified
* @param {Boolean} [options.rsv1=false] Specifies whether or not to set the
* RSV1 bit
* @return {(Buffer|String)[]} The framed data
* @public
*/
static frame(data, options) {
let mask;
let merge = false;
let offset = 2;
let skipMasking = false;
if (options.mask) {
mask = options.maskBuffer || maskBuffer;
if (options.generateMask) {
options.generateMask(mask);
} else {
if (randomPoolPointer === RANDOM_POOL_SIZE) {
/* istanbul ignore else */
if (randomPool === undefined) {
//
// This is lazily initialized because server-sent frames must not
// be masked so it may never be used.
//
randomPool = Buffer.alloc(RANDOM_POOL_SIZE);
}
randomFillSync(randomPool, 0, RANDOM_POOL_SIZE);
randomPoolPointer = 0;
}
mask[0] = randomPool[randomPoolPointer++];
mask[1] = randomPool[randomPoolPointer++];
mask[2] = randomPool[randomPoolPointer++];
mask[3] = randomPool[randomPoolPointer++];
}
skipMasking = (mask[0] | mask[1] | mask[2] | mask[3]) === 0;
offset = 6;
}
let dataLength;
if (typeof data === 'string') {
if (
(!options.mask || skipMasking) &&
options[kByteLength] !== undefined
) {
dataLength = options[kByteLength];
} else {
data = Buffer.from(data);
dataLength = data.length;
}
} else {
dataLength = data.length;
merge = options.mask && options.readOnly && !skipMasking;
}
let payloadLength = dataLength;
if (dataLength >= 65536) {
offset += 8;
payloadLength = 127;
} else if (dataLength > 125) {
offset += 2;
payloadLength = 126;
}
const target = Buffer.allocUnsafe(merge ? dataLength + offset : offset);
target[0] = options.fin ? options.opcode | 0x80 : options.opcode;
if (options.rsv1) target[0] |= 0x40;
target[1] = payloadLength;
if (payloadLength === 126) {
target.writeUInt16BE(dataLength, 2);
} else if (payloadLength === 127) {
target[2] = target[3] = 0;
target.writeUIntBE(dataLength, 4, 6);
}
if (!options.mask) return [target, data];
target[1] |= 0x80;
target[offset - 4] = mask[0];
target[offset - 3] = mask[1];
target[offset - 2] = mask[2];
target[offset - 1] = mask[3];
if (skipMasking) return [target, data];
if (merge) {
applyMask(data, mask, target, offset, dataLength);
return [target];
}
applyMask(data, mask, data, 0, dataLength);
return [target, data];
}
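//
// Illustrative example (client side, masked): framing the 5-byte text "hello"
// with `{ fin: true, opcode: 0x01, mask: true }` produces a 6-byte header
// [0x81, 0x85, m0, m1, m2, m3] followed by the payload XOR-masked with
// m0..m3, as required for client-to-server frames.
//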
/**
* Sends a close message to the other peer.
*
* @param {Number} [code] The status code component of the body
* @param {(String|Buffer)} [data] The message component of the body
* @param {Boolean} [mask=false] Specifies whether or not to mask the message
* @param {Function} [cb] Callback
* @public
*/
close(code, data, mask, cb) {
let buf;
if (code === undefined) {
buf = EMPTY_BUFFER$1;
} else if (typeof code !== 'number' || !isValidStatusCode(code)) {
throw new TypeError('First argument must be a valid error code number');
} else if (data === undefined || !data.length) {
buf = Buffer.allocUnsafe(2);
buf.writeUInt16BE(code, 0);
} else {
const length = Buffer.byteLength(data);
if (length > 123) {
throw new RangeError('The message must not be greater than 123 bytes');
}
buf = Buffer.allocUnsafe(2 + length);
buf.writeUInt16BE(code, 0);
if (typeof data === 'string') {
buf.write(data, 2);
} else {
buf.set(data, 2);
}
}
const options = {
[kByteLength]: buf.length,
fin: true,
generateMask: this._generateMask,
mask,
maskBuffer: this._maskBuffer,
opcode: 0x08,
readOnly: false,
rsv1: false
};
if (this._state !== DEFAULT) {
this.enqueue([this.dispatch, buf, false, options, cb]);
} else {
this.sendFrame(Sender.frame(buf, options), cb);
}
}
/**
* Sends a ping message to the other peer.
*
* @param {*} data The message to send
* @param {Boolean} [mask=false] Specifies whether or not to mask `data`
* @param {Function} [cb] Callback
* @public
*/
ping(data, mask, cb) {
let byteLength;
let readOnly;
if (typeof data === 'string') {
byteLength = Buffer.byteLength(data);
readOnly = false;
} else if (isBlob$1(data)) {
byteLength = data.size;
readOnly = false;
} else {
data = toBuffer$1(data);
byteLength = data.length;
readOnly = toBuffer$1.readOnly;
}
if (byteLength > 125) {
throw new RangeError('The data size must not be greater than 125 bytes');
}
const options = {
[kByteLength]: byteLength,
fin: true,
generateMask: this._generateMask,
mask,
maskBuffer: this._maskBuffer,
opcode: 0x09,
readOnly,
rsv1: false
};
if (isBlob$1(data)) {
if (this._state !== DEFAULT) {
this.enqueue([this.getBlobData, data, false, options, cb]);
} else {
this.getBlobData(data, false, options, cb);
}
} else if (this._state !== DEFAULT) {
this.enqueue([this.dispatch, data, false, options, cb]);
} else {
this.sendFrame(Sender.frame(data, options), cb);
}
}
/**
* Sends a pong message to the other peer.
*
* @param {*} data The message to send
* @param {Boolean} [mask=false] Specifies whether or not to mask `data`
* @param {Function} [cb] Callback
* @public
*/
pong(data, mask, cb) {
let byteLength;
let readOnly;
if (typeof data === 'string') {
byteLength = Buffer.byteLength(data);
readOnly = false;
} else if (isBlob$1(data)) {
byteLength = data.size;
readOnly = false;
} else {
data = toBuffer$1(data);
byteLength = data.length;
readOnly = toBuffer$1.readOnly;
}
if (byteLength > 125) {
throw new RangeError('The data size must not be greater than 125 bytes');
}
const options = {
[kByteLength]: byteLength,
fin: true,
generateMask: this._generateMask,
mask,
maskBuffer: this._maskBuffer,
opcode: 0x0a,
readOnly,
rsv1: false
};
if (isBlob$1(data)) {
if (this._state !== DEFAULT) {
this.enqueue([this.getBlobData, data, false, options, cb]);
} else {
this.getBlobData(data, false, options, cb);
}
} else if (this._state !== DEFAULT) {
this.enqueue([this.dispatch, data, false, options, cb]);
} else {
this.sendFrame(Sender.frame(data, options), cb);
}
}
/**
* Sends a data message to the other peer.
*
* @param {*} data The message to send
* @param {Object} options Options object
* @param {Boolean} [options.binary=false] Specifies whether `data` is binary
* or text
* @param {Boolean} [options.compress=false] Specifies whether or not to
* compress `data`
* @param {Boolean} [options.fin=false] Specifies whether the fragment is the
* last one
* @param {Boolean} [options.mask=false] Specifies whether or not to mask
* `data`
* @param {Function} [cb] Callback
* @public
*/
send(data, options, cb) {
const perMessageDeflate = this._extensions[PerMessageDeflate$2.extensionName];
let opcode = options.binary ? 2 : 1;
let rsv1 = options.compress;
let byteLength;
let readOnly;
if (typeof data === 'string') {
byteLength = Buffer.byteLength(data);
readOnly = false;
} else if (isBlob$1(data)) {
byteLength = data.size;
readOnly = false;
} else {
data = toBuffer$1(data);
byteLength = data.length;
readOnly = toBuffer$1.readOnly;
}
if (this._firstFragment) {
this._firstFragment = false;
if (
rsv1 &&
perMessageDeflate &&
perMessageDeflate.params[
perMessageDeflate._isServer
? 'server_no_context_takeover'
: 'client_no_context_takeover'
]
) {
rsv1 = byteLength >= perMessageDeflate._threshold;
}
this._compress = rsv1;
} else {
rsv1 = false;
opcode = 0;
}
if (options.fin) this._firstFragment = true;
const opts = {
[kByteLength]: byteLength,
fin: options.fin,
generateMask: this._generateMask,
mask: options.mask,
maskBuffer: this._maskBuffer,
opcode,
readOnly,
rsv1
};
if (isBlob$1(data)) {
if (this._state !== DEFAULT) {
this.enqueue([this.getBlobData, data, this._compress, opts, cb]);
} else {
this.getBlobData(data, this._compress, opts, cb);
}
} else if (this._state !== DEFAULT) {
this.enqueue([this.dispatch, data, this._compress, opts, cb]);
} else {
this.dispatch(data, this._compress, opts, cb);
}
}
/**
* Gets the contents of a blob as binary data.
*
* @param {Blob} blob The blob
* @param {Boolean} [compress=false] Specifies whether or not to compress
* the data
* @param {Object} options Options object
* @param {Boolean} [options.fin=false] Specifies whether or not to set the
* FIN bit
* @param {Function} [options.generateMask] The function used to generate the
* masking key
* @param {Boolean} [options.mask=false] Specifies whether or not to mask
* `data`
* @param {Buffer} [options.maskBuffer] The buffer used to store the masking
* key
* @param {Number} options.opcode The opcode
* @param {Boolean} [options.readOnly=false] Specifies whether `data` can be
* modified
* @param {Boolean} [options.rsv1=false] Specifies whether or not to set the
* RSV1 bit
* @param {Function} [cb] Callback
* @private
*/
getBlobData(blob, compress, options, cb) {
this._bufferedBytes += options[kByteLength];
this._state = GET_BLOB_DATA;
blob
.arrayBuffer()
.then((arrayBuffer) => {
if (this._socket.destroyed) {
const err = new Error(
'The socket was closed while the blob was being read'
);
//
// `callCallbacks` is called in the next tick to ensure that errors
// that might be thrown in the callbacks behave like errors thrown
// outside the promise chain.
//
process.nextTick(callCallbacks, this, err, cb);
return;
}
this._bufferedBytes -= options[kByteLength];
const data = toBuffer$1(arrayBuffer);
if (!compress) {
this._state = DEFAULT;
this.sendFrame(Sender.frame(data, options), cb);
this.dequeue();
} else {
this.dispatch(data, compress, options, cb);
}
})
.catch((err) => {
//
// `onError` is called in the next tick for the same reason that
// `callCallbacks` above is.
//
process.nextTick(onError, this, err, cb);
});
}
/**
* Dispatches a message.
*
* @param {(Buffer|String)} data The message to send
* @param {Boolean} [compress=false] Specifies whether or not to compress
* `data`
* @param {Object} options Options object
* @param {Boolean} [options.fin=false] Specifies whether or not to set the
* FIN bit
* @param {Function} [options.generateMask] The function used to generate the
* masking key
* @param {Boolean} [options.mask=false] Specifies whether or not to mask
* `data`
* @param {Buffer} [options.maskBuffer] The buffer used to store the masking
* key
* @param {Number} options.opcode The opcode
* @param {Boolean} [options.readOnly=false] Specifies whether `data` can be
* modified
* @param {Boolean} [options.rsv1=false] Specifies whether or not to set the
* RSV1 bit
* @param {Function} [cb] Callback
* @private
*/
dispatch(data, compress, options, cb) {
if (!compress) {
this.sendFrame(Sender.frame(data, options), cb);
return;
}
const perMessageDeflate = this._extensions[PerMessageDeflate$2.extensionName];
this._bufferedBytes += options[kByteLength];
this._state = DEFLATING;
perMessageDeflate.compress(data, options.fin, (_, buf) => {
if (this._socket.destroyed) {
const err = new Error(
'The socket was closed while data was being compressed'
);
callCallbacks(this, err, cb);
return;
}
this._bufferedBytes -= options[kByteLength];
this._state = DEFAULT;
options.readOnly = false;
this.sendFrame(Sender.frame(buf, options), cb);
this.dequeue();
});
}
/**
* Executes queued send operations.
*
* @private
*/
dequeue() {
while (this._state === DEFAULT && this._queue.length) {
const params = this._queue.shift();
this._bufferedBytes -= params[3][kByteLength];
Reflect.apply(params[0], this, params.slice(1));
}
}
/**
* Enqueues a send operation.
*
* @param {Array} params Send operation parameters.
* @private
*/
enqueue(params) {
this._bufferedBytes += params[3][kByteLength];
this._queue.push(params);
}
/**
* Sends a frame.
*
* @param {Buffer[]} list The frame to send
* @param {Function} [cb] Callback
* @private
*/
sendFrame(list, cb) {
if (list.length === 2) {
this._socket.cork();
this._socket.write(list[0]);
this._socket.write(list[1], cb);
this._socket.uncork();
} else {
this._socket.write(list[0], cb);
}
}
};
var sender = Sender$1;
/**
* Calls queued callbacks with an error.
*
* @param {Sender} sender The `Sender` instance
* @param {Error} err The error to call the callbacks with
* @param {Function} [cb] The first callback
* @private
*/
function callCallbacks(sender, err, cb) {
if (typeof cb === 'function') cb(err);
for (let i = 0; i < sender._queue.length; i++) {
const params = sender._queue[i];
const callback = params[params.length - 1];
if (typeof callback === 'function') callback(err);
}
}
/**
* Handles a `Sender` error.
*
* @param {Sender} sender The `Sender` instance
* @param {Error} err The error
* @param {Function} [cb] The first pending callback
* @private
*/
function onError(sender, err, cb) {
callCallbacks(sender, err, cb);
sender.onerror(err);
}
const { kForOnEventAttribute: kForOnEventAttribute$1, kListener: kListener$1 } = constants;
const kCode = Symbol('kCode');
const kData = Symbol('kData');
const kError = Symbol('kError');
const kMessage = Symbol('kMessage');
const kReason = Symbol('kReason');
const kTarget = Symbol('kTarget');
const kType = Symbol('kType');
const kWasClean = Symbol('kWasClean');
/**
* Class representing an event.
*/
let Event$1 = class Event {
/**
* Create a new `Event`.
*
* @param {String} type The name of the event
* @throws {TypeError} If the `type` argument is not specified
*/
constructor(type) {
this[kTarget] = null;
this[kType] = type;
}
/**
* @type {*}
*/
get target() {
return this[kTarget];
}
/**
* @type {String}
*/
get type() {
return this[kType];
}
};
Object.defineProperty(Event$1.prototype, 'target', { enumerable: true });
Object.defineProperty(Event$1.prototype, 'type', { enumerable: true });
/**
* Class representing a close event.
*
* @extends Event
*/
class CloseEvent extends Event$1 {
/**
* Create a new `CloseEvent`.
*
* @param {String} type The name of the event
* @param {Object} [options] A dictionary object that allows for setting
* attributes via object members of the same name
* @param {Number} [options.code=0] The status code explaining why the
* connection was closed
* @param {String} [options.reason=''] A human-readable string explaining why
* the connection was closed
* @param {Boolean} [options.wasClean=false] Indicates whether or not the
* connection was cleanly closed
*/
constructor(type, options = {}) {
super(type);
this[kCode] = options.code === undefined ? 0 : options.code;
this[kReason] = options.reason === undefined ? '' : options.reason;
this[kWasClean] = options.wasClean === undefined ? false : options.wasClean;
}
/**
* @type {Number}
*/
get code() {
return this[kCode];
}
/**
* @type {String}
*/
get reason() {
return this[kReason];
}
/**
* @type {Boolean}
*/
get wasClean() {
return this[kWasClean];
}
}
Object.defineProperty(CloseEvent.prototype, 'code', { enumerable: true });
Object.defineProperty(CloseEvent.prototype, 'reason', { enumerable: true });
Object.defineProperty(CloseEvent.prototype, 'wasClean', { enumerable: true });
/**
* Class representing an error event.
*
* @extends Event
*/
class ErrorEvent extends Event$1 {
/**
* Create a new `ErrorEvent`.
*
* @param {String} type The name of the event
* @param {Object} [options] A dictionary object that allows for setting
* attributes via object members of the same name
* @param {*} [options.error=null] The error that generated this event
* @param {String} [options.message=''] The error message
*/
constructor(type, options = {}) {
super(type);
this[kError] = options.error === undefined ? null : options.error;
this[kMessage] = options.message === undefined ? '' : options.message;
}
/**
* @type {*}
*/
get error() {
return this[kError];
}
/**
* @type {String}
*/
get message() {
return this[kMessage];
}
}
Object.defineProperty(ErrorEvent.prototype, 'error', { enumerable: true });
Object.defineProperty(ErrorEvent.prototype, 'message', { enumerable: true });
/**
* Class representing a message event.
*
* @extends Event
*/
class MessageEvent extends Event$1 {
/**
* Create a new `MessageEvent`.
*
* @param {String} type The name of the event
* @param {Object} [options] A dictionary object that allows for setting
* attributes via object members of the same name
* @param {*} [options.data=null] The message content
*/
constructor(type, options = {}) {
super(type);
this[kData] = options.data === undefined ? null : options.data;
}
/**
* @type {*}
*/
get data() {
return this[kData];
}
}
Object.defineProperty(MessageEvent.prototype, 'data', { enumerable: true });
/**
* This provides methods for emulating the `EventTarget` interface. It's not
* meant to be used directly.
*
* @mixin
*/
const EventTarget = {
/**
* Register an event listener.
*
* @param {String} type A string representing the event type to listen for
* @param {(Function|Object)} handler The listener to add
* @param {Object} [options] An options object that specifies characteristics about
* the event listener
* @param {Boolean} [options.once=false] A `Boolean` indicating that the
* listener should be invoked at most once after being added. If `true`,
* the listener is automatically removed when invoked.
* @public
*/
addEventListener(type, handler, options = {}) {
for (const listener of this.listeners(type)) {
if (
!options[kForOnEventAttribute$1] &&
listener[kListener$1] === handler &&
!listener[kForOnEventAttribute$1]
) {
return;
}
}
let wrapper;
if (type === 'message') {
wrapper = function onMessage(data, isBinary) {
const event = new MessageEvent('message', {
data: isBinary ? data : data.toString()
});
event[kTarget] = this;
callListener(handler, this, event);
};
} else if (type === 'close') {
wrapper = function onClose(code, message) {
const event = new CloseEvent('close', {
code,
reason: message.toString(),
wasClean: this._closeFrameReceived && this._closeFrameSent
});
event[kTarget] = this;
callListener(handler, this, event);
};
} else if (type === 'error') {
wrapper = function onError(error) {
const event = new ErrorEvent('error', {
error,
message: error.message
});
event[kTarget] = this;
callListener(handler, this, event);
};
} else if (type === 'open') {
wrapper = function onOpen() {
const event = new Event$1('open');
event[kTarget] = this;
callListener(handler, this, event);
};
} else {
return;
}
wrapper[kForOnEventAttribute$1] = !!options[kForOnEventAttribute$1];
wrapper[kListener$1] = handler;
if (options.once) {
this.once(type, wrapper);
} else {
this.on(type, wrapper);
}
},
/**
* Remove an event listener.
*
* @param {String} type A string representing the event type to remove
* @param {(Function|Object)} handler The listener to remove
* @public
*/
removeEventListener(type, handler) {
for (const listener of this.listeners(type)) {
if (listener[kListener$1] === handler && !listener[kForOnEventAttribute$1]) {
this.removeListener(type, listener);
break;
}
}
}
};
var eventTarget = {
CloseEvent,
ErrorEvent,
Event: Event$1,
EventTarget,
MessageEvent
};
/**
* Calls an event listener.
*
* @param {(Function|Object)} listener The listener to call
* @param {*} thisArg The value to use as `this` when calling the listener
* @param {Event} event The event to pass to the listener
* @private
*/
function callListener(listener, thisArg, event) {
if (typeof listener === 'object' && listener.handleEvent) {
listener.handleEvent.call(listener, event);
} else {
listener.call(thisArg, event);
}
}
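//
// Editorial note: mirroring the DOM, a listener may be either a function or an
// object with a `handleEvent()` method, e.g.
// `ws.addEventListener('message', { handleEvent(event) { /* ... */ } })`.
//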
const { tokenChars: tokenChars$1 } = validationExports;
/**
* Adds an offer to the map of extension offers or a parameter to the map of
* parameters.
*
* @param {Object} dest The map of extension offers or parameters
* @param {String} name The extension or parameter name
* @param {(Object|Boolean|String)} elem The extension parameters or the
* parameter value
* @private
*/
function push(dest, name, elem) {
if (dest[name] === undefined) dest[name] = [elem];
else dest[name].push(elem);
}
/**
* Parses the `Sec-WebSocket-Extensions` header into an object.
*
* @param {String} header The field value of the header
* @return {Object} The parsed object
* @public
*/
function parse$2(header) {
const offers = Object.create(null);
let params = Object.create(null);
let mustUnescape = false;
let isEscaping = false;
let inQuotes = false;
let extensionName;
let paramName;
let start = -1;
let code = -1;
let end = -1;
let i = 0;
for (; i < header.length; i++) {
code = header.charCodeAt(i);
if (extensionName === undefined) {
if (end === -1 && tokenChars$1[code] === 1) {
if (start === -1) start = i;
} else if (
i !== 0 &&
(code === 0x20 /* ' ' */ || code === 0x09) /* '\t' */
) {
if (end === -1 && start !== -1) end = i;
} else if (code === 0x3b /* ';' */ || code === 0x2c /* ',' */) {
if (start === -1) {
throw new SyntaxError(`Unexpected character at index ${i}`);
}
if (end === -1) end = i;
const name = header.slice(start, end);
if (code === 0x2c) {
push(offers, name, params);
params = Object.create(null);
} else {
extensionName = name;
}
start = end = -1;
} else {
throw new SyntaxError(`Unexpected character at index ${i}`);
}
} else if (paramName === undefined) {
if (end === -1 && tokenChars$1[code] === 1) {
if (start === -1) start = i;
} else if (code === 0x20 || code === 0x09) {
if (end === -1 && start !== -1) end = i;
} else if (code === 0x3b || code === 0x2c) {
if (start === -1) {
throw new SyntaxError(`Unexpected character at index ${i}`);
}
if (end === -1) end = i;
push(params, header.slice(start, end), true);
if (code === 0x2c) {
push(offers, extensionName, params);
params = Object.create(null);
extensionName = undefined;
}
start = end = -1;
} else if (code === 0x3d /* '=' */ && start !== -1 && end === -1) {
paramName = header.slice(start, i);
start = end = -1;
} else {
throw new SyntaxError(`Unexpected character at index ${i}`);
}
} else {
//
// The value of a quoted-string after unescaping must conform to the
// token ABNF, so only token characters are valid.
// Ref: https://tools.ietf.org/html/rfc6455#section-9.1
//
if (isEscaping) {
if (tokenChars$1[code] !== 1) {
throw new SyntaxError(`Unexpected character at index ${i}`);
}
if (start === -1) start = i;
else if (!mustUnescape) mustUnescape = true;
isEscaping = false;
} else if (inQuotes) {
if (tokenChars$1[code] === 1) {
if (start === -1) start = i;
} else if (code === 0x22 /* '"' */ && start !== -1) {
inQuotes = false;
end = i;
} else if (code === 0x5c /* '\' */) {
isEscaping = true;
} else {
throw new SyntaxError(`Unexpected character at index ${i}`);
}
} else if (code === 0x22 && header.charCodeAt(i - 1) === 0x3d) {
inQuotes = true;
} else if (end === -1 && tokenChars$1[code] === 1) {
if (start === -1) start = i;
} else if (start !== -1 && (code === 0x20 || code === 0x09)) {
if (end === -1) end = i;
} else if (code === 0x3b || code === 0x2c) {
if (start === -1) {
throw new SyntaxError(`Unexpected character at index ${i}`);
}
if (end === -1) end = i;
let value = header.slice(start, end);
if (mustUnescape) {
value = value.replace(/\\/g, '');
mustUnescape = false;
}
push(params, paramName, value);
if (code === 0x2c) {
push(offers, extensionName, params);
params = Object.create(null);
extensionName = undefined;
}
paramName = undefined;
start = end = -1;
} else {
throw new SyntaxError(`Unexpected character at index ${i}`);
}
}
}
if (start === -1 || inQuotes || code === 0x20 || code === 0x09) {
throw new SyntaxError('Unexpected end of input');
}
if (end === -1) end = i;
const token = header.slice(start, end);
if (extensionName === undefined) {
push(offers, token, params);
} else {
if (paramName === undefined) {
push(params, token, true);
} else if (mustUnescape) {
push(params, paramName, token.replace(/\\/g, ''));
} else {
push(params, paramName, token);
}
push(offers, extensionName, params);
}
return offers;
}
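//
// Illustrative example (shape only, all objects have a null prototype):
//   parse$2('permessage-deflate; client_max_window_bits, permessage-deflate')
//   // -> { 'permessage-deflate': [{ client_max_window_bits: [true] }, {}] }
// Parameter values are always collected in arrays, and a parameter without a
// value is recorded as `true`.
//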
/**
* Builds the `Sec-WebSocket-Extensions` header field value.
*
* @param {Object} extensions The map of extensions and parameters to format
* @return {String} A string representing the given object
* @public
*/
function format$1(extensions) {
return Object.keys(extensions)
.map((extension) => {
let configurations = extensions[extension];
if (!Array.isArray(configurations)) configurations = [configurations];
return configurations
.map((params) => {
return [extension]
.concat(
Object.keys(params).map((k) => {
let values = params[k];
if (!Array.isArray(values)) values = [values];
return values
.map((v) => (v === true ? k : `${k}=${v}`))
.join('; ');
})
)
.join('; ');
})
.join(', ');
})
.join(', ');
}
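//
// Illustrative example:
//   format$1({ 'permessage-deflate': { client_max_window_bits: true } })
//   // -> 'permessage-deflate; client_max_window_bits'
// i.e. the inverse of `parse$2` for simple offers.
//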
var extension$1 = { format: format$1, parse: parse$2 };
/* eslint no-unused-vars: ["error", { "varsIgnorePattern": "^Duplex|Readable$", "caughtErrors": "none" }] */
const EventEmitter$1 = require$$0$7;
const https$2 = require$$1$1;
const http$3 = require$$1;
const net = require$$4$1;
const tls = require$$4$2;
const { randomBytes, createHash: createHash$1 } = require$$3$1;
const { URL: URL$2 } = require$$0$9;
const PerMessageDeflate$1 = permessageDeflate;
const Receiver = receiver;
const Sender = sender;
const { isBlob } = validationExports;
const {
BINARY_TYPES,
EMPTY_BUFFER,
GUID: GUID$1,
kForOnEventAttribute,
kListener,
kStatusCode,
kWebSocket: kWebSocket$1,
NOOP: NOOP$1
} = constants;
const {
EventTarget: { addEventListener, removeEventListener }
} = eventTarget;
const { format, parse: parse$1 } = extension$1;
const { toBuffer } = bufferUtilExports;
const closeTimeout = 30 * 1000;
const kAborted = Symbol('kAborted');
const protocolVersions = [8, 13];
const readyStates = ['CONNECTING', 'OPEN', 'CLOSING', 'CLOSED'];
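//
// Editorial note: `readyStates.indexOf()` is used below to define the numeric
// constants, so CONNECTING === 0, OPEN === 1, CLOSING === 2 and CLOSED === 3,
// matching the WHATWG WebSocket interface.
//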
const subprotocolRegex = /^[!#$%&'*+\-.0-9A-Z^_`|a-z~]+$/;
/**
* Class representing a WebSocket.
*
* @extends EventEmitter
*/
let WebSocket$1 = class WebSocket extends EventEmitter$1 {
/**
* Create a new `WebSocket`.
*
* @param {(String|URL)} address The URL to which to connect
* @param {(String|String[])} [protocols] The subprotocols
* @param {Object} [options] Connection options
*/
constructor(address, protocols, options) {
super();
this._binaryType = BINARY_TYPES[0];
this._closeCode = 1006;
this._closeFrameReceived = false;
this._closeFrameSent = false;
this._closeMessage = EMPTY_BUFFER;
this._closeTimer = null;
this._errorEmitted = false;
this._extensions = {};
this._paused = false;
this._protocol = '';
this._readyState = WebSocket.CONNECTING;
this._receiver = null;
this._sender = null;
this._socket = null;
if (address !== null) {
this._bufferedAmount = 0;
this._isServer = false;
this._redirects = 0;
if (protocols === undefined) {
protocols = [];
} else if (!Array.isArray(protocols)) {
if (typeof protocols === 'object' && protocols !== null) {
options = protocols;
protocols = [];
} else {
protocols = [protocols];
}
}
initAsClient(this, address, protocols, options);
} else {
this._autoPong = options.autoPong;
this._isServer = true;
}
}
/**
* For historical reasons, the custom "nodebuffer" type is used by default
* instead of "blob".
*
* @type {String}
*/
get binaryType() {
return this._binaryType;
}
set binaryType(type) {
if (!BINARY_TYPES.includes(type)) return;
this._binaryType = type;
//
// Allow `binaryType` to be changed on the fly.
//
if (this._receiver) this._receiver._binaryType = type;
}
/**
* @type {Number}
*/
get bufferedAmount() {
if (!this._socket) return this._bufferedAmount;
return this._socket._writableState.length + this._sender._bufferedBytes;
}
/**
* @type {String}
*/
get extensions() {
return Object.keys(this._extensions).join();
}
/**
* @type {Boolean}
*/
get isPaused() {
return this._paused;
}
/**
* @type {Function}
*/
/* istanbul ignore next */
get onclose() {
return null;
}
/**
* @type {Function}
*/
/* istanbul ignore next */
get onerror() {
return null;
}
/**
* @type {Function}
*/
/* istanbul ignore next */
get onopen() {
return null;
}
/**
* @type {Function}
*/
/* istanbul ignore next */
get onmessage() {
return null;
}
/**
* @type {String}
*/
get protocol() {
return this._protocol;
}
/**
* @type {Number}
*/
get readyState() {
return this._readyState;
}
/**
* @type {String}
*/
get url() {
return this._url;
}
/**
* Set up the socket and the internal resources.
*
* @param {Duplex} socket The network socket between the server and client
* @param {Buffer} head The first packet of the upgraded stream
* @param {Object} options Options object
* @param {Boolean} [options.allowSynchronousEvents=false] Specifies whether
* any of the `'message'`, `'ping'`, and `'pong'` events can be emitted
* multiple times in the same tick
* @param {Function} [options.generateMask] The function used to generate the
* masking key
* @param {Number} [options.maxPayload=0] The maximum allowed message size
* @param {Boolean} [options.skipUTF8Validation=false] Specifies whether or
* not to skip UTF-8 validation for text and close messages
* @private
*/
setSocket(socket, head, options) {
const receiver = new Receiver({
allowSynchronousEvents: options.allowSynchronousEvents,
binaryType: this.binaryType,
extensions: this._extensions,
isServer: this._isServer,
maxPayload: options.maxPayload,
skipUTF8Validation: options.skipUTF8Validation
});
const sender = new Sender(socket, this._extensions, options.generateMask);
this._receiver = receiver;
this._sender = sender;
this._socket = socket;
receiver[kWebSocket$1] = this;
sender[kWebSocket$1] = this;
socket[kWebSocket$1] = this;
receiver.on('conclude', receiverOnConclude);
receiver.on('drain', receiverOnDrain);
receiver.on('error', receiverOnError);
receiver.on('message', receiverOnMessage);
receiver.on('ping', receiverOnPing);
receiver.on('pong', receiverOnPong);
sender.onerror = senderOnError;
//
// These methods may not be available if `socket` is just a `Duplex`.
//
if (socket.setTimeout) socket.setTimeout(0);
if (socket.setNoDelay) socket.setNoDelay();
if (head.length > 0) socket.unshift(head);
socket.on('close', socketOnClose);
socket.on('data', socketOnData);
socket.on('end', socketOnEnd);
socket.on('error', socketOnError$1);
this._readyState = WebSocket.OPEN;
this.emit('open');
}
/**
* Emit the `'close'` event.
*
* @private
*/
emitClose() {
if (!this._socket) {
this._readyState = WebSocket.CLOSED;
this.emit('close', this._closeCode, this._closeMessage);
return;
}
if (this._extensions[PerMessageDeflate$1.extensionName]) {
this._extensions[PerMessageDeflate$1.extensionName].cleanup();
}
this._receiver.removeAllListeners();
this._readyState = WebSocket.CLOSED;
this.emit('close', this._closeCode, this._closeMessage);
}
/**
* Start a closing handshake.
*
* +----------+ +-----------+ +----------+
* - - -|ws.close()|-->|close frame|-->|ws.close()|- - -
* | +----------+ +-----------+ +----------+ |
* +----------+ +-----------+ |
* CLOSING |ws.close()|<--|close frame|<--+-----+ CLOSING
* +----------+ +-----------+ |
* | | | +---+ |
* +------------------------+-->|fin| - - - -
* | +---+ | +---+
* - - - - -|fin|<---------------------+
* +---+
*
* @param {Number} [code] Status code explaining why the connection is closing
* @param {(String|Buffer)} [data] The reason why the connection is
* closing
* @public
*/
close(code, data) {
if (this.readyState === WebSocket.CLOSED) return;
if (this.readyState === WebSocket.CONNECTING) {
const msg = 'WebSocket was closed before the connection was established';
abortHandshake$1(this, this._req, msg);
return;
}
if (this.readyState === WebSocket.CLOSING) {
if (
this._closeFrameSent &&
(this._closeFrameReceived || this._receiver._writableState.errorEmitted)
) {
this._socket.end();
}
return;
}
this._readyState = WebSocket.CLOSING;
this._sender.close(code, data, !this._isServer, (err) => {
//
// This error is handled by the `'error'` listener on the socket. We only
// want to know if the close frame has been sent here.
//
if (err) return;
this._closeFrameSent = true;
if (
this._closeFrameReceived ||
this._receiver._writableState.errorEmitted
) {
this._socket.end();
}
});
setCloseTimer(this);
}
/**
* Pause the socket.
*
* @public
*/
pause() {
if (
this.readyState === WebSocket.CONNECTING ||
this.readyState === WebSocket.CLOSED
) {
return;
}
this._paused = true;
this._socket.pause();
}
/**
* Send a ping.
*
* @param {*} [data] The data to send
* @param {Boolean} [mask] Indicates whether or not to mask `data`
* @param {Function} [cb] Callback which is executed when the ping is sent
* @public
*/
ping(data, mask, cb) {
if (this.readyState === WebSocket.CONNECTING) {
throw new Error('WebSocket is not open: readyState 0 (CONNECTING)');
}
if (typeof data === 'function') {
cb = data;
data = mask = undefined;
} else if (typeof mask === 'function') {
cb = mask;
mask = undefined;
}
if (typeof data === 'number') data = data.toString();
if (this.readyState !== WebSocket.OPEN) {
sendAfterClose(this, data, cb);
return;
}
if (mask === undefined) mask = !this._isServer;
this._sender.ping(data || EMPTY_BUFFER, mask, cb);
}
/**
* Send a pong.
*
* @param {*} [data] The data to send
* @param {Boolean} [mask] Indicates whether or not to mask `data`
* @param {Function} [cb] Callback which is executed when the pong is sent
* @public
*/
pong(data, mask, cb) {
if (this.readyState === WebSocket.CONNECTING) {
throw new Error('WebSocket is not open: readyState 0 (CONNECTING)');
}
if (typeof data === 'function') {
cb = data;
data = mask = undefined;
} else if (typeof mask === 'function') {
cb = mask;
mask = undefined;
}
if (typeof data === 'number') data = data.toString();
if (this.readyState !== WebSocket.OPEN) {
sendAfterClose(this, data, cb);
return;
}
if (mask === undefined) mask = !this._isServer;
this._sender.pong(data || EMPTY_BUFFER, mask, cb);
}
/**
* Resume the socket.
*
* @public
*/
resume() {
if (
this.readyState === WebSocket.CONNECTING ||
this.readyState === WebSocket.CLOSED
) {
return;
}
this._paused = false;
if (!this._receiver._writableState.needDrain) this._socket.resume();
}
/**
* Send a data message.
*
* @param {*} data The message to send
* @param {Object} [options] Options object
* @param {Boolean} [options.binary] Specifies whether `data` is binary or
* text
* @param {Boolean} [options.compress] Specifies whether or not to compress
* `data`
* @param {Boolean} [options.fin=true] Specifies whether the fragment is the
* last one
* @param {Boolean} [options.mask] Specifies whether or not to mask `data`
* @param {Function} [cb] Callback which is executed when data is written out
* @public
*/
send(data, options, cb) {
if (this.readyState === WebSocket.CONNECTING) {
throw new Error('WebSocket is not open: readyState 0 (CONNECTING)');
}
if (typeof options === 'function') {
cb = options;
options = {};
}
if (typeof data === 'number') data = data.toString();
if (this.readyState !== WebSocket.OPEN) {
sendAfterClose(this, data, cb);
return;
}
const opts = {
binary: typeof data !== 'string',
mask: !this._isServer,
compress: true,
fin: true,
...options
};
if (!this._extensions[PerMessageDeflate$1.extensionName]) {
opts.compress = false;
}
this._sender.send(data || EMPTY_BUFFER, opts, cb);
}
/**
* Forcibly close the connection.
*
* @public
*/
terminate() {
if (this.readyState === WebSocket.CLOSED) return;
if (this.readyState === WebSocket.CONNECTING) {
const msg = 'WebSocket was closed before the connection was established';
abortHandshake$1(this, this._req, msg);
return;
}
if (this._socket) {
this._readyState = WebSocket.CLOSING;
this._socket.destroy();
}
}
};
/**
* @constant {Number} CONNECTING
* @memberof WebSocket
*/
Object.defineProperty(WebSocket$1, 'CONNECTING', {
enumerable: true,
value: readyStates.indexOf('CONNECTING')
});
/**
* @constant {Number} CONNECTING
* @memberof WebSocket.prototype
*/
Object.defineProperty(WebSocket$1.prototype, 'CONNECTING', {
enumerable: true,
value: readyStates.indexOf('CONNECTING')
});
/**
* @constant {Number} OPEN
* @memberof WebSocket
*/
Object.defineProperty(WebSocket$1, 'OPEN', {
enumerable: true,
value: readyStates.indexOf('OPEN')
});
/**
* @constant {Number} OPEN
* @memberof WebSocket.prototype
*/
Object.defineProperty(WebSocket$1.prototype, 'OPEN', {
enumerable: true,
value: readyStates.indexOf('OPEN')
});
/**
* @constant {Number} CLOSING
* @memberof WebSocket
*/
Object.defineProperty(WebSocket$1, 'CLOSING', {
enumerable: true,
value: readyStates.indexOf('CLOSING')
});
/**
* @constant {Number} CLOSING
* @memberof WebSocket.prototype
*/
Object.defineProperty(WebSocket$1.prototype, 'CLOSING', {
enumerable: true,
value: readyStates.indexOf('CLOSING')
});
/**
* @constant {Number} CLOSED
* @memberof WebSocket
*/
Object.defineProperty(WebSocket$1, 'CLOSED', {
enumerable: true,
value: readyStates.indexOf('CLOSED')
});
/**
* @constant {Number} CLOSED
* @memberof WebSocket.prototype
*/
Object.defineProperty(WebSocket$1.prototype, 'CLOSED', {
enumerable: true,
value: readyStates.indexOf('CLOSED')
});
[
'binaryType',
'bufferedAmount',
'extensions',
'isPaused',
'protocol',
'readyState',
'url'
].forEach((property) => {
Object.defineProperty(WebSocket$1.prototype, property, { enumerable: true });
});
//
// Add the `onopen`, `onerror`, `onclose`, and `onmessage` attributes.
// See https://html.spec.whatwg.org/multipage/comms.html#the-websocket-interface
//
['open', 'error', 'close', 'message'].forEach((method) => {
Object.defineProperty(WebSocket$1.prototype, `on${method}`, {
enumerable: true,
get() {
for (const listener of this.listeners(method)) {
if (listener[kForOnEventAttribute]) return listener[kListener];
}
return null;
},
set(handler) {
for (const listener of this.listeners(method)) {
if (listener[kForOnEventAttribute]) {
this.removeListener(method, listener);
break;
}
}
if (typeof handler !== 'function') return;
this.addEventListener(method, handler, {
[kForOnEventAttribute]: true
});
}
});
});
WebSocket$1.prototype.addEventListener = addEventListener;
WebSocket$1.prototype.removeEventListener = removeEventListener;
var websocket = WebSocket$1;
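//
// Illustrative usage of the vendored client above (a hypothetical endpoint,
// not executed here):
//   const ws = new WebSocket$1('ws://localhost:8080', { perMessageDeflate: false });
//   ws.on('open', () => ws.send('hello'));
//   ws.on('message', (data, isBinary) => {
//     console.log(isBinary ? data : data.toString());
//   });
//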
/**
* Initialize a WebSocket client.
*
* @param {WebSocket} websocket The client to initialize
* @param {(String|URL)} address The URL to which to connect
* @param {Array} protocols The subprotocols
* @param {Object} [options] Connection options
* @param {Boolean} [options.allowSynchronousEvents=true] Specifies whether any
* of the `'message'`, `'ping'`, and `'pong'` events can be emitted multiple
* times in the same tick
* @param {Boolean} [options.autoPong=true] Specifies whether or not to
* automatically send a pong in response to a ping
* @param {Function} [options.finishRequest] A function which can be used to
* customize the headers of each http request before it is sent
* @param {Boolean} [options.followRedirects=false] Whether or not to follow
* redirects
* @param {Function} [options.generateMask] The function used to generate the
* masking key
* @param {Number} [options.handshakeTimeout] Timeout in milliseconds for the
* handshake request
* @param {Number} [options.maxPayload=104857600] The maximum allowed message
* size
* @param {Number} [options.maxRedirects=10] The maximum number of redirects
* allowed
* @param {String} [options.origin] Value of the `Origin` or
* `Sec-WebSocket-Origin` header
* @param {(Boolean|Object)} [options.perMessageDeflate=true] Enable/disable
* permessage-deflate
* @param {Number} [options.protocolVersion=13] Value of the
* `Sec-WebSocket-Version` header
* @param {Boolean} [options.skipUTF8Validation=false] Specifies whether or
* not to skip UTF-8 validation for text and close messages
* @private
*/
function initAsClient(websocket, address, protocols, options) {
const opts = {
allowSynchronousEvents: true,
autoPong: true,
protocolVersion: protocolVersions[1],
maxPayload: 100 * 1024 * 1024,
skipUTF8Validation: false,
perMessageDeflate: true,
followRedirects: false,
maxRedirects: 10,
...options,
socketPath: undefined,
hostname: undefined,
protocol: undefined,
timeout: undefined,
method: 'GET',
host: undefined,
path: undefined,
port: undefined
};
websocket._autoPong = opts.autoPong;
if (!protocolVersions.includes(opts.protocolVersion)) {
throw new RangeError(
`Unsupported protocol version: ${opts.protocolVersion} ` +
`(supported versions: ${protocolVersions.join(', ')})`
);
}
let parsedUrl;
if (address instanceof URL$2) {
parsedUrl = address;
} else {
try {
parsedUrl = new URL$2(address);
} catch (e) {
throw new SyntaxError(`Invalid URL: ${address}`);
}
}
if (parsedUrl.protocol === 'http:') {
parsedUrl.protocol = 'ws:';
} else if (parsedUrl.protocol === 'https:') {
parsedUrl.protocol = 'wss:';
}
websocket._url = parsedUrl.href;
const isSecure = parsedUrl.protocol === 'wss:';
const isIpcUrl = parsedUrl.protocol === 'ws+unix:';
let invalidUrlMessage;
if (parsedUrl.protocol !== 'ws:' && !isSecure && !isIpcUrl) {
invalidUrlMessage =
'The URL\'s protocol must be one of "ws:", "wss:", ' +
'"http:", "https", or "ws+unix:"';
} else if (isIpcUrl && !parsedUrl.pathname) {
invalidUrlMessage = "The URL's pathname is empty";
} else if (parsedUrl.hash) {
invalidUrlMessage = 'The URL contains a fragment identifier';
}
if (invalidUrlMessage) {
const err = new SyntaxError(invalidUrlMessage);
if (websocket._redirects === 0) {
throw err;
} else {
emitErrorAndClose(websocket, err);
return;
}
}
const defaultPort = isSecure ? 443 : 80;
const key = randomBytes(16).toString('base64');
const request = isSecure ? https$2.request : http$3.request;
const protocolSet = new Set();
let perMessageDeflate;
opts.createConnection =
opts.createConnection || (isSecure ? tlsConnect : netConnect);
opts.defaultPort = opts.defaultPort || defaultPort;
opts.port = parsedUrl.port || defaultPort;
opts.host = parsedUrl.hostname.startsWith('[')
? parsedUrl.hostname.slice(1, -1)
: parsedUrl.hostname;
opts.headers = {
...opts.headers,
'Sec-WebSocket-Version': opts.protocolVersion,
'Sec-WebSocket-Key': key,
Connection: 'Upgrade',
Upgrade: 'websocket'
};
opts.path = parsedUrl.pathname + parsedUrl.search;
opts.timeout = opts.handshakeTimeout;
if (opts.perMessageDeflate) {
perMessageDeflate = new PerMessageDeflate$1(
opts.perMessageDeflate !== true ? opts.perMessageDeflate : {},
false,
opts.maxPayload
);
opts.headers['Sec-WebSocket-Extensions'] = format({
[PerMessageDeflate$1.extensionName]: perMessageDeflate.offer()
});
}
if (protocols.length) {
for (const protocol of protocols) {
if (
typeof protocol !== 'string' ||
!subprotocolRegex.test(protocol) ||
protocolSet.has(protocol)
) {
throw new SyntaxError(
'An invalid or duplicated subprotocol was specified'
);
}
protocolSet.add(protocol);
}
opts.headers['Sec-WebSocket-Protocol'] = protocols.join(',');
}
if (opts.origin) {
if (opts.protocolVersion < 13) {
opts.headers['Sec-WebSocket-Origin'] = opts.origin;
} else {
opts.headers.Origin = opts.origin;
}
}
if (parsedUrl.username || parsedUrl.password) {
opts.auth = `${parsedUrl.username}:${parsedUrl.password}`;
}
if (isIpcUrl) {
const parts = opts.path.split(':');
opts.socketPath = parts[0];
opts.path = parts[1];
}
let req;
if (opts.followRedirects) {
if (websocket._redirects === 0) {
websocket._originalIpc = isIpcUrl;
websocket._originalSecure = isSecure;
websocket._originalHostOrSocketPath = isIpcUrl
? opts.socketPath
: parsedUrl.host;
const headers = options && options.headers;
//
// Shallow copy the user provided options so that headers can be changed
// without mutating the original object.
//
options = { ...options, headers: {} };
if (headers) {
for (const [key, value] of Object.entries(headers)) {
options.headers[key.toLowerCase()] = value;
}
}
} else if (websocket.listenerCount('redirect') === 0) {
const isSameHost = isIpcUrl
? websocket._originalIpc
? opts.socketPath === websocket._originalHostOrSocketPath
: false
: websocket._originalIpc
? false
: parsedUrl.host === websocket._originalHostOrSocketPath;
if (!isSameHost || (websocket._originalSecure && !isSecure)) {
//
// Match curl 7.77.0 behavior and drop the following headers. These
// headers are also dropped when following a redirect to a subdomain.
//
delete opts.headers.authorization;
delete opts.headers.cookie;
if (!isSameHost) delete opts.headers.host;
opts.auth = undefined;
}
}
//
// Match curl 7.77.0 behavior and make the first `Authorization` header win.
// If the `Authorization` header is set, then there is nothing to do as it
// will take precedence.
//
if (opts.auth && !options.headers.authorization) {
options.headers.authorization =
'Basic ' + Buffer.from(opts.auth).toString('base64');
}
req = websocket._req = request(opts);
if (websocket._redirects) {
//
// Unlike what is done for the `'upgrade'` event, no early exit is
// triggered here if the user calls `websocket.close()` or
// `websocket.terminate()` from a listener of the `'redirect'` event. This
// is because the user can also call `request.destroy()` with an error
// before calling `websocket.close()` or `websocket.terminate()` and this
// would result in an error being emitted on the `request` object with no
// `'error'` event listeners attached.
//
websocket.emit('redirect', websocket.url, req);
}
} else {
req = websocket._req = request(opts);
}
if (opts.timeout) {
req.on('timeout', () => {
abortHandshake$1(websocket, req, 'Opening handshake has timed out');
});
}
req.on('error', (err) => {
if (req === null || req[kAborted]) return;
req = websocket._req = null;
emitErrorAndClose(websocket, err);
});
req.on('response', (res) => {
const location = res.headers.location;
const statusCode = res.statusCode;
if (
location &&
opts.followRedirects &&
statusCode >= 300 &&
statusCode < 400
) {
if (++websocket._redirects > opts.maxRedirects) {
abortHandshake$1(websocket, req, 'Maximum redirects exceeded');
return;
}
req.abort();
let addr;
try {
addr = new URL$2(location, address);
} catch (e) {
const err = new SyntaxError(`Invalid URL: ${location}`);
emitErrorAndClose(websocket, err);
return;
}
initAsClient(websocket, addr, protocols, options);
} else if (!websocket.emit('unexpected-response', req, res)) {
abortHandshake$1(
websocket,
req,
`Unexpected server response: ${res.statusCode}`
);
}
});
req.on('upgrade', (res, socket, head) => {
websocket.emit('upgrade', res);
//
// The user may have closed the connection from a listener of the
// `'upgrade'` event.
//
if (websocket.readyState !== WebSocket$1.CONNECTING) return;
req = websocket._req = null;
const upgrade = res.headers.upgrade;
if (upgrade === undefined || upgrade.toLowerCase() !== 'websocket') {
abortHandshake$1(websocket, socket, 'Invalid Upgrade header');
return;
}
const digest = createHash$1('sha1')
.update(key + GUID$1)
.digest('base64');
if (res.headers['sec-websocket-accept'] !== digest) {
abortHandshake$1(websocket, socket, 'Invalid Sec-WebSocket-Accept header');
return;
}
const serverProt = res.headers['sec-websocket-protocol'];
let protError;
if (serverProt !== undefined) {
if (!protocolSet.size) {
protError = 'Server sent a subprotocol but none was requested';
} else if (!protocolSet.has(serverProt)) {
protError = 'Server sent an invalid subprotocol';
}
} else if (protocolSet.size) {
protError = 'Server sent no subprotocol';
}
if (protError) {
abortHandshake$1(websocket, socket, protError);
return;
}
if (serverProt) websocket._protocol = serverProt;
const secWebSocketExtensions = res.headers['sec-websocket-extensions'];
if (secWebSocketExtensions !== undefined) {
if (!perMessageDeflate) {
const message =
'Server sent a Sec-WebSocket-Extensions header but no extension ' +
'was requested';
abortHandshake$1(websocket, socket, message);
return;
}
let extensions;
try {
extensions = parse$1(secWebSocketExtensions);
} catch (err) {
const message = 'Invalid Sec-WebSocket-Extensions header';
abortHandshake$1(websocket, socket, message);
return;
}
const extensionNames = Object.keys(extensions);
if (
extensionNames.length !== 1 ||
extensionNames[0] !== PerMessageDeflate$1.extensionName
) {
const message = 'Server indicated an extension that was not requested';
abortHandshake$1(websocket, socket, message);
return;
}
try {
perMessageDeflate.accept(extensions[PerMessageDeflate$1.extensionName]);
} catch (err) {
const message = 'Invalid Sec-WebSocket-Extensions header';
abortHandshake$1(websocket, socket, message);
return;
}
websocket._extensions[PerMessageDeflate$1.extensionName] =
perMessageDeflate;
}
websocket.setSocket(socket, head, {
allowSynchronousEvents: opts.allowSynchronousEvents,
generateMask: opts.generateMask,
maxPayload: opts.maxPayload,
skipUTF8Validation: opts.skipUTF8Validation
});
});
if (opts.finishRequest) {
opts.finishRequest(req, websocket);
} else {
req.end();
}
}
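//
// Editor's usage sketch (assumption, not part of the original bundle): the
// values below are hypothetical settings for the options documented on
// `initAsClient()`. The helper is defined but never invoked; it only shows how
// a client reaches `initAsClient()` through the `WebSocket` constructor.
//
function _exampleCreateClient() {
  return new WebSocket$1('ws://localhost:24678/', ['vite-hmr'], {
    handshakeTimeout: 5000,   // abort the opening handshake after 5 seconds
    perMessageDeflate: false, // do not offer the permessage-deflate extension
    maxPayload: 1024 * 1024,  // cap inbound messages at 1 MiB
    followRedirects: false    // default: redirects are not followed
  });
}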
/**
* Emit the `'error'` and `'close'` events.
*
* @param {WebSocket} websocket The WebSocket instance
* @param {Error} err The error to emit
* @private
*/
function emitErrorAndClose(websocket, err) {
websocket._readyState = WebSocket$1.CLOSING;
//
// The following assignment is practically useless and is done only for
// consistency.
//
websocket._errorEmitted = true;
websocket.emit('error', err);
websocket.emitClose();
}
/**
* Create a `net.Socket` and initiate a connection.
*
* @param {Object} options Connection options
* @return {net.Socket} The newly created socket used to start the connection
* @private
*/
function netConnect(options) {
options.path = options.socketPath;
return net.connect(options);
}
/**
* Create a `tls.TLSSocket` and initiate a connection.
*
* @param {Object} options Connection options
* @return {tls.TLSSocket} The newly created socket used to start the connection
* @private
*/
function tlsConnect(options) {
options.path = undefined;
if (!options.servername && options.servername !== '') {
options.servername = net.isIP(options.host) ? '' : options.host;
}
return tls.connect(options);
}
/**
* Abort the handshake and emit an error.
*
* @param {WebSocket} websocket The WebSocket instance
* @param {(http.ClientRequest|net.Socket|tls.Socket)} stream The request to
* abort or the socket to destroy
* @param {String} message The error message
* @private
*/
function abortHandshake$1(websocket, stream, message) {
websocket._readyState = WebSocket$1.CLOSING;
const err = new Error(message);
Error.captureStackTrace(err, abortHandshake$1);
if (stream.setHeader) {
stream[kAborted] = true;
stream.abort();
if (stream.socket && !stream.socket.destroyed) {
//
// On Node.js >= 14.3.0 `request.abort()` does not destroy the socket if
// called after the request completed. See
// https://github.com/websockets/ws/issues/1869.
//
stream.socket.destroy();
}
process.nextTick(emitErrorAndClose, websocket, err);
} else {
stream.destroy(err);
stream.once('error', websocket.emit.bind(websocket, 'error'));
stream.once('close', websocket.emitClose.bind(websocket));
}
}
/**
* Handle cases where the `ping()`, `pong()`, or `send()` methods are called
* when the `readyState` attribute is `CLOSING` or `CLOSED`.
*
* @param {WebSocket} websocket The WebSocket instance
* @param {*} [data] The data to send
* @param {Function} [cb] Callback
* @private
*/
function sendAfterClose(websocket, data, cb) {
if (data) {
const length = isBlob(data) ? data.size : toBuffer(data).length;
//
// The `_bufferedAmount` property is used only when the peer is a client and
// the opening handshake fails. Under these circumstances, in fact, the
// `setSocket()` method is not called, so the `_socket` and `_sender`
// properties are set to `null`.
//
if (websocket._socket) websocket._sender._bufferedBytes += length;
else websocket._bufferedAmount += length;
}
if (cb) {
const err = new Error(
`WebSocket is not open: readyState ${websocket.readyState} ` +
`(${readyStates[websocket.readyState]})`
);
process.nextTick(cb, err);
}
}
/**
* The listener of the `Receiver` `'conclude'` event.
*
* @param {Number} code The status code
* @param {Buffer} reason The reason for closing
* @private
*/
function receiverOnConclude(code, reason) {
const websocket = this[kWebSocket$1];
websocket._closeFrameReceived = true;
websocket._closeMessage = reason;
websocket._closeCode = code;
if (websocket._socket[kWebSocket$1] === undefined) return;
websocket._socket.removeListener('data', socketOnData);
process.nextTick(resume, websocket._socket);
if (code === 1005) websocket.close();
else websocket.close(code, reason);
}
/**
* The listener of the `Receiver` `'drain'` event.
*
* @private
*/
function receiverOnDrain() {
const websocket = this[kWebSocket$1];
if (!websocket.isPaused) websocket._socket.resume();
}
/**
* The listener of the `Receiver` `'error'` event.
*
* @param {(RangeError|Error)} err The emitted error
* @private
*/
function receiverOnError(err) {
const websocket = this[kWebSocket$1];
if (websocket._socket[kWebSocket$1] !== undefined) {
websocket._socket.removeListener('data', socketOnData);
//
// On Node.js < 14.0.0 the `'error'` event is emitted synchronously. See
// https://github.com/websockets/ws/issues/1940.
//
process.nextTick(resume, websocket._socket);
websocket.close(err[kStatusCode]);
}
if (!websocket._errorEmitted) {
websocket._errorEmitted = true;
websocket.emit('error', err);
}
}
/**
* The listener of the `Receiver` `'finish'` event.
*
* @private
*/
function receiverOnFinish() {
this[kWebSocket$1].emitClose();
}
/**
* The listener of the `Receiver` `'message'` event.
*
* @param {(Buffer|ArrayBuffer|Buffer[])} data The message
* @param {Boolean} isBinary Specifies whether the message is binary or not
* @private
*/
function receiverOnMessage(data, isBinary) {
this[kWebSocket$1].emit('message', data, isBinary);
}
/**
* The listener of the `Receiver` `'ping'` event.
*
* @param {Buffer} data The data included in the ping frame
* @private
*/
function receiverOnPing(data) {
const websocket = this[kWebSocket$1];
if (websocket._autoPong) websocket.pong(data, !this._isServer, NOOP$1);
websocket.emit('ping', data);
}
/**
* The listener of the `Receiver` `'pong'` event.
*
* @param {Buffer} data The data included in the pong frame
* @private
*/
function receiverOnPong(data) {
this[kWebSocket$1].emit('pong', data);
}
/**
* Resume a readable stream
*
* @param {Readable} stream The readable stream
* @private
*/
function resume(stream) {
stream.resume();
}
/**
* The `Sender` error event handler.
*
* @param {Error} err The emitted error
* @private
*/
function senderOnError(err) {
const websocket = this[kWebSocket$1];
if (websocket.readyState === WebSocket$1.CLOSED) return;
if (websocket.readyState === WebSocket$1.OPEN) {
websocket._readyState = WebSocket$1.CLOSING;
setCloseTimer(websocket);
}
//
// `socket.end()` is used instead of `socket.destroy()` to allow the other
// peer to finish sending queued data. There is no need to set a timer here
// because `CLOSING` means that it is already set or not needed.
//
this._socket.end();
if (!websocket._errorEmitted) {
websocket._errorEmitted = true;
websocket.emit('error', err);
}
}
/**
* Set a timer to destroy the underlying raw socket of a WebSocket.
*
* @param {WebSocket} websocket The WebSocket instance
* @private
*/
function setCloseTimer(websocket) {
websocket._closeTimer = setTimeout(
websocket._socket.destroy.bind(websocket._socket),
closeTimeout
);
}
/**
* The listener of the socket `'close'` event.
*
* @private
*/
function socketOnClose() {
const websocket = this[kWebSocket$1];
this.removeListener('close', socketOnClose);
this.removeListener('data', socketOnData);
this.removeListener('end', socketOnEnd);
websocket._readyState = WebSocket$1.CLOSING;
let chunk;
//
// The close frame might not have been received or the `'end'` event emitted,
// for example, if the socket was destroyed due to an error. Ensure that the
// `receiver` stream is closed after writing any remaining buffered data to
// it. If the readable side of the socket is in flowing mode then there is no
// buffered data as everything has been already written and `readable.read()`
// will return `null`. If instead, the socket is paused, any possible buffered
// data will be read as a single chunk.
//
if (
!this._readableState.endEmitted &&
!websocket._closeFrameReceived &&
!websocket._receiver._writableState.errorEmitted &&
(chunk = websocket._socket.read()) !== null
) {
websocket._receiver.write(chunk);
}
websocket._receiver.end();
this[kWebSocket$1] = undefined;
clearTimeout(websocket._closeTimer);
if (
websocket._receiver._writableState.finished ||
websocket._receiver._writableState.errorEmitted
) {
websocket.emitClose();
} else {
websocket._receiver.on('error', receiverOnFinish);
websocket._receiver.on('finish', receiverOnFinish);
}
}
/**
* The listener of the socket `'data'` event.
*
* @param {Buffer} chunk A chunk of data
* @private
*/
function socketOnData(chunk) {
if (!this[kWebSocket$1]._receiver.write(chunk)) {
this.pause();
}
}
/**
* The listener of the socket `'end'` event.
*
* @private
*/
function socketOnEnd() {
const websocket = this[kWebSocket$1];
websocket._readyState = WebSocket$1.CLOSING;
websocket._receiver.end();
this.end();
}
/**
* The listener of the socket `'error'` event.
*
* @private
*/
function socketOnError$1() {
const websocket = this[kWebSocket$1];
this.removeListener('error', socketOnError$1);
this.on('error', NOOP$1);
if (websocket) {
websocket._readyState = WebSocket$1.CLOSING;
this.destroy();
}
}
const { tokenChars } = validationExports;
/**
* Parses the `Sec-WebSocket-Protocol` header into a set of subprotocol names.
*
* @param {String} header The field value of the header
* @return {Set} The subprotocol names
* @public
*/
function parse(header) {
const protocols = new Set();
let start = -1;
let end = -1;
let i = 0;
for (i; i < header.length; i++) {
const code = header.charCodeAt(i);
if (end === -1 && tokenChars[code] === 1) {
if (start === -1) start = i;
} else if (
i !== 0 &&
(code === 0x20 /* ' ' */ || code === 0x09) /* '\t' */
) {
if (end === -1 && start !== -1) end = i;
} else if (code === 0x2c /* ',' */) {
if (start === -1) {
throw new SyntaxError(`Unexpected character at index ${i}`);
}
if (end === -1) end = i;
const protocol = header.slice(start, end);
if (protocols.has(protocol)) {
throw new SyntaxError(`The "${protocol}" subprotocol is duplicated`);
}
protocols.add(protocol);
start = end = -1;
} else {
throw new SyntaxError(`Unexpected character at index ${i}`);
}
}
if (start === -1 || end !== -1) {
throw new SyntaxError('Unexpected end of input');
}
const protocol = header.slice(start, i);
if (protocols.has(protocol)) {
throw new SyntaxError(`The "${protocol}" subprotocol is duplicated`);
}
protocols.add(protocol);
return protocols;
}
var subprotocol$1 = { parse };
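//
// Editor's note (illustrative, never executed): `parse()` splits a
// `Sec-WebSocket-Protocol` header on commas, tolerates surrounding spaces and
// tabs, and throws a `SyntaxError` on duplicates or illegal token characters.
//
function _exampleParseSubprotocols() {
  const names = subprotocol$1.parse('chat, superchat'); // Set { 'chat', 'superchat' }
  return names.has('chat') && names.has('superchat');   // true
}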
/* eslint no-unused-vars: ["error", { "varsIgnorePattern": "^Duplex$", "caughtErrors": "none" }] */
const EventEmitter = require$$0$7;
const http$2 = require$$1;
const { createHash } = require$$3$1;
const extension = extension$1;
const PerMessageDeflate = permessageDeflate;
const subprotocol = subprotocol$1;
const WebSocket = websocket;
const { GUID, kWebSocket } = constants;
const keyRegex = /^[+/0-9A-Za-z]{22}==$/;
const RUNNING = 0;
const CLOSING = 1;
const CLOSED = 2;
/**
* Class representing a WebSocket server.
*
* @extends EventEmitter
*/
class WebSocketServer extends EventEmitter {
/**
* Create a `WebSocketServer` instance.
*
* @param {Object} options Configuration options
* @param {Boolean} [options.allowSynchronousEvents=true] Specifies whether
* any of the `'message'`, `'ping'`, and `'pong'` events can be emitted
* multiple times in the same tick
* @param {Boolean} [options.autoPong=true] Specifies whether or not to
* automatically send a pong in response to a ping
* @param {Number} [options.backlog=511] The maximum length of the queue of
* pending connections
* @param {Boolean} [options.clientTracking=true] Specifies whether or not to
* track clients
* @param {Function} [options.handleProtocols] A hook to handle protocols
* @param {String} [options.host] The hostname where to bind the server
* @param {Number} [options.maxPayload=104857600] The maximum allowed message
* size
* @param {Boolean} [options.noServer=false] Enable no server mode
* @param {String} [options.path] Accept only connections matching this path
* @param {(Boolean|Object)} [options.perMessageDeflate=false] Enable/disable
* permessage-deflate
* @param {Number} [options.port] The port where to bind the server
* @param {(http.Server|https.Server)} [options.server] A pre-created HTTP/S
* server to use
* @param {Boolean} [options.skipUTF8Validation=false] Specifies whether or
* not to skip UTF-8 validation for text and close messages
* @param {Function} [options.verifyClient] A hook to reject connections
* @param {Function} [options.WebSocket=WebSocket] Specifies the `WebSocket`
* class to use. It must be the `WebSocket` class or class that extends it
* @param {Function} [callback] A listener for the `listening` event
*/
constructor(options, callback) {
super();
options = {
allowSynchronousEvents: true,
autoPong: true,
maxPayload: 100 * 1024 * 1024,
skipUTF8Validation: false,
perMessageDeflate: false,
handleProtocols: null,
clientTracking: true,
verifyClient: null,
noServer: false,
backlog: null, // use default (511 as implemented in net.js)
server: null,
host: null,
path: null,
port: null,
WebSocket,
...options
};
if (
(options.port == null && !options.server && !options.noServer) ||
(options.port != null && (options.server || options.noServer)) ||
(options.server && options.noServer)
) {
throw new TypeError(
'One and only one of the "port", "server", or "noServer" options ' +
'must be specified'
);
}
if (options.port != null) {
this._server = http$2.createServer((req, res) => {
const body = http$2.STATUS_CODES[426];
res.writeHead(426, {
'Content-Length': body.length,
'Content-Type': 'text/plain'
});
res.end(body);
});
this._server.listen(
options.port,
options.host,
options.backlog,
callback
);
} else if (options.server) {
this._server = options.server;
}
if (this._server) {
const emitConnection = this.emit.bind(this, 'connection');
this._removeListeners = addListeners(this._server, {
listening: this.emit.bind(this, 'listening'),
error: this.emit.bind(this, 'error'),
upgrade: (req, socket, head) => {
this.handleUpgrade(req, socket, head, emitConnection);
}
});
}
if (options.perMessageDeflate === true) options.perMessageDeflate = {};
if (options.clientTracking) {
this.clients = new Set();
this._shouldEmitClose = false;
}
this.options = options;
this._state = RUNNING;
}
/**
* Returns the bound address, the address family name, and port of the server
* as reported by the operating system if listening on an IP socket.
* If the server is listening on a pipe or UNIX domain socket, the name is
* returned as a string.
*
* @return {(Object|String|null)} The address of the server
* @public
*/
address() {
if (this.options.noServer) {
throw new Error('The server is operating in "noServer" mode');
}
if (!this._server) return null;
return this._server.address();
}
/**
* Stop the server from accepting new connections and emit the `'close'` event
* when all existing connections are closed.
*
* @param {Function} [cb] A one-time listener for the `'close'` event
* @public
*/
close(cb) {
if (this._state === CLOSED) {
if (cb) {
this.once('close', () => {
cb(new Error('The server is not running'));
});
}
process.nextTick(emitClose, this);
return;
}
if (cb) this.once('close', cb);
if (this._state === CLOSING) return;
this._state = CLOSING;
if (this.options.noServer || this.options.server) {
if (this._server) {
this._removeListeners();
this._removeListeners = this._server = null;
}
if (this.clients) {
if (!this.clients.size) {
process.nextTick(emitClose, this);
} else {
this._shouldEmitClose = true;
}
} else {
process.nextTick(emitClose, this);
}
} else {
const server = this._server;
this._removeListeners();
this._removeListeners = this._server = null;
//
// The HTTP/S server was created internally. Close it, and rely on its
// `'close'` event.
//
server.close(() => {
emitClose(this);
});
}
}
/**
* See if a given request should be handled by this server instance.
*
* @param {http.IncomingMessage} req Request object to inspect
* @return {Boolean} `true` if the request is valid, else `false`
* @public
*/
shouldHandle(req) {
if (this.options.path) {
const index = req.url.indexOf('?');
const pathname = index !== -1 ? req.url.slice(0, index) : req.url;
if (pathname !== this.options.path) return false;
}
return true;
}
/**
* Handle an HTTP Upgrade request.
*
* @param {http.IncomingMessage} req The request object
* @param {Duplex} socket The network socket between the server and client
* @param {Buffer} head The first packet of the upgraded stream
* @param {Function} cb Callback
* @public
*/
handleUpgrade(req, socket, head, cb) {
socket.on('error', socketOnError);
const key = req.headers['sec-websocket-key'];
const upgrade = req.headers.upgrade;
const version = +req.headers['sec-websocket-version'];
if (req.method !== 'GET') {
const message = 'Invalid HTTP method';
abortHandshakeOrEmitwsClientError(this, req, socket, 405, message);
return;
}
if (upgrade === undefined || upgrade.toLowerCase() !== 'websocket') {
const message = 'Invalid Upgrade header';
abortHandshakeOrEmitwsClientError(this, req, socket, 400, message);
return;
}
if (key === undefined || !keyRegex.test(key)) {
const message = 'Missing or invalid Sec-WebSocket-Key header';
abortHandshakeOrEmitwsClientError(this, req, socket, 400, message);
return;
}
if (version !== 8 && version !== 13) {
const message = 'Missing or invalid Sec-WebSocket-Version header';
abortHandshakeOrEmitwsClientError(this, req, socket, 400, message);
return;
}
if (!this.shouldHandle(req)) {
abortHandshake(socket, 400);
return;
}
const secWebSocketProtocol = req.headers['sec-websocket-protocol'];
let protocols = new Set();
if (secWebSocketProtocol !== undefined) {
try {
protocols = subprotocol.parse(secWebSocketProtocol);
} catch (err) {
const message = 'Invalid Sec-WebSocket-Protocol header';
abortHandshakeOrEmitwsClientError(this, req, socket, 400, message);
return;
}
}
const secWebSocketExtensions = req.headers['sec-websocket-extensions'];
const extensions = {};
if (
this.options.perMessageDeflate &&
secWebSocketExtensions !== undefined
) {
const perMessageDeflate = new PerMessageDeflate(
this.options.perMessageDeflate,
true,
this.options.maxPayload
);
try {
const offers = extension.parse(secWebSocketExtensions);
if (offers[PerMessageDeflate.extensionName]) {
perMessageDeflate.accept(offers[PerMessageDeflate.extensionName]);
extensions[PerMessageDeflate.extensionName] = perMessageDeflate;
}
} catch (err) {
const message =
'Invalid or unacceptable Sec-WebSocket-Extensions header';
abortHandshakeOrEmitwsClientError(this, req, socket, 400, message);
return;
}
}
//
// Optionally call external client verification handler.
//
if (this.options.verifyClient) {
const info = {
origin:
req.headers[`${version === 8 ? 'sec-websocket-origin' : 'origin'}`],
secure: !!(req.socket.authorized || req.socket.encrypted),
req
};
if (this.options.verifyClient.length === 2) {
this.options.verifyClient(info, (verified, code, message, headers) => {
if (!verified) {
return abortHandshake(socket, code || 401, message, headers);
}
this.completeUpgrade(
extensions,
key,
protocols,
req,
socket,
head,
cb
);
});
return;
}
if (!this.options.verifyClient(info)) return abortHandshake(socket, 401);
}
this.completeUpgrade(extensions, key, protocols, req, socket, head, cb);
}
/**
* Upgrade the connection to WebSocket.
*
* @param {Object} extensions The accepted extensions
* @param {String} key The value of the `Sec-WebSocket-Key` header
* @param {Set} protocols The subprotocols
* @param {http.IncomingMessage} req The request object
* @param {Duplex} socket The network socket between the server and client
* @param {Buffer} head The first packet of the upgraded stream
* @param {Function} cb Callback
* @throws {Error} If called more than once with the same socket
* @private
*/
completeUpgrade(extensions, key, protocols, req, socket, head, cb) {
//
// Destroy the socket if the client has already sent a FIN packet.
//
if (!socket.readable || !socket.writable) return socket.destroy();
if (socket[kWebSocket]) {
throw new Error(
'server.handleUpgrade() was called more than once with the same ' +
'socket, possibly due to a misconfiguration'
);
}
if (this._state > RUNNING) return abortHandshake(socket, 503);
const digest = createHash('sha1')
.update(key + GUID)
.digest('base64');
const headers = [
'HTTP/1.1 101 Switching Protocols',
'Upgrade: websocket',
'Connection: Upgrade',
`Sec-WebSocket-Accept: ${digest}`
];
const ws = new this.options.WebSocket(null, undefined, this.options);
if (protocols.size) {
//
// Optionally call external protocol selection handler.
//
const protocol = this.options.handleProtocols
? this.options.handleProtocols(protocols, req)
: protocols.values().next().value;
if (protocol) {
headers.push(`Sec-WebSocket-Protocol: ${protocol}`);
ws._protocol = protocol;
}
}
if (extensions[PerMessageDeflate.extensionName]) {
const params = extensions[PerMessageDeflate.extensionName].params;
const value = extension.format({
[PerMessageDeflate.extensionName]: [params]
});
headers.push(`Sec-WebSocket-Extensions: ${value}`);
ws._extensions = extensions;
}
//
// Allow external modification/inspection of handshake headers.
//
this.emit('headers', headers, req);
socket.write(headers.concat('\r\n').join('\r\n'));
socket.removeListener('error', socketOnError);
ws.setSocket(socket, head, {
allowSynchronousEvents: this.options.allowSynchronousEvents,
maxPayload: this.options.maxPayload,
skipUTF8Validation: this.options.skipUTF8Validation
});
if (this.clients) {
this.clients.add(ws);
ws.on('close', () => {
this.clients.delete(ws);
if (this._shouldEmitClose && !this.clients.size) {
process.nextTick(emitClose, this);
}
});
}
cb(ws, req);
}
}
var websocketServer = WebSocketServer;
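//
// Editor's usage sketch (assumption, not part of the upstream bundle): in
// `noServer` mode the caller performs the HTTP upgrade explicitly, which is
// how Vite wires HMR further below. `httpServer` is a hypothetical existing
// `http.Server`; the helper is defined but never called.
//
function _exampleNoServerMode(httpServer) {
  const wss = new WebSocketServer({ noServer: true, perMessageDeflate: false });
  httpServer.on('upgrade', (req, socket, head) => {
    if (!wss.shouldHandle(req)) return socket.destroy();
    wss.handleUpgrade(req, socket, head, (ws, request) => {
      wss.emit('connection', ws, request); // hand the socket to 'connection' listeners
    });
  });
  return wss;
}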
/**
* Add event listeners on an `EventEmitter` using a map of <event, listener>
* pairs.
*
* @param {EventEmitter} server The event emitter
* @param {Object.<String, Function>} map The listeners to add
* @return {Function} A function that will remove the added listeners when
* called
* @private
*/
function addListeners(server, map) {
for (const event of Object.keys(map)) server.on(event, map[event]);
return function removeListeners() {
for (const event of Object.keys(map)) {
server.removeListener(event, map[event]);
}
};
}
/**
* Emit a `'close'` event on an `EventEmitter`.
*
* @param {EventEmitter} server The event emitter
* @private
*/
function emitClose(server) {
server._state = CLOSED;
server.emit('close');
}
/**
* Handle socket errors.
*
* @private
*/
function socketOnError() {
this.destroy();
}
/**
* Close the connection when preconditions are not fulfilled.
*
* @param {Duplex} socket The socket of the upgrade request
* @param {Number} code The HTTP response status code
* @param {String} [message] The HTTP response body
* @param {Object} [headers] Additional HTTP response headers
* @private
*/
function abortHandshake(socket, code, message, headers) {
//
// The socket is writable unless the user destroyed or ended it before calling
// `server.handleUpgrade()` or in the `verifyClient` function, which is a user
// error. Handling this does not make much sense as the worst that can happen
// is that some of the data written by the user might be discarded due to the
// call to `socket.end()` below, which triggers an `'error'` event that in
// turn causes the socket to be destroyed.
//
message = message || http$2.STATUS_CODES[code];
headers = {
Connection: 'close',
'Content-Type': 'text/html',
'Content-Length': Buffer.byteLength(message),
...headers
};
socket.once('finish', socket.destroy);
socket.end(
`HTTP/1.1 ${code} ${http$2.STATUS_CODES[code]}\r\n` +
Object.keys(headers)
.map((h) => `${h}: ${headers[h]}`)
.join('\r\n') +
'\r\n\r\n' +
message
);
}
/**
* Emit a `'wsClientError'` event on a `WebSocketServer` if there is at least
* one listener for it, otherwise call `abortHandshake()`.
*
* @param {WebSocketServer} server The WebSocket server
* @param {http.IncomingMessage} req The request object
* @param {Duplex} socket The socket of the upgrade request
* @param {Number} code The HTTP response status code
* @param {String} message The HTTP response body
* @private
*/
function abortHandshakeOrEmitwsClientError(server, req, socket, code, message) {
if (server.listenerCount('wsClientError')) {
const err = new Error(message);
Error.captureStackTrace(err, abortHandshakeOrEmitwsClientError);
server.emit('wsClientError', err, socket, req);
} else {
abortHandshake(socket, code, message);
}
}
var WebSocketServerRaw_ = /*@__PURE__*/getDefaultExportFromCjs(websocketServer);
const WebSocketServerRaw = process.versions.bun ? (
// @ts-expect-error: Bun defines `import.meta.require`
import.meta.require("ws").WebSocketServer
) : WebSocketServerRaw_;
const HMR_HEADER = "vite-hmr";
const wsServerEvents = [
"connection",
"error",
"headers",
"listening",
"message"
];
function noop$1() {
}
function createWebSocketServer(server, config, httpsOptions) {
if (config.server.ws === false) {
return {
name: "ws",
get clients() {
return /* @__PURE__ */ new Set();
},
async close() {
},
on: noop$1,
off: noop$1,
listen: noop$1,
send: noop$1
};
}
let wss;
let wsHttpServer = void 0;
const hmr = isObject$1(config.server.hmr) && config.server.hmr;
const hmrServer = hmr && hmr.server;
const hmrPort = hmr && hmr.port;
const portsAreCompatible = !hmrPort || hmrPort === config.server.port;
const wsServer = hmrServer || portsAreCompatible && server;
let hmrServerWsListener;
const customListeners = /* @__PURE__ */ new Map();
const clientsMap = /* @__PURE__ */ new WeakMap();
const port = hmrPort || 24678;
const host = hmr && hmr.host || void 0;
if (wsServer) {
let hmrBase = config.base;
const hmrPath = hmr ? hmr.path : void 0;
if (hmrPath) {
hmrBase = path$n.posix.join(hmrBase, hmrPath);
}
wss = new WebSocketServerRaw({ noServer: true });
hmrServerWsListener = (req, socket, head) => {
if (req.headers["sec-websocket-protocol"] === HMR_HEADER && req.url === hmrBase) {
wss.handleUpgrade(req, socket, head, (ws) => {
wss.emit("connection", ws, req);
});
}
};
wsServer.on("upgrade", hmrServerWsListener);
} else {
const route = (_, res) => {
const statusCode = 426;
const body = STATUS_CODES[statusCode];
if (!body)
throw new Error(`No body text found for the ${statusCode} status code`);
res.writeHead(statusCode, {
"Content-Length": body.length,
"Content-Type": "text/plain"
});
res.end(body);
};
if (httpsOptions) {
wsHttpServer = createServer$2(httpsOptions, route);
} else {
wsHttpServer = createServer$3(route);
}
wss = new WebSocketServerRaw({ server: wsHttpServer });
}
wss.on("connection", (socket) => {
socket.on("message", (raw) => {
if (!customListeners.size) return;
let parsed;
try {
parsed = JSON.parse(String(raw));
} catch {
}
if (!parsed || parsed.type !== "custom" || !parsed.event) return;
const listeners = customListeners.get(parsed.event);
if (!listeners?.size) return;
const client = getSocketClient(socket);
listeners.forEach((listener) => listener(parsed.data, client));
});
socket.on("error", (err) => {
config.logger.error(`${colors$1.red(`ws error:`)}
${err.stack}`, {
timestamp: true,
error: err
});
});
socket.send(JSON.stringify({ type: "connected" }));
if (bufferedError) {
socket.send(JSON.stringify(bufferedError));
bufferedError = null;
}
});
wss.on("error", (e) => {
if (e.code === "EADDRINUSE") {
config.logger.error(
colors$1.red(`WebSocket server error: Port is already in use`),
{ error: e }
);
} else {
config.logger.error(
colors$1.red(`WebSocket server error:
${e.stack || e.message}`),
{ error: e }
);
}
});
function getSocketClient(socket) {
if (!clientsMap.has(socket)) {
clientsMap.set(socket, {
send: (...args) => {
let payload;
if (typeof args[0] === "string") {
payload = {
type: "custom",
event: args[0],
data: args[1]
};
} else {
payload = args[0];
}
socket.send(JSON.stringify(payload));
},
socket
});
}
return clientsMap.get(socket);
}
let bufferedError = null;
return {
name: "ws",
listen: () => {
wsHttpServer?.listen(port, host);
},
on: (event, fn) => {
if (wsServerEvents.includes(event)) wss.on(event, fn);
else {
if (!customListeners.has(event)) {
customListeners.set(event, /* @__PURE__ */ new Set());
}
customListeners.get(event).add(fn);
}
},
off: (event, fn) => {
if (wsServerEvents.includes(event)) {
wss.off(event, fn);
} else {
customListeners.get(event)?.delete(fn);
}
},
get clients() {
return new Set(Array.from(wss.clients).map(getSocketClient));
},
send(...args) {
let payload;
if (typeof args[0] === "string") {
payload = {
type: "custom",
event: args[0],
data: args[1]
};
} else {
payload = args[0];
}
if (payload.type === "error" && !wss.clients.size) {
bufferedError = payload;
return;
}
const stringified = JSON.stringify(payload);
wss.clients.forEach((client) => {
if (client.readyState === 1) {
client.send(stringified);
}
});
},
close() {
if (hmrServerWsListener && wsServer) {
wsServer.off("upgrade", hmrServerWsListener);
}
return new Promise((resolve, reject) => {
wss.clients.forEach((client) => {
client.terminate();
});
wss.close((err) => {
if (err) {
reject(err);
} else {
if (wsHttpServer) {
wsHttpServer.close((err2) => {
if (err2) {
reject(err2);
} else {
resolve();
}
});
} else {
resolve();
}
}
});
});
}
};
}
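//
// Editor's usage sketch (assumption): the object returned by
// `createWebSocketServer()` mixes native `ws` server events with Vite's custom
// HMR channel. `ws` below stands for that returned object; the event names and
// payloads are hypothetical and the helper is never invoked.
//
function _exampleHmrChannelUsage(ws) {
  // Custom (non-wss) events go through `customListeners`; each listener
  // receives the payload plus a per-socket client wrapper with its own `send()`.
  ws.on('my:ping', (data, client) => {
    client.send('my:pong', { received: data });
  });
  // Broadcasts a `{ type: 'custom', event, data }` payload to all open clients.
  ws.send('my:greeting', { msg: 'hello from the dev server' });
}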
function baseMiddleware(rawBase, middlewareMode) {
return function viteBaseMiddleware(req, res, next) {
const url = req.url;
const pathname = cleanUrl(url);
const base = rawBase;
if (pathname.startsWith(base)) {
req.url = stripBase(url, base);
return next();
}
if (middlewareMode) {
return next();
}
if (pathname === "/" || pathname === "/index.html") {
res.writeHead(302, {
Location: base + url.slice(pathname.length)
});
res.end();
return;
}
const redirectPath = withTrailingSlash(url) !== base ? joinUrlSegments(base, url) : base;
if (req.headers.accept?.includes("text/html")) {
res.writeHead(404, {
"Content-Type": "text/html"
});
res.end(
`The server is configured with a public base URL of ${base} - did you mean to visit <a href="${redirectPath}">${redirectPath}</a> instead?`
);
return;
} else {
res.writeHead(404, {
"Content-Type": "text/plain"
});
res.end(
`The server is configured with a public base URL of ${base} - did you mean to visit ${redirectPath} instead?`
);
return;
}
};
}
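//
// Editor's note (illustrative, never called): with a `rawBase` of '/app/', a
// request for '/app/src/main.ts' has the base stripped before `next()` runs,
// while an HTML request outside the base receives a 404 hint page. The request
// object is hypothetical and the expected result relies on Vite's `stripBase`
// helper, so treat it as an assumption.
//
function _exampleBaseRewrite() {
  const middleware = baseMiddleware('/app/', false);
  const req = { url: '/app/src/main.ts', headers: {} };
  middleware(req, /* res (unused in this branch) */ null, () => {
    console.log(req.url); // expected: '/src/main.ts'
  });
}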
var httpProxy$3 = {exports: {}};
var eventemitter3 = {exports: {}};
(function (module) {
var has = Object.prototype.hasOwnProperty
, prefix = '~';
/**
* Constructor to create a storage for our `EE` objects.
* An `Events` instance is a plain object whose properties are event names.
*
* @constructor
* @private
*/
function Events() {}
//
// We try to not inherit from `Object.prototype`. In some engines creating an
// instance in this way is faster than calling `Object.create(null)` directly.
// If `Object.create(null)` is not supported we prefix the event names with a
// character to make sure that the built-in object properties are not
// overridden or used as an attack vector.
//
if (Object.create) {
Events.prototype = Object.create(null);
//
// This hack is needed because the `__proto__` property is still inherited in
// some old browsers like Android 4, iPhone 5.1, Opera 11 and Safari 5.
//
if (!new Events().__proto__) prefix = false;
}
/**
* Representation of a single event listener.
*
* @param {Function} fn The listener function.
* @param {*} context The context to invoke the listener with.
* @param {Boolean} [once=false] Specify if the listener is a one-time listener.
* @constructor
* @private
*/
function EE(fn, context, once) {
this.fn = fn;
this.context = context;
this.once = once || false;
}
/**
* Add a listener for a given event.
*
* @param {EventEmitter} emitter Reference to the `EventEmitter` instance.
* @param {(String|Symbol)} event The event name.
* @param {Function} fn The listener function.
* @param {*} context The context to invoke the listener with.
* @param {Boolean} once Specify if the listener is a one-time listener.
* @returns {EventEmitter}
* @private
*/
function addListener(emitter, event, fn, context, once) {
if (typeof fn !== 'function') {
throw new TypeError('The listener must be a function');
}
var listener = new EE(fn, context || emitter, once)
, evt = prefix ? prefix + event : event;
if (!emitter._events[evt]) emitter._events[evt] = listener, emitter._eventsCount++;
else if (!emitter._events[evt].fn) emitter._events[evt].push(listener);
else emitter._events[evt] = [emitter._events[evt], listener];
return emitter;
}
/**
* Clear event by name.
*
* @param {EventEmitter} emitter Reference to the `EventEmitter` instance.
* @param {(String|Symbol)} evt The Event name.
* @private
*/
function clearEvent(emitter, evt) {
if (--emitter._eventsCount === 0) emitter._events = new Events();
else delete emitter._events[evt];
}
/**
* Minimal `EventEmitter` interface that is molded against the Node.js
* `EventEmitter` interface.
*
* @constructor
* @public
*/
function EventEmitter() {
this._events = new Events();
this._eventsCount = 0;
}
/**
* Return an array listing the events for which the emitter has registered
* listeners.
*
* @returns {Array}
* @public
*/
EventEmitter.prototype.eventNames = function eventNames() {
var names = []
, events
, name;
if (this._eventsCount === 0) return names;
for (name in (events = this._events)) {
if (has.call(events, name)) names.push(prefix ? name.slice(1) : name);
}
if (Object.getOwnPropertySymbols) {
return names.concat(Object.getOwnPropertySymbols(events));
}
return names;
};
/**
* Return the listeners registered for a given event.
*
* @param {(String|Symbol)} event The event name.
* @returns {Array} The registered listeners.
* @public
*/
EventEmitter.prototype.listeners = function listeners(event) {
var evt = prefix ? prefix + event : event
, handlers = this._events[evt];
if (!handlers) return [];
if (handlers.fn) return [handlers.fn];
for (var i = 0, l = handlers.length, ee = new Array(l); i < l; i++) {
ee[i] = handlers[i].fn;
}
return ee;
};
/**
* Return the number of listeners listening to a given event.
*
* @param {(String|Symbol)} event The event name.
* @returns {Number} The number of listeners.
* @public
*/
EventEmitter.prototype.listenerCount = function listenerCount(event) {
var evt = prefix ? prefix + event : event
, listeners = this._events[evt];
if (!listeners) return 0;
if (listeners.fn) return 1;
return listeners.length;
};
/**
* Calls each of the listeners registered for a given event.
*
* @param {(String|Symbol)} event The event name.
* @returns {Boolean} `true` if the event had listeners, else `false`.
* @public
*/
EventEmitter.prototype.emit = function emit(event, a1, a2, a3, a4, a5) {
var evt = prefix ? prefix + event : event;
if (!this._events[evt]) return false;
var listeners = this._events[evt]
, len = arguments.length
, args
, i;
if (listeners.fn) {
if (listeners.once) this.removeListener(event, listeners.fn, undefined, true);
switch (len) {
case 1: return listeners.fn.call(listeners.context), true;
case 2: return listeners.fn.call(listeners.context, a1), true;
case 3: return listeners.fn.call(listeners.context, a1, a2), true;
case 4: return listeners.fn.call(listeners.context, a1, a2, a3), true;
case 5: return listeners.fn.call(listeners.context, a1, a2, a3, a4), true;
case 6: return listeners.fn.call(listeners.context, a1, a2, a3, a4, a5), true;
}
for (i = 1, args = new Array(len -1); i < len; i++) {
args[i - 1] = arguments[i];
}
listeners.fn.apply(listeners.context, args);
} else {
var length = listeners.length
, j;
for (i = 0; i < length; i++) {
if (listeners[i].once) this.removeListener(event, listeners[i].fn, undefined, true);
switch (len) {
case 1: listeners[i].fn.call(listeners[i].context); break;
case 2: listeners[i].fn.call(listeners[i].context, a1); break;
case 3: listeners[i].fn.call(listeners[i].context, a1, a2); break;
case 4: listeners[i].fn.call(listeners[i].context, a1, a2, a3); break;
default:
if (!args) for (j = 1, args = new Array(len -1); j < len; j++) {
args[j - 1] = arguments[j];
}
listeners[i].fn.apply(listeners[i].context, args);
}
}
}
return true;
};
/**
* Add a listener for a given event.
*
* @param {(String|Symbol)} event The event name.
* @param {Function} fn The listener function.
* @param {*} [context=this] The context to invoke the listener with.
* @returns {EventEmitter} `this`.
* @public
*/
EventEmitter.prototype.on = function on(event, fn, context) {
return addListener(this, event, fn, context, false);
};
/**
* Add a one-time listener for a given event.
*
* @param {(String|Symbol)} event The event name.
* @param {Function} fn The listener function.
* @param {*} [context=this] The context to invoke the listener with.
* @returns {EventEmitter} `this`.
* @public
*/
EventEmitter.prototype.once = function once(event, fn, context) {
return addListener(this, event, fn, context, true);
};
/**
* Remove the listeners of a given event.
*
* @param {(String|Symbol)} event The event name.
* @param {Function} fn Only remove the listeners that match this function.
* @param {*} context Only remove the listeners that have this context.
* @param {Boolean} once Only remove one-time listeners.
* @returns {EventEmitter} `this`.
* @public
*/
EventEmitter.prototype.removeListener = function removeListener(event, fn, context, once) {
var evt = prefix ? prefix + event : event;
if (!this._events[evt]) return this;
if (!fn) {
clearEvent(this, evt);
return this;
}
var listeners = this._events[evt];
if (listeners.fn) {
if (
listeners.fn === fn &&
(!once || listeners.once) &&
(!context || listeners.context === context)
) {
clearEvent(this, evt);
}
} else {
for (var i = 0, events = [], length = listeners.length; i < length; i++) {
if (
listeners[i].fn !== fn ||
(once && !listeners[i].once) ||
(context && listeners[i].context !== context)
) {
events.push(listeners[i]);
}
}
//
// Reset the array, or remove it completely if we have no more listeners.
//
if (events.length) this._events[evt] = events.length === 1 ? events[0] : events;
else clearEvent(this, evt);
}
return this;
};
/**
* Remove all listeners, or those of the specified event.
*
* @param {(String|Symbol)} [event] The event name.
* @returns {EventEmitter} `this`.
* @public
*/
EventEmitter.prototype.removeAllListeners = function removeAllListeners(event) {
var evt;
if (event) {
evt = prefix ? prefix + event : event;
if (this._events[evt]) clearEvent(this, evt);
} else {
this._events = new Events();
this._eventsCount = 0;
}
return this;
};
//
// Alias method names because people roll like that.
//
EventEmitter.prototype.off = EventEmitter.prototype.removeListener;
EventEmitter.prototype.addListener = EventEmitter.prototype.on;
//
// Expose the prefix.
//
EventEmitter.prefixed = prefix;
//
// Allow `EventEmitter` to be imported as module namespace.
//
EventEmitter.EventEmitter = EventEmitter;
//
// Expose the module.
//
{
module.exports = EventEmitter;
}
} (eventemitter3));
var eventemitter3Exports = eventemitter3.exports;
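//
// Editor's usage sketch (illustrative, never executed): the embedded
// `eventemitter3` is a minimal EventEmitter; `once` listeners remove
// themselves after the first emit and `emit` returns whether any listener ran.
// The event name is hypothetical.
//
function _exampleEventEmitter3() {
  const EE = eventemitter3Exports;
  const emitter = new EE();
  emitter.once('proxyReq', (req) => console.log('first emit only', req.url));
  const hadListeners = emitter.emit('proxyReq', { url: '/' }); // true
  emitter.emit('proxyReq', { url: '/again' });                 // listener already removed
  return hadListeners;
}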
var common$3 = {};
/**
* Check if we're required to add a port number.
*
* @see https://url.spec.whatwg.org/#default-port
* @param {Number|String} port Port number we need to check
* @param {String} protocol Protocol we need to check against.
* @returns {Boolean} Is it a default port for the given protocol
* @api private
*/
var requiresPort = function required(port, protocol) {
protocol = protocol.split(':')[0];
port = +port;
if (!port) return false;
switch (protocol) {
case 'http':
case 'ws':
return port !== 80;
case 'https':
case 'wss':
return port !== 443;
case 'ftp':
return port !== 21;
case 'gopher':
return port !== 70;
case 'file':
return false;
}
return port !== 0;
};
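//
// Editor's note (illustrative, never executed): `requiresPort` reports whether
// a port has to be kept because it differs from the protocol's default.
//
function _exampleRequiresPort() {
  return [
    requiresPort(80, 'http'),   // false: 80 is the default for http
    requiresPort(8080, 'http'), // true: a non-default port must be preserved
    requiresPort(443, 'wss'),   // false: 443 is the default for wss
    requiresPort('', 'http')    // false: no port was given at all
  ];
}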
(function (exports) {
var common = exports,
url = require$$0$9,
required = requiresPort;
var upgradeHeader = /(^|,)\s*upgrade\s*($|,)/i,
isSSL = /^https|wss/;
/**
* Simple Regex for testing if protocol is https
*/
common.isSSL = isSSL;
/**
* Copies the right headers from `options` and `req` to
* `outgoing` which is then used to fire the proxied
* request.
*
* Examples:
*
* common.setupOutgoing(outgoing, options, req)
* // => { host: ..., hostname: ...}
*
* @param {Object} Outgoing Base object to be filled with required properties
* @param {Object} Options Config object passed to the proxy
* @param {ClientRequest} Req Request Object
* @param {String} Forward String to select forward or target
*
* @return {Object} Outgoing Object with all required properties set
*
* @api private
*/
common.setupOutgoing = function(outgoing, options, req, forward) {
outgoing.port = options[forward || 'target'].port ||
(isSSL.test(options[forward || 'target'].protocol) ? 443 : 80);
['host', 'hostname', 'socketPath', 'pfx', 'key',
'passphrase', 'cert', 'ca', 'ciphers', 'secureProtocol'].forEach(
function(e) { outgoing[e] = options[forward || 'target'][e]; }
);
outgoing.method = options.method || req.method;
outgoing.headers = Object.assign({}, req.headers);
if (options.headers){
Object.assign(outgoing.headers, options.headers);
}
if (options.auth) {
outgoing.auth = options.auth;
}
if (options.ca) {
outgoing.ca = options.ca;
}
if (isSSL.test(options[forward || 'target'].protocol)) {
outgoing.rejectUnauthorized = (typeof options.secure === "undefined") ? true : options.secure;
}
outgoing.agent = options.agent || false;
outgoing.localAddress = options.localAddress;
//
// Remark: If there is no agent and we are not upgrading, set `connection: close`.
// This is the right thing to do as node core doesn't handle this COMPLETELY properly yet.
//
if (!outgoing.agent) {
outgoing.headers = outgoing.headers || {};
if (typeof outgoing.headers.connection !== 'string'
|| !upgradeHeader.test(outgoing.headers.connection)
) { outgoing.headers.connection = 'close'; }
}
// the final path is target path + relative path requested by user:
var target = options[forward || 'target'];
var targetPath = target && options.prependPath !== false
? (target.path || '')
: '';
//
// Remark: Can we somehow not use url.parse as a perf optimization?
//
var outgoingPath = !options.toProxy
? (url.parse(req.url).path || '')
: req.url;
//
// Remark: ignorePath will just straight up ignore whatever the request's
// path is. This can be labeled as FOOT-GUN material if you do not know what
// you are doing and are using conflicting options.
//
outgoingPath = !options.ignorePath ? outgoingPath : '';
outgoing.path = common.urlJoin(targetPath, outgoingPath);
if (options.changeOrigin) {
outgoing.headers.host =
required(outgoing.port, options[forward || 'target'].protocol) && !hasPort(outgoing.host)
? outgoing.host + ':' + outgoing.port
: outgoing.host;
}
return outgoing;
};
/**
* Set the proper configuration for sockets,
* set no delay and set keep alive, also set
* the timeout to 0.
*
* Examples:
*
* common.setupSocket(socket)
* // => Socket
*
* @param {Socket} Socket instance to setup
*
* @return {Socket} Return the configured socket.
*
* @api private
*/
common.setupSocket = function(socket) {
socket.setTimeout(0);
socket.setNoDelay(true);
socket.setKeepAlive(true, 0);
return socket;
};
/**
* Get the port number from the host. Or guess it based on the connection type.
*
* @param {Request} req Incoming HTTP request.
*
* @return {String} The port number.
*
* @api private
*/
common.getPort = function(req) {
var res = req.headers.host ? req.headers.host.match(/:(\d+)/) : '';
return res ?
res[1] :
common.hasEncryptedConnection(req) ? '443' : '80';
};
/**
* Check if the request has an encrypted connection.
*
* @param {Request} req Incoming HTTP request.
*
* @return {Boolean} Whether the connection is encrypted or not.
*
* @api private
*/
common.hasEncryptedConnection = function(req) {
return Boolean(req.connection.encrypted || req.connection.pair);
};
/**
* OS-agnostic join (doesn't break on URLs like path.join does on Windows).
*
* @return {String} The generated path.
*
* @api private
*/
common.urlJoin = function() {
//
// We do not want to mess with the query string. All we want to touch is the path.
//
var args = Array.prototype.slice.call(arguments),
lastIndex = args.length - 1,
last = args[lastIndex],
lastSegs = last.split('?'),
retSegs;
args[lastIndex] = lastSegs.shift();
//
// Join all strings, but remove empty strings so we don't get extra slashes from
// joining e.g. ['', 'am']
//
retSegs = [
args.filter(Boolean).join('/')
.replace(/\/+/g, '/')
.replace('http:/', 'http://')
.replace('https:/', 'https://')
];
// Only join the query string if it exists so we don't have a trailing '?'
// on every request
// Handle case where there could be multiple ? in the URL.
retSegs.push.apply(retSegs, lastSegs);
return retSegs.join('?')
};
/**
* Rewrites or removes a property (e.g. the domain or path) of a cookie header.
*
* @param {String|Array} Header The `set-cookie` header value(s)
* @param {Object} Config Mapping of value to rewritten value.
* '*' key to match any value, null value to remove the property.
* @param {String} Property The cookie attribute to rewrite, e.g. 'domain'
*
* @api private
*/
common.rewriteCookieProperty = function rewriteCookieProperty(header, config, property) {
if (Array.isArray(header)) {
return header.map(function (headerElement) {
return rewriteCookieProperty(headerElement, config, property);
});
}
return header.replace(new RegExp("(;\\s*" + property + "=)([^;]+)", 'i'), function(match, prefix, previousValue) {
var newValue;
if (previousValue in config) {
newValue = config[previousValue];
} else if ('*' in config) {
newValue = config['*'];
} else {
//no match, return previous value
return match;
}
if (newValue) {
//replace value
return prefix + newValue;
} else {
//remove value
return '';
}
});
};
/**
* Check the host and see if it potentially has a port in it (keep it simple)
*
* @returns {Boolean} Whether we have one or not
*
* @api private
*/
function hasPort(host) {
return !!~host.indexOf(':');
}} (common$3));
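//
// Editor's note (illustrative, never executed): `urlJoin` collapses duplicate
// slashes without touching the query string, and `rewriteCookieProperty`
// rewrites a single attribute of a `set-cookie` header. The expected results
// are shown in the comments.
//
function _exampleCommonHelpers() {
  const joined = common$3.urlJoin('/api', '/users?id=1');
  // joined === '/api/users?id=1'
  const rewritten = common$3.rewriteCookieProperty(
    'session=abc; Domain=old.example.com',
    { '*': 'new.example.com' }, // '*' matches any existing domain
    'domain'
  );
  // rewritten === 'session=abc; Domain=new.example.com'
  return { joined, rewritten };
}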
var url$1 = require$$0$9,
common$2 = common$3;
var redirectRegex = /^201|30(1|2|7|8)$/;
/*!
* Array of passes.
*
* A `pass` is just a function that is executed on `req, res, options`
* so that you can easily add new checks while still keeping the base
* flexible.
*/
var webOutgoing = { // <--
/**
* If it is an HTTP 1.0 request, remove the chunked transfer-encoding header.
*
* @param {ClientRequest} Req Request object
* @param {IncomingMessage} Res Response object
* @param {proxyResponse} ProxyRes Response object from the proxy request
*
* @api private
*/
removeChunked: function removeChunked(req, res, proxyRes) {
if (req.httpVersion === '1.0') {
delete proxyRes.headers['transfer-encoding'];
}
},
/**
* If it is an HTTP 1.0 request, set the correct connection header;
* otherwise, if the connection header is not present, default to `keep-alive`.
*
* @param {ClientRequest} Req Request object
* @param {IncomingMessage} Res Response object
* @param {proxyResponse} ProxyRes Response object from the proxy request
*
* @api private
*/
setConnection: function setConnection(req, res, proxyRes) {
if (req.httpVersion === '1.0') {
proxyRes.headers.connection = req.headers.connection || 'close';
} else if (req.httpVersion !== '2.0' && !proxyRes.headers.connection) {
proxyRes.headers.connection = req.headers.connection || 'keep-alive';
}
},
setRedirectHostRewrite: function setRedirectHostRewrite(req, res, proxyRes, options) {
if ((options.hostRewrite || options.autoRewrite || options.protocolRewrite)
&& proxyRes.headers['location']
&& redirectRegex.test(proxyRes.statusCode)) {
var target = url$1.parse(options.target);
var u = url$1.parse(proxyRes.headers['location']);
// make sure the redirected host matches the target host before rewriting
if (target.host != u.host) {
return;
}
if (options.hostRewrite) {
u.host = options.hostRewrite;
} else if (options.autoRewrite) {
u.host = req.headers['host'];
}
if (options.protocolRewrite) {
u.protocol = options.protocolRewrite;
}
proxyRes.headers['location'] = u.format();
}
},
/**
 * Copy headers from the proxy response to the response,
 * setting each header on the response object.
 *
 * @param {ClientRequest} Req Request object
 * @param {IncomingMessage} Res Response object
 * @param {proxyResponse} ProxyRes Response object from the proxy request
 * @param {Object} Options options.cookieDomainRewrite: config to rewrite the cookie domain
 *
 * @api private
 */
writeHeaders: function writeHeaders(req, res, proxyRes, options) {
var rewriteCookieDomainConfig = options.cookieDomainRewrite,
rewriteCookiePathConfig = options.cookiePathRewrite,
preserveHeaderKeyCase = options.preserveHeaderKeyCase,
rawHeaderKeyMap,
setHeader = function(key, header) {
if (header == undefined) return;
if (rewriteCookieDomainConfig && key.toLowerCase() === 'set-cookie') {
header = common$2.rewriteCookieProperty(header, rewriteCookieDomainConfig, 'domain');
}
if (rewriteCookiePathConfig && key.toLowerCase() === 'set-cookie') {
header = common$2.rewriteCookieProperty(header, rewriteCookiePathConfig, 'path');
}
res.setHeader(String(key).trim(), header);
};
if (typeof rewriteCookieDomainConfig === 'string') { //also test for ''
rewriteCookieDomainConfig = { '*': rewriteCookieDomainConfig };
}
if (typeof rewriteCookiePathConfig === 'string') { //also test for ''
rewriteCookiePathConfig = { '*': rewriteCookiePathConfig };
}
// message.rawHeaders is added in: v0.11.6
// https://nodejs.org/api/http.html#http_message_rawheaders
if (preserveHeaderKeyCase && proxyRes.rawHeaders != undefined) {
rawHeaderKeyMap = {};
for (var i = 0; i < proxyRes.rawHeaders.length; i += 2) {
var key = proxyRes.rawHeaders[i];
rawHeaderKeyMap[key.toLowerCase()] = key;
}
}
Object.keys(proxyRes.headers).forEach(function(key) {
var header = proxyRes.headers[key];
if (preserveHeaderKeyCase && rawHeaderKeyMap) {
key = rawHeaderKeyMap[key] || key;
}
setHeader(key, header);
});
},
/**
 * Set the statusCode from the proxyResponse
 *
 * @param {ClientRequest} Req Request object
 * @param {IncomingMessage} Res Response object
 * @param {proxyResponse} ProxyRes Response object from the proxy request
 *
 * @api private
 */
writeStatusCode: function writeStatusCode(req, res, proxyRes) {
// From Node.js docs: response.writeHead(statusCode[, statusMessage][, headers])
if(proxyRes.statusMessage) {
res.statusCode = proxyRes.statusCode;
res.statusMessage = proxyRes.statusMessage;
} else {
res.statusCode = proxyRes.statusCode;
}
}
};
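// Illustrative sketch (comment only): the web-incoming `stream` pass further below applies
// these outgoing passes, in order, to every proxied response. `req`, `res` and `proxyRes`
// stand for the Node request/response objects in scope at call time; the option value is
// hypothetical.
//
//   var opts = { cookieDomainRewrite: 'localhost' }; // writeHeaders normalizes this to { '*': 'localhost' }
//   Object.keys(webOutgoing).forEach(function (pass) {
//     webOutgoing[pass](req, res, proxyRes, opts);   // same (req, res, proxyRes, options) signature as above
//   });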
var followRedirects$1 = {exports: {}};
var debug$6;
var debug_1 = function () {
if (!debug$6) {
try {
/* eslint global-require: off */
debug$6 = srcExports$1("follow-redirects");
}
catch (error) { /* */ }
if (typeof debug$6 !== "function") {
debug$6 = function () { /* */ };
}
}
debug$6.apply(null, arguments);
};
var url = require$$0$9;
var URL$1 = url.URL;
var http$1 = require$$1;
var https$1 = require$$1$1;
var Writable = require$$0$6.Writable;
var assert = require$$4$3;
var debug$5 = debug_1;
// Whether to use the native URL object or the legacy url module
var useNativeURL = false;
try {
assert(new URL$1());
}
catch (error) {
useNativeURL = error.code === "ERR_INVALID_URL";
}
// URL fields to preserve in copy operations
var preservedUrlFields = [
"auth",
"host",
"hostname",
"href",
"path",
"pathname",
"port",
"protocol",
"query",
"search",
"hash",
];
// Create handlers that pass events from native requests
var events = ["abort", "aborted", "connect", "error", "socket", "timeout"];
var eventHandlers = Object.create(null);
events.forEach(function (event) {
eventHandlers[event] = function (arg1, arg2, arg3) {
this._redirectable.emit(event, arg1, arg2, arg3);
};
});
// Error types with codes
var InvalidUrlError = createErrorType(
"ERR_INVALID_URL",
"Invalid URL",
TypeError
);
var RedirectionError = createErrorType(
"ERR_FR_REDIRECTION_FAILURE",
"Redirected request failed"
);
var TooManyRedirectsError = createErrorType(
"ERR_FR_TOO_MANY_REDIRECTS",
"Maximum number of redirects exceeded",
RedirectionError
);
var MaxBodyLengthExceededError = createErrorType(
"ERR_FR_MAX_BODY_LENGTH_EXCEEDED",
"Request body larger than maxBodyLength limit"
);
var WriteAfterEndError = createErrorType(
"ERR_STREAM_WRITE_AFTER_END",
"write after end"
);
// istanbul ignore next
var destroy = Writable.prototype.destroy || noop;
// An HTTP(S) request that can be redirected
function RedirectableRequest(options, responseCallback) {
// Initialize the request
Writable.call(this);
this._sanitizeOptions(options);
this._options = options;
this._ended = false;
this._ending = false;
this._redirectCount = 0;
this._redirects = [];
this._requestBodyLength = 0;
this._requestBodyBuffers = [];
// Attach a callback if passed
if (responseCallback) {
this.on("response", responseCallback);
}
// React to responses of native requests
var self = this;
this._onNativeResponse = function (response) {
try {
self._processResponse(response);
}
catch (cause) {
self.emit("error", cause instanceof RedirectionError ?
cause : new RedirectionError({ cause: cause }));
}
};
// Perform the first request
this._performRequest();
}
RedirectableRequest.prototype = Object.create(Writable.prototype);
RedirectableRequest.prototype.abort = function () {
destroyRequest(this._currentRequest);
this._currentRequest.abort();
this.emit("abort");
};
RedirectableRequest.prototype.destroy = function (error) {
destroyRequest(this._currentRequest, error);
destroy.call(this, error);
return this;
};
// Writes buffered data to the current native request
RedirectableRequest.prototype.write = function (data, encoding, callback) {
// Writing is not allowed if end has been called
if (this._ending) {
throw new WriteAfterEndError();
}
// Validate input and shift parameters if necessary
if (!isString(data) && !isBuffer(data)) {
throw new TypeError("data should be a string, Buffer or Uint8Array");
}
if (isFunction(encoding)) {
callback = encoding;
encoding = null;
}
// Ignore empty buffers, since writing them doesn't invoke the callback
// https://github.com/nodejs/node/issues/22066
if (data.length === 0) {
if (callback) {
callback();
}
return;
}
// Only write when we don't exceed the maximum body length
if (this._requestBodyLength + data.length <= this._options.maxBodyLength) {
this._requestBodyLength += data.length;
this._requestBodyBuffers.push({ data: data, encoding: encoding });
this._currentRequest.write(data, encoding, callback);
}
// Error when we exceed the maximum body length
else {
this.emit("error", new MaxBodyLengthExceededError());
this.abort();
}
};
// Ends the current native request
RedirectableRequest.prototype.end = function (data, encoding, callback) {
// Shift parameters if necessary
if (isFunction(data)) {
callback = data;
data = encoding = null;
}
else if (isFunction(encoding)) {
callback = encoding;
encoding = null;
}
// Write data if needed and end
if (!data) {
this._ended = this._ending = true;
this._currentRequest.end(null, null, callback);
}
else {
var self = this;
var currentRequest = this._currentRequest;
this.write(data, encoding, function () {
self._ended = true;
currentRequest.end(null, null, callback);
});
this._ending = true;
}
};
// Sets a header value on the current native request
RedirectableRequest.prototype.setHeader = function (name, value) {
this._options.headers[name] = value;
this._currentRequest.setHeader(name, value);
};
// Clears a header value on the current native request
RedirectableRequest.prototype.removeHeader = function (name) {
delete this._options.headers[name];
this._currentRequest.removeHeader(name);
};
// Global timeout for all underlying requests
RedirectableRequest.prototype.setTimeout = function (msecs, callback) {
var self = this;
// Destroys the socket on timeout
function destroyOnTimeout(socket) {
socket.setTimeout(msecs);
socket.removeListener("timeout", socket.destroy);
socket.addListener("timeout", socket.destroy);
}
// Sets up a timer to trigger a timeout event
function startTimer(socket) {
if (self._timeout) {
clearTimeout(self._timeout);
}
self._timeout = setTimeout(function () {
self.emit("timeout");
clearTimer();
}, msecs);
destroyOnTimeout(socket);
}
// Stops a timeout from triggering
function clearTimer() {
// Clear the timeout
if (self._timeout) {
clearTimeout(self._timeout);
self._timeout = null;
}
// Clean up all attached listeners
self.removeListener("abort", clearTimer);
self.removeListener("error", clearTimer);
self.removeListener("response", clearTimer);
self.removeListener("close", clearTimer);
if (callback) {
self.removeListener("timeout", callback);
}
if (!self.socket) {
self._currentRequest.removeListener("socket", startTimer);
}
}
// Attach callback if passed
if (callback) {
this.on("timeout", callback);
}
// Start the timer if or when the socket is opened
if (this.socket) {
startTimer(this.socket);
}
else {
this._currentRequest.once("socket", startTimer);
}
// Clean up on events
this.on("socket", destroyOnTimeout);
this.on("abort", clearTimer);
this.on("error", clearTimer);
this.on("response", clearTimer);
this.on("close", clearTimer);
return this;
};
// Proxy all other public ClientRequest methods
[
"flushHeaders", "getHeader",
"setNoDelay", "setSocketKeepAlive",
].forEach(function (method) {
RedirectableRequest.prototype[method] = function (a, b) {
return this._currentRequest[method](a, b);
};
});
// Proxy all public ClientRequest properties
["aborted", "connection", "socket"].forEach(function (property) {
Object.defineProperty(RedirectableRequest.prototype, property, {
get: function () { return this._currentRequest[property]; },
});
});
RedirectableRequest.prototype._sanitizeOptions = function (options) {
// Ensure headers are always present
if (!options.headers) {
options.headers = {};
}
// Since http.request treats host as an alias of hostname,
// but the url module interprets host as hostname plus port,
// eliminate the host property to avoid confusion.
if (options.host) {
// Use hostname if set, because it has precedence
if (!options.hostname) {
options.hostname = options.host;
}
delete options.host;
}
// Complete the URL object when necessary
if (!options.pathname && options.path) {
var searchPos = options.path.indexOf("?");
if (searchPos < 0) {
options.pathname = options.path;
}
else {
options.pathname = options.path.substring(0, searchPos);
options.search = options.path.substring(searchPos);
}
}
};
// Executes the next native request (initial or redirect)
RedirectableRequest.prototype._performRequest = function () {
// Load the native protocol
var protocol = this._options.protocol;
var nativeProtocol = this._options.nativeProtocols[protocol];
if (!nativeProtocol) {
throw new TypeError("Unsupported protocol " + protocol);
}
// If specified, use the agent corresponding to the protocol
// (HTTP and HTTPS use different types of agents)
if (this._options.agents) {
var scheme = protocol.slice(0, -1);
this._options.agent = this._options.agents[scheme];
}
// Create the native request and set up its event handlers
var request = this._currentRequest =
nativeProtocol.request(this._options, this._onNativeResponse);
request._redirectable = this;
for (var event of events) {
request.on(event, eventHandlers[event]);
}
// RFC7230§5.3.1: When making a request directly to an origin server, […]
// a client MUST send only the absolute path […] as the request-target.
this._currentUrl = /^\//.test(this._options.path) ?
url.format(this._options) :
// When making a request to a proxy, […]
// a client MUST send the target URI in absolute-form […].
this._options.path;
// End a redirected request
// (The first request must be ended explicitly with RedirectableRequest#end)
if (this._isRedirect) {
// Write the request entity and end
var i = 0;
var self = this;
var buffers = this._requestBodyBuffers;
(function writeNext(error) {
// Only write if this request has not been redirected yet
/* istanbul ignore else */
if (request === self._currentRequest) {
// Report any write errors
/* istanbul ignore if */
if (error) {
self.emit("error", error);
}
// Write the next buffer if there are buffers left
else if (i < buffers.length) {
var buffer = buffers[i++];
/* istanbul ignore else */
if (!request.finished) {
request.write(buffer.data, buffer.encoding, writeNext);
}
}
// End the request if `end` has been called on us
else if (self._ended) {
request.end();
}
}
}());
}
};
// Processes a response from the current native request
RedirectableRequest.prototype._processResponse = function (response) {
// Store the redirected response
var statusCode = response.statusCode;
if (this._options.trackRedirects) {
this._redirects.push({
url: this._currentUrl,
headers: response.headers,
statusCode: statusCode,
});
}
// RFC7231§6.4: The 3xx (Redirection) class of status code indicates
// that further action needs to be taken by the user agent in order to
// fulfill the request. If a Location header field is provided,
// the user agent MAY automatically redirect its request to the URI
// referenced by the Location field value,
// even if the specific status code is not understood.
// If the response is not a redirect; return it as-is
var location = response.headers.location;
if (!location || this._options.followRedirects === false ||
statusCode < 300 || statusCode >= 400) {
response.responseUrl = this._currentUrl;
response.redirects = this._redirects;
this.emit("response", response);
// Clean up
this._requestBodyBuffers = [];
return;
}
// The response is a redirect, so abort the current request
destroyRequest(this._currentRequest);
// Discard the remainder of the response to avoid waiting for data
response.destroy();
// RFC7231§6.4: A client SHOULD detect and intervene
// in cyclical redirections (i.e., "infinite" redirection loops).
if (++this._redirectCount > this._options.maxRedirects) {
throw new TooManyRedirectsError();
}
// Store the request headers if applicable
var requestHeaders;
var beforeRedirect = this._options.beforeRedirect;
if (beforeRedirect) {
requestHeaders = Object.assign({
// The Host header was set by nativeProtocol.request
Host: response.req.getHeader("host"),
}, this._options.headers);
}
// RFC7231§6.4: Automatic redirection needs to be done with
// care for methods not known to be safe, […]
// RFC7231§6.4.2–3: For historical reasons, a user agent MAY change
// the request method from POST to GET for the subsequent request.
var method = this._options.method;
if ((statusCode === 301 || statusCode === 302) && this._options.method === "POST" ||
// RFC7231§6.4.4: The 303 (See Other) status code indicates that
// the server is redirecting the user agent to a different resource […]
// A user agent can perform a retrieval request targeting that URI
// (a GET or HEAD request if using HTTP) […]
(statusCode === 303) && !/^(?:GET|HEAD)$/.test(this._options.method)) {
this._options.method = "GET";
// Drop a possible entity and headers related to it
this._requestBodyBuffers = [];
removeMatchingHeaders(/^content-/i, this._options.headers);
}
// Drop the Host header, as the redirect might lead to a different host
var currentHostHeader = removeMatchingHeaders(/^host$/i, this._options.headers);
// If the redirect is relative, carry over the host of the last request
var currentUrlParts = parseUrl(this._currentUrl);
var currentHost = currentHostHeader || currentUrlParts.host;
var currentUrl = /^\w+:/.test(location) ? this._currentUrl :
url.format(Object.assign(currentUrlParts, { host: currentHost }));
// Create the redirected request
var redirectUrl = resolveUrl(location, currentUrl);
debug$5("redirecting to", redirectUrl.href);
this._isRedirect = true;
spreadUrlObject(redirectUrl, this._options);
// Drop confidential headers when redirecting to a less secure protocol
// or to a different domain that is not a superdomain
if (redirectUrl.protocol !== currentUrlParts.protocol &&
redirectUrl.protocol !== "https:" ||
redirectUrl.host !== currentHost &&
!isSubdomain(redirectUrl.host, currentHost)) {
removeMatchingHeaders(/^(?:(?:proxy-)?authorization|cookie)$/i, this._options.headers);
}
// Evaluate the beforeRedirect callback
if (isFunction(beforeRedirect)) {
var responseDetails = {
headers: response.headers,
statusCode: statusCode,
};
var requestDetails = {
url: currentUrl,
method: method,
headers: requestHeaders,
};
beforeRedirect(this._options, responseDetails, requestDetails);
this._sanitizeOptions(this._options);
}
// Perform the redirected request
this._performRequest();
};
// Wraps the key/value object of protocols with redirect functionality
function wrap(protocols) {
// Default settings
var exports = {
maxRedirects: 21,
maxBodyLength: 10 * 1024 * 1024,
};
// Wrap each protocol
var nativeProtocols = {};
Object.keys(protocols).forEach(function (scheme) {
var protocol = scheme + ":";
var nativeProtocol = nativeProtocols[protocol] = protocols[scheme];
var wrappedProtocol = exports[scheme] = Object.create(nativeProtocol);
// Executes a request, following redirects
function request(input, options, callback) {
// Parse parameters, ensuring that input is an object
if (isURL(input)) {
input = spreadUrlObject(input);
}
else if (isString(input)) {
input = spreadUrlObject(parseUrl(input));
}
else {
callback = options;
options = validateUrl(input);
input = { protocol: protocol };
}
if (isFunction(options)) {
callback = options;
options = null;
}
// Set defaults
options = Object.assign({
maxRedirects: exports.maxRedirects,
maxBodyLength: exports.maxBodyLength,
}, input, options);
options.nativeProtocols = nativeProtocols;
if (!isString(options.host) && !isString(options.hostname)) {
options.hostname = "::1";
}
assert.equal(options.protocol, protocol, "protocol mismatch");
debug$5("options", options);
return new RedirectableRequest(options, callback);
}
// Executes a GET request, following redirects
function get(input, options, callback) {
var wrappedRequest = wrappedProtocol.request(input, options, callback);
wrappedRequest.end();
return wrappedRequest;
}
// Expose the properties on the wrapped protocol
Object.defineProperties(wrappedProtocol, {
request: { value: request, configurable: true, enumerable: true, writable: true },
get: { value: get, configurable: true, enumerable: true, writable: true },
});
});
return exports;
}
function noop() { /* empty */ }
function parseUrl(input) {
var parsed;
/* istanbul ignore else */
if (useNativeURL) {
parsed = new URL$1(input);
}
else {
// Ensure the URL is valid and absolute
parsed = validateUrl(url.parse(input));
if (!isString(parsed.protocol)) {
throw new InvalidUrlError({ input });
}
}
return parsed;
}
function resolveUrl(relative, base) {
/* istanbul ignore next */
return useNativeURL ? new URL$1(relative, base) : parseUrl(url.resolve(base, relative));
}
function validateUrl(input) {
if (/^\[/.test(input.hostname) && !/^\[[:0-9a-f]+\]$/i.test(input.hostname)) {
throw new InvalidUrlError({ input: input.href || input });
}
if (/^\[/.test(input.host) && !/^\[[:0-9a-f]+\](:\d+)?$/i.test(input.host)) {
throw new InvalidUrlError({ input: input.href || input });
}
return input;
}
function spreadUrlObject(urlObject, target) {
var spread = target || {};
for (var key of preservedUrlFields) {
spread[key] = urlObject[key];
}
// Fix IPv6 hostname
if (spread.hostname.startsWith("[")) {
spread.hostname = spread.hostname.slice(1, -1);
}
// Ensure port is a number
if (spread.port !== "") {
spread.port = Number(spread.port);
}
// Concatenate path
spread.path = spread.search ? spread.pathname + spread.search : spread.pathname;
return spread;
}
function removeMatchingHeaders(regex, headers) {
var lastValue;
for (var header in headers) {
if (regex.test(header)) {
lastValue = headers[header];
delete headers[header];
}
}
return (lastValue === null || typeof lastValue === "undefined") ?
undefined : String(lastValue).trim();
}
function createErrorType(code, message, baseClass) {
// Create constructor
function CustomError(properties) {
Error.captureStackTrace(this, this.constructor);
Object.assign(this, properties || {});
this.code = code;
this.message = this.cause ? message + ": " + this.cause.message : message;
}
// Attach constructor and set default properties
CustomError.prototype = new (baseClass || Error)();
Object.defineProperties(CustomError.prototype, {
constructor: {
value: CustomError,
enumerable: false,
},
name: {
value: "Error [" + code + "]",
enumerable: false,
},
});
return CustomError;
}
function destroyRequest(request, error) {
for (var event of events) {
request.removeListener(event, eventHandlers[event]);
}
request.on("error", noop);
request.destroy(error);
}
function isSubdomain(subdomain, domain) {
assert(isString(subdomain) && isString(domain));
var dot = subdomain.length - domain.length - 1;
return dot > 0 && subdomain[dot] === "." && subdomain.endsWith(domain);
}
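// Illustrative examples (comment only):
//   isSubdomain('api.example.com', 'example.com')  // => true
//   isSubdomain('example.com', 'example.com')      // => false (a host is not its own subdomain)
//   isSubdomain('badexample.com', 'example.com')   // => false (a '.' boundary is required)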
function isString(value) {
return typeof value === "string" || value instanceof String;
}
function isFunction(value) {
return typeof value === "function";
}
function isBuffer(value) {
return typeof value === "object" && ("length" in value);
}
function isURL(value) {
return URL$1 && value instanceof URL$1;
}
// Exports
followRedirects$1.exports = wrap({ http: http$1, https: https$1 });
followRedirects$1.exports.wrap = wrap;
var followRedirectsExports = followRedirects$1.exports;
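// Illustrative sketch (comment only): consuming the wrapped module above. The target URL is
// hypothetical; the defaults shown mirror the `wrap` defaults.
//
//   var followRedirects = followRedirectsExports;   // { http, https, wrap, maxRedirects, maxBodyLength }
//   followRedirects.maxRedirects = 21;              // module-level defaults picked up by request()
//   followRedirects.https.get('https://example.com/old-path', { trackRedirects: true }, function (res) {
//     // res.responseUrl is the final URL after any 3xx hops;
//     // res.redirects lists each hop because trackRedirects is true.
//   });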
var httpNative = require$$1,
httpsNative = require$$1$1,
web_o = webOutgoing,
common$1 = common$3,
followRedirects = followRedirectsExports;
web_o = Object.keys(web_o).map(function(pass) {
return web_o[pass];
});
var nativeAgents = { http: httpNative, https: httpsNative };
/*!
* Array of passes.
*
* A `pass` is just a function that is executed on `req, res, options`
* so that you can easily add new checks while still keeping the base
* flexible.
*/
var webIncoming = {
/**
* Sets `content-length` to '0' if the request is a DELETE or OPTIONS request without one.
*
* @param {ClientRequest} Req Request object
* @param {IncomingMessage} Res Response object
* @param {Object} Options Config object passed to the proxy
*
* @api private
*/
deleteLength: function deleteLength(req, res, options) {
if((req.method === 'DELETE' || req.method === 'OPTIONS')
&& !req.headers['content-length']) {
req.headers['content-length'] = '0';
delete req.headers['transfer-encoding'];
}
},
/**
* Sets timeout in request socket if it was specified in options.
*
* @param {ClientRequest} Req Request object
* @param {IncomingMessage} Res Response object
* @param {Object} Options Config object passed to the proxy
*
* @api private
*/
timeout: function timeout(req, res, options) {
if(options.timeout) {
req.socket.setTimeout(options.timeout);
}
},
/**
* Sets `x-forwarded-*` headers if specified in config.
*
* @param {ClientRequest} Req Request object
* @param {IncomingMessage} Res Response object
* @param {Object} Options Config object passed to the proxy
*
* @api private
*/
XHeaders: function XHeaders(req, res, options) {
if(!options.xfwd) return;
var encrypted = req.isSpdy || common$1.hasEncryptedConnection(req);
var values = {
for : req.connection.remoteAddress || req.socket.remoteAddress,
port : common$1.getPort(req),
proto: encrypted ? 'https' : 'http'
};
['for', 'port', 'proto'].forEach(function(header) {
req.headers['x-forwarded-' + header] =
(req.headers['x-forwarded-' + header] || '') +
(req.headers['x-forwarded-' + header] ? ',' : '') +
values[header];
});
req.headers['x-forwarded-host'] = req.headers['x-forwarded-host'] || req.headers['host'] || '';
},
/**
* Does the actual proxying. If `forward` is enabled, fires up
* a ForwardStream; the same happens for ProxyStream. The request
* just dies otherwise.
*
* @param {ClientRequest} Req Request object
* @param {IncomingMessage} Res Response object
* @param {Object} Options Config object passed to the proxy
*
* @api private
*/
stream: function stream(req, res, options, _, server, clb) {
// And we begin!
server.emit('start', req, res, options.target || options.forward);
var agents = options.followRedirects ? followRedirects : nativeAgents;
var http = agents.http;
var https = agents.https;
if(options.forward) {
// If forward is enabled, just pipe the request
var forwardReq = (options.forward.protocol === 'https:' ? https : http).request(
common$1.setupOutgoing(options.ssl || {}, options, req, 'forward')
);
// error handler (e.g. ECONNRESET, ECONNREFUSED)
// Handle errors on the incoming request as well, since it makes sense to
var forwardError = createErrorHandler(forwardReq, options.forward);
req.on('error', forwardError);
forwardReq.on('error', forwardError);
(options.buffer || req).pipe(forwardReq);
if(!options.target) { return res.end(); }
}
// Request initialization
var proxyReq = (options.target.protocol === 'https:' ? https : http).request(
common$1.setupOutgoing(options.ssl || {}, options, req)
);
// Enable developers to modify the proxyReq before headers are sent
proxyReq.on('socket', function(socket) {
if(server && !proxyReq.getHeader('expect')) {
server.emit('proxyReq', proxyReq, req, res, options);
}
});
// allow the outgoing socket to time out so that we can
// show an error page for the initial request
if(options.proxyTimeout) {
proxyReq.setTimeout(options.proxyTimeout, function() {
proxyReq.abort();
});
}
// Ensure we abort proxy if request is aborted
req.on('aborted', function () {
proxyReq.abort();
});
// handle errors in proxy and incoming request, just like for forward proxy
var proxyError = createErrorHandler(proxyReq, options.target);
req.on('error', proxyError);
proxyReq.on('error', proxyError);
function createErrorHandler(proxyReq, url) {
return function proxyError(err) {
if (req.socket.destroyed && err.code === 'ECONNRESET') {
server.emit('econnreset', err, req, res, url);
return proxyReq.abort();
}
if (clb) {
clb(err, req, res, url);
} else {
server.emit('error', err, req, res, url);
}
}
}
(options.buffer || req).pipe(proxyReq);
proxyReq.on('response', function(proxyRes) {
if(server) { server.emit('proxyRes', proxyRes, req, res); }
if(!res.headersSent && !options.selfHandleResponse) {
for(var i=0; i < web_o.length; i++) {
if(web_o[i](req, res, proxyRes, options)) { break; }
}
}
if (!res.finished) {
// Allow us to listen when the proxy has completed
proxyRes.on('end', function () {
if (server) server.emit('end', req, res, proxyRes);
});
// We pipe to the response unless it's expected to be handled by the user
if (!options.selfHandleResponse) proxyRes.pipe(res);
} else {
if (server) server.emit('end', req, res, proxyRes);
}
});
}
};
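// Illustrative sketch (comment only): how the incoming passes above are invoked for one
// request. `req`, `res`, `server` and `callback` stand for the objects in scope at call
// time; the target and flags are hypothetical (string targets are parsed by
// createRightProxy, below, before the passes run).
//
//   var opts = { target: require('url').parse('http://127.0.0.1:9000'), xfwd: true };
//   webIncoming.deleteLength(req, res, opts); // DELETE/OPTIONS without content-length get 'content-length: 0'
//   webIncoming.timeout(req, res, opts);      // applies opts.timeout to the incoming socket, if set
//   webIncoming.XHeaders(req, res, opts);     // appends x-forwarded-for/port/proto and sets x-forwarded-host
//   webIncoming.stream(req, res, opts, null, server, callback); // pipes the request to the target and writes the response back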
var http = require$$1,
https = require$$1$1,
common = common$3;
/*!
* Array of passes.
*
* A `pass` is just a function that is executed on `req, socket, options`
* so that you can easily add new checks while still keeping the base
* flexible.
*/
/*
* Websockets Passes
*
*/
var wsIncoming = {
/**
* WebSocket requests must have the `GET` method and
* the `upgrade:websocket` header
*
* @param {ClientRequest} Req Request object
* @param {Socket} Websocket
*
* @api private
*/
checkMethodAndHeader : function checkMethodAndHeader(req, socket) {
if (req.method !== 'GET' || !req.headers.upgrade) {
socket.destroy();
return true;
}
if (req.headers.upgrade.toLowerCase() !== 'websocket') {
socket.destroy();
return true;
}
},
/**
* Sets `x-forwarded-*` headers if specified in config.
*
* @param {ClientRequest} Req Request object
* @param {Socket} Websocket
* @param {Object} Options Config object passed to the proxy
*
* @api private
*/
XHeaders : function XHeaders(req, socket, options) {
if(!options.xfwd) return;
var values = {
for : req.connection.remoteAddress || req.socket.remoteAddress,
port : common.getPort(req),
proto: common.hasEncryptedConnection(req) ? 'wss' : 'ws'
};
['for', 'port', 'proto'].forEach(function(header) {
req.headers['x-forwarded-' + header] =
(req.headers['x-forwarded-' + header] || '') +
(req.headers['x-forwarded-' + header] ? ',' : '') +
values[header];
});
},
/**
* Does the actual proxying. Makes the request, upgrades it,
* sends the Switching Protocols response and pipes the sockets.
*
* @param {ClientRequest} Req Request object
* @param {Socket} Websocket
* @param {Object} Options Config object passed to the proxy
*
* @api private
*/
stream : function stream(req, socket, options, head, server, clb) {
var createHttpHeader = function(line, headers) {
return Object.keys(headers).reduce(function (head, key) {
var value = headers[key];
if (!Array.isArray(value)) {
head.push(key + ': ' + value);
return head;
}
for (var i = 0; i < value.length; i++) {
head.push(key + ': ' + value[i]);
}
return head;
}, [line])
.join('\r\n') + '\r\n\r\n';
};
common.setupSocket(socket);
if (head && head.length) socket.unshift(head);
var proxyReq = (common.isSSL.test(options.target.protocol) ? https : http).request(
common.setupOutgoing(options.ssl || {}, options, req)
);
// Enable developers to modify the proxyReq before headers are sent
if (server) { server.emit('proxyReqWs', proxyReq, req, socket, options, head); }
// Error Handler
proxyReq.on('error', onOutgoingError);
proxyReq.on('response', function (res) {
// if upgrade event isn't going to happen, close the socket
if (!res.upgrade) {
socket.write(createHttpHeader('HTTP/' + res.httpVersion + ' ' + res.statusCode + ' ' + res.statusMessage, res.headers));
res.pipe(socket);
}
});
proxyReq.on('upgrade', function(proxyRes, proxySocket, proxyHead) {
proxySocket.on('error', onOutgoingError);
// Allow us to listen when the websocket has completed
proxySocket.on('end', function () {
server.emit('close', proxyRes, proxySocket, proxyHead);
});
// The pipe below will end proxySocket if socket closes cleanly, but not
// if it errors (eg, vanishes from the net and starts returning
// EHOSTUNREACH). We need to do that explicitly.
socket.on('error', function () {
proxySocket.end();
});
common.setupSocket(proxySocket);
if (proxyHead && proxyHead.length) proxySocket.unshift(proxyHead);
//
// Remark: Handle writing the headers to the socket when switching protocols
// Also handles when a header is an array
//
socket.write(createHttpHeader('HTTP/1.1 101 Switching Protocols', proxyRes.headers));
proxySocket.pipe(socket).pipe(proxySocket);
server.emit('open', proxySocket);
server.emit('proxySocket', proxySocket); //DEPRECATED.
});
return proxyReq.end(); // XXX: CHECK IF THIS IS CORRECT
function onOutgoingError(err) {
if (clb) {
clb(err, req, socket);
} else {
server.emit('error', err, req, socket);
}
socket.end();
}
}
};
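// Illustrative sketch (comment only): the ws passes above run in the same order for an
// 'upgrade' request. `req`, `socket`, `head`, `server` and `callback` stand for the objects
// in scope at call time; the target is hypothetical.
//
//   var opts = { target: require('url').parse('ws://127.0.0.1:9000'), xfwd: true };
//   wsIncoming.checkMethodAndHeader(req, socket);   // destroys non-GET / non-upgrade sockets and halts the loop
//   wsIncoming.XHeaders(req, socket, opts);         // x-forwarded-* headers with 'ws'/'wss' proto
//   wsIncoming.stream(req, socket, opts, head, server, callback); // performs the 101 upgrade and pipes both sockets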
(function (module) {
var httpProxy = module.exports,
parse_url = require$$0$9.parse,
EE3 = eventemitter3Exports,
http = require$$1,
https = require$$1$1,
web = webIncoming,
ws = wsIncoming;
httpProxy.Server = ProxyServer;
/**
* Returns a function that creates the loader for
* either `ws` or `web`'s passes.
*
* Examples:
*
* httpProxy.createRightProxy('ws')
* // => [Function]
*
* @param {String} Type Either 'ws' or 'web'
*
* @return {Function} Loader Function that when called returns an iterator for the right passes
*
* @api private
*/
function createRightProxy(type) {
return function(options) {
return function(req, res /*, [head], [opts] */) {
var passes = (type === 'ws') ? this.wsPasses : this.webPasses,
args = [].slice.call(arguments),
cntr = args.length - 1,
head, cbl;
/* optional args parse begin */
if(typeof args[cntr] === 'function') {
cbl = args[cntr];
cntr--;
}
var requestOptions = options;
if(
!(args[cntr] instanceof Buffer) &&
args[cntr] !== res
) {
//Copy global options
requestOptions = Object.assign({}, options);
//Overwrite with request options
Object.assign(requestOptions, args[cntr]);
cntr--;
}
if(args[cntr] instanceof Buffer) {
head = args[cntr];
}
/* optional args parse end */
['target', 'forward'].forEach(function(e) {
if (typeof requestOptions[e] === 'string')
requestOptions[e] = parse_url(requestOptions[e]);
});
if (!requestOptions.target && !requestOptions.forward) {
return this.emit('error', new Error('Must provide a proper URL as target'));
}
for(var i=0; i < passes.length; i++) {
/**
* Call of passes functions
* pass(req, res, options, head)
*
* In the WebSockets case the `res` variable
* refers to the connection socket
* pass(req, socket, options, head)
*/
if(passes[i](req, res, requestOptions, head, this, cbl)) { // passes can return a truthy value to halt the loop
break;
}
}
};
};
}
httpProxy.createRightProxy = createRightProxy;
function ProxyServer(options) {
EE3.call(this);
options = options || {};
options.prependPath = options.prependPath === false ? false : true;
this.web = this.proxyRequest = createRightProxy('web')(options);
this.ws = this.proxyWebsocketRequest = createRightProxy('ws')(options);
this.options = options;
this.webPasses = Object.keys(web).map(function(pass) {
return web[pass];
});
this.wsPasses = Object.keys(ws).map(function(pass) {
return ws[pass];
});
this.on('error', this.onError, this);
}
require$$0$5.inherits(ProxyServer, EE3);
ProxyServer.prototype.onError = function (err) {
//
// Remark: Replicate node core behavior using EE3
// so we force people to handle their own errors
//
if(this.listeners('error').length === 1) {
throw err;
}
};
ProxyServer.prototype.listen = function(port, hostname) {
var self = this,
closure = function(req, res) { self.web(req, res); };
this._server = this.options.ssl ?
https.createServer(this.options.ssl, closure) :
http.createServer(closure);
if(this.options.ws) {
this._server.on('upgrade', function(req, socket, head) { self.ws(req, socket, head); });
}
this._server.listen(port, hostname);
return this;
};
ProxyServer.prototype.close = function(callback) {
var self = this;
if (this._server) {
this._server.close(done);
}
// Wrap callback to nullify server after all open connections are closed.
function done() {
self._server = null;
if (callback) {
callback.apply(null, arguments);
}
} };
ProxyServer.prototype.before = function(type, passName, callback) {
if (type !== 'ws' && type !== 'web') {
throw new Error('type must be `web` or `ws`');
}
var passes = (type === 'ws') ? this.wsPasses : this.webPasses,
i = false;
passes.forEach(function(v, idx) {
if(v.name === passName) i = idx;
});
if(i === false) throw new Error('No such pass');
passes.splice(i, 0, callback);
};
ProxyServer.prototype.after = function(type, passName, callback) {
if (type !== 'ws' && type !== 'web') {
throw new Error('type must be `web` or `ws`');
}
var passes = (type === 'ws') ? this.wsPasses : this.webPasses,
i = false;
passes.forEach(function(v, idx) {
if(v.name === passName) i = idx;
});
if(i === false) throw new Error('No such pass');
passes.splice(i++, 0, callback);
};
} (httpProxy$3));
var httpProxyExports = httpProxy$3.exports;
// Use explicit /index.js to help browserify negotiation in require '/lib/http-proxy' (!)
var ProxyServer = httpProxyExports.Server;
/**
* Creates the proxy server.
*
* Examples:
*
* httpProxy.createProxyServer({ .. }, 8000)
* // => '{ web: [Function], ws: [Function] ... }'
*
* @param {Object} Options Config object passed to the proxy
*
* @return {Object} Proxy Proxy object with handlers for `ws` and `web` requests
*
* @api public
*/
function createProxyServer(options) {
/*
* `options` is needed and it must have the following layout:
*
* {
* target : <url string to be parsed with the url module>
* forward: <url string to be parsed with the url module>
* agent : <object to be passed to http(s).request>
* ssl : <object to be passed to https.createServer()>
* ws : <true/false, if you want to proxy websockets>
* xfwd : <true/false, adds x-forwarded-* headers>
* secure : <true/false, verify SSL certificate>
* toProxy: <true/false, explicitly specify if we are proxying to another proxy>
* prependPath: <true/false, Default: true - specify whether you want to prepend the target's path to the proxy path>
* ignorePath: <true/false, Default: false - specify whether you want to ignore the proxy path of the incoming request>
* localAddress : <Local interface string to bind for outgoing connections>
* changeOrigin: <true/false, Default: false - changes the origin of the host header to the target URL>
* preserveHeaderKeyCase: <true/false, Default: false - specify whether you want to keep letter case of response header key >
* auth : Basic authentication i.e. 'user:password' to compute an Authorization header.
* hostRewrite: rewrites the location hostname on (201/301/302/307/308) redirects, Default: null.
* autoRewrite: rewrites the location host/port on (201/301/302/307/308) redirects based on requested host/port. Default: false.
* protocolRewrite: rewrites the location protocol on (201/301/302/307/308) redirects to 'http' or 'https'. Default: null.
* }
*
* NOTE: `options.ws` and `options.ssl` are optional.
* `options.target` and `options.forward` cannot both be missing.
*/
return new ProxyServer(options);
}
ProxyServer.createProxyServer = createProxyServer;
ProxyServer.createServer = createProxyServer;
ProxyServer.createProxy = createProxyServer;
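// Illustrative sketch (comment only): standalone usage of the exports above. Ports and
// target are hypothetical.
//
//   var proxy = createProxyServer({ target: 'http://127.0.0.1:9000', ws: true, xfwd: true });
//   proxy.on('error', function (err, req, res) {
//     // without an extra 'error' listener, ProxyServer#onError re-throws proxy errors
//   });
//   proxy.listen(8000); // #listen also wires the 'upgrade' event because options.ws is true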
/**
* Export the proxy "Server" as the main export.
*/
var httpProxy$2 = ProxyServer;
/*!
* Caron dimonio, con occhi di bragia
* loro accennando, tutte le raccoglie;
* batte col remo qualunque s’adagia
*
* Charon the demon, with the eyes of glede,
* Beckoning to them, collects them all together,
* Beats with his oar whoever lags behind
*
* Dante - The Divine Comedy (Canto III)
*/
var httpProxy = httpProxy$2;
var httpProxy$1 = /*@__PURE__*/getDefaultExportFromCjs(httpProxy);
const debug$4 = createDebugger("vite:proxy");
const rewriteOriginHeader = (proxyReq, options, config) => {
if (options.rewriteWsOrigin) {
const { target } = options;
if (proxyReq.headersSent) {
config.logger.warn(
colors$1.yellow(
`Unable to rewrite Origin header as headers are already sent.`
)
);
return;
}
if (proxyReq.getHeader("origin") && target) {
const changedOrigin = typeof target === "object" ? `${target.protocol}//${target.host}` : target;
proxyReq.setHeader("origin", changedOrigin);
}
}
};
function proxyMiddleware(httpServer, options, config) {
const proxies = {};
Object.keys(options).forEach((context) => {
let opts = options[context];
if (!opts) {
return;
}
if (typeof opts === "string") {
opts = { target: opts, changeOrigin: true };
}
const proxy = httpProxy$1.createProxyServer(opts);
if (opts.configure) {
opts.configure(proxy, opts);
}
proxy.on("error", (err, req, originalRes) => {
const res = originalRes;
if (!res) {
config.logger.error(
`${colors$1.red(`http proxy error: ${err.message}`)}
${err.stack}`,
{
timestamp: true,
error: err
}
);
} else if ("req" in res) {
config.logger.error(
`${colors$1.red(`http proxy error: ${originalRes.req.url}`)}
${err.stack}`,
{
timestamp: true,
error: err
}
);
if (!res.headersSent && !res.writableEnded) {
res.writeHead(500, {
"Content-Type": "text/plain"
}).end();
}
} else {
config.logger.error(`${colors$1.red(`ws proxy error:`)}
${err.stack}`, {
timestamp: true,
error: err
});
res.end();
}
});
proxy.on("proxyReqWs", (proxyReq, req, socket, options2, head) => {
rewriteOriginHeader(proxyReq, options2, config);
socket.on("error", (err) => {
config.logger.error(
`${colors$1.red(`ws proxy socket error:`)}
${err.stack}`,
{
timestamp: true,
error: err
}
);
});
});
proxy.on("proxyRes", (proxyRes, req, res) => {
res.on("close", () => {
if (!res.writableEnded) {
debug$4?.("destroying proxyRes in proxyRes close event");
proxyRes.destroy();
}
});
});
proxies[context] = [proxy, { ...opts }];
});
if (httpServer) {
httpServer.on("upgrade", (req, socket, head) => {
const url = req.url;
for (const context in proxies) {
if (doesProxyContextMatchUrl(context, url)) {
const [proxy, opts] = proxies[context];
if (opts.ws || opts.target?.toString().startsWith("ws:") || opts.target?.toString().startsWith("wss:")) {
if (opts.rewrite) {
req.url = opts.rewrite(url);
}
debug$4?.(`${req.url} -> ws ${opts.target}`);
proxy.ws(req, socket, head);
return;
}
}
}
});
}
return function viteProxyMiddleware(req, res, next) {
const url = req.url;
for (const context in proxies) {
if (doesProxyContextMatchUrl(context, url)) {
const [proxy, opts] = proxies[context];
const options2 = {};
if (opts.bypass) {
const bypassResult = opts.bypass(req, res, opts);
if (typeof bypassResult === "string") {
req.url = bypassResult;
debug$4?.(`bypass: ${req.url} -> ${bypassResult}`);
return next();
} else if (bypassResult === false) {
debug$4?.(`bypass: ${req.url} -> 404`);
res.statusCode = 404;
return res.end();
}
}
debug$4?.(`${req.url} -> ${opts.target || opts.forward}`);
if (opts.rewrite) {
req.url = opts.rewrite(req.url);
}
proxy.web(req, res, options2);
return;
}
}
next();
};
}
function doesProxyContextMatchUrl(context, url) {
return context[0] === "^" && new RegExp(context).test(url) || url.startsWith(context);
}
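// Illustrative examples (comment only) of how proxy config keys match req.url:
//   doesProxyContextMatchUrl('/api', '/api/users')            // => true  (plain keys are prefix-matched)
//   doesProxyContextMatchUrl('^/fallback/.*', '/fallback/a')  // => true  (keys starting with '^' are treated as RegExps)
//   doesProxyContextMatchUrl('/api', '/assets/app.js')        // => false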
const debug$3 = createDebugger("vite:html-fallback");
function htmlFallbackMiddleware(root, spaFallback, fsUtils = commonFsUtils) {
return function viteHtmlFallbackMiddleware(req, res, next) {
if (
// Only accept GET or HEAD
req.method !== "GET" && req.method !== "HEAD" || // Exclude default favicon requests
req.url === "/favicon.ico" || // Require Accept: text/html or */*
!(req.headers.accept === void 0 || // equivalent to `Accept: */*`
req.headers.accept === "" || // equivalent to `Accept: */*`
req.headers.accept.includes("text/html") || req.headers.accept.includes("*/*"))
) {
return next();
}
const url = cleanUrl(req.url);
const pathname = decodeURIComponent(url);
if (pathname.endsWith(".html")) {
const filePath = path$n.join(root, pathname);
if (fsUtils.existsSync(filePath)) {
debug$3?.(`Rewriting ${req.method} ${req.url} to ${url}`);
req.url = url;
return next();
}
} else if (pathname[pathname.length - 1] === "/") {
const filePath = path$n.join(root, pathname, "index.html");
if (fsUtils.existsSync(filePath)) {
const newUrl = url + "index.html";
debug$3?.(`Rewriting ${req.method} ${req.url} to ${newUrl}`);
req.url = newUrl;
return next();
}
} else {
const filePath = path$n.join(root, pathname + ".html");
if (fsUtils.existsSync(filePath)) {
const newUrl = url + ".html";
debug$3?.(`Rewriting ${req.method} ${req.url} to ${newUrl}`);
req.url = newUrl;
return next();
}
}
if (spaFallback) {
debug$3?.(`Rewriting ${req.method} ${req.url} to /index.html`);
req.url = "/index.html";
}
next();
};
}
const debug$2 = createDebugger("vite:send", {
onlyWhenFocused: true
});
const alias = {
js: "text/javascript",
css: "text/css",
html: "text/html",
json: "application/json"
};
function send(req, res, content, type, options) {
const {
etag = getEtag(content, { weak: true }),
cacheControl = "no-cache",
headers,
map
} = options;
if (res.writableEnded) {
return;
}
if (req.headers["if-none-match"] === etag) {
res.statusCode = 304;
res.end();
return;
}
res.setHeader("Content-Type", alias[type] || type);
res.setHeader("Cache-Control", cacheControl);
res.setHeader("Etag", etag);
if (headers) {
for (const name in headers) {
res.setHeader(name, headers[name]);
}
}
if (map && "version" in map && map.mappings) {
if (type === "js" || type === "css") {
content = getCodeWithSourcemap(type, content.toString(), map);
}
} else if (type === "js" && (!map || map.mappings !== "")) {
const code = content.toString();
if (convertSourceMap.mapFileCommentRegex.test(code)) {
debug$2?.(`Skipped injecting fallback sourcemap for ${req.url}`);
} else {
const urlWithoutTimestamp = removeTimestampQuery(req.url);
const ms = new MagicString(code);
content = getCodeWithSourcemap(
type,
code,
ms.generateMap({
source: path$n.basename(urlWithoutTimestamp),
hires: "boundary",
includeContent: true
})
);
}
}
res.statusCode = 200;
res.end(content);
return;
}
const debugCache = createDebugger("vite:cache");
const knownIgnoreList = /* @__PURE__ */ new Set(["/", "/favicon.ico"]);
function cachedTransformMiddleware(server) {
return function viteCachedTransformMiddleware(req, res, next) {
const ifNoneMatch = req.headers["if-none-match"];
if (ifNoneMatch) {
const moduleByEtag = server.moduleGraph.getModuleByEtag(ifNoneMatch);
if (moduleByEtag?.transformResult?.etag === ifNoneMatch) {
const maybeMixedEtag = isCSSRequest(req.url);
if (!maybeMixedEtag) {
debugCache?.(`[304] ${prettifyUrl(req.url, server.config.root)}`);
res.statusCode = 304;
return res.end();
}
}
}
next();
};
}
function transformMiddleware(server) {
const { root, publicDir } = server.config;
const publicDirInRoot = publicDir.startsWith(withTrailingSlash(root));
const publicPath = `${publicDir.slice(root.length)}/`;
return async function viteTransformMiddleware(req, res, next) {
if (req.method !== "GET" || knownIgnoreList.has(req.url)) {
return next();
}
let url;
try {
url = decodeURI(removeTimestampQuery(req.url)).replace(
NULL_BYTE_PLACEHOLDER,
"\0"
);
} catch (e) {
return next(e);
}
const withoutQuery = cleanUrl(url);
try {
const isSourceMap = withoutQuery.endsWith(".map");
if (isSourceMap) {
const depsOptimizer = getDepsOptimizer(server.config, false);
if (depsOptimizer?.isOptimizedDepUrl(url)) {
const sourcemapPath = url.startsWith(FS_PREFIX) ? fsPathFromId(url) : normalizePath$3(path$n.resolve(server.config.root, url.slice(1)));
try {
const map = JSON.parse(
await fsp.readFile(sourcemapPath, "utf-8")
);
applySourcemapIgnoreList(
map,
sourcemapPath,
server.config.server.sourcemapIgnoreList,
server.config.logger
);
return send(req, res, JSON.stringify(map), "json", {
headers: server.config.server.headers
});
} catch (e) {
const dummySourceMap = {
version: 3,
file: sourcemapPath.replace(/\.map$/, ""),
sources: [],
sourcesContent: [],
names: [],
mappings: ";;;;;;;;;"
};
return send(req, res, JSON.stringify(dummySourceMap), "json", {
cacheControl: "no-cache",
headers: server.config.server.headers
});
}
} else {
const originalUrl = url.replace(/\.map($|\?)/, "$1");
const map = (await server.moduleGraph.getModuleByUrl(originalUrl, false))?.transformResult?.map;
if (map) {
return send(req, res, JSON.stringify(map), "json", {
headers: server.config.server.headers
});
} else {
return next();
}
}
}
if (publicDirInRoot && url.startsWith(publicPath)) {
warnAboutExplicitPublicPathInUrl(url);
}
if (isJSRequest(url) || isImportRequest(url) || isCSSRequest(url) || isHTMLProxy(url)) {
url = removeImportQuery(url);
url = unwrapId$1(url);
if (isCSSRequest(url)) {
if (req.headers.accept?.includes("text/css") && !isDirectRequest(url)) {
url = injectQuery(url, "direct");
}
const ifNoneMatch = req.headers["if-none-match"];
if (ifNoneMatch && (await server.moduleGraph.getModuleByUrl(url, false))?.transformResult?.etag === ifNoneMatch) {
debugCache?.(`[304] ${prettifyUrl(url, server.config.root)}`);
res.statusCode = 304;
return res.end();
}
}
const result = await transformRequest(url, server, {
html: req.headers.accept?.includes("text/html")
});
if (result) {
const depsOptimizer = getDepsOptimizer(server.config, false);
const type = isDirectCSSRequest(url) ? "css" : "js";
const isDep = DEP_VERSION_RE.test(url) || depsOptimizer?.isOptimizedDepUrl(url);
return send(req, res, result.code, type, {
etag: result.etag,
// allow browser to cache npm deps!
cacheControl: isDep ? "max-age=31536000,immutable" : "no-cache",
headers: server.config.server.headers,
map: result.map
});
}
}
} catch (e) {
if (e?.code === ERR_OPTIMIZE_DEPS_PROCESSING_ERROR) {
if (!res.writableEnded) {
res.statusCode = 504;
res.statusMessage = "Optimize Deps Processing Error";
res.end();
}
server.config.logger.error(e.message);
return;
}
if (e?.code === ERR_OUTDATED_OPTIMIZED_DEP) {
if (!res.writableEnded) {
res.statusCode = 504;
res.statusMessage = "Outdated Optimize Dep";
res.end();
}
return;
}
if (e?.code === ERR_CLOSED_SERVER) {
if (!res.writableEnded) {
res.statusCode = 504;
res.statusMessage = "Outdated Request";
res.end();
}
return;
}
if (e?.code === ERR_FILE_NOT_FOUND_IN_OPTIMIZED_DEP_DIR) {
if (!res.writableEnded) {
res.statusCode = 404;
res.end();
}
server.config.logger.warn(colors$1.yellow(e.message));
return;
}
if (e?.code === ERR_LOAD_URL) {
return next();
}
return next(e);
}
next();
};
function warnAboutExplicitPublicPathInUrl(url) {
let warning;
if (isImportRequest(url)) {
const rawUrl = removeImportQuery(url);
if (urlRE.test(url)) {
warning = `Assets in the public directory are served at the root path.
Instead of ${colors$1.cyan(rawUrl)}, use ${colors$1.cyan(
rawUrl.replace(publicPath, "/")
)}.`;
} else {
warning = `Assets in public directory cannot be imported from JavaScript.
If you intend to import that asset, put the file in the src directory, and use ${colors$1.cyan(
rawUrl.replace(publicPath, "/src/")
)} instead of ${colors$1.cyan(rawUrl)}.
If you intend to use the URL of that asset, use ${colors$1.cyan(
injectQuery(rawUrl.replace(publicPath, "/"), "url")
)}.`;
}
} else {
warning = `Files in the public directory are served at the root path.
Instead of ${colors$1.cyan(url)}, use ${colors$1.cyan(
url.replace(publicPath, "/")
)}.`;
}
server.config.logger.warn(colors$1.yellow(warning));
}
}
function createDevHtmlTransformFn(config) {
const [preHooks, normalHooks, postHooks] = resolveHtmlTransforms(
config.plugins,
config.logger
);
const transformHooks = [
preImportMapHook(config),
injectCspNonceMetaTagHook(config),
...preHooks,
htmlEnvHook(config),
devHtmlHook,
...normalHooks,
...postHooks,
injectNonceAttributeTagHook(config),
postImportMapHook()
];
return (server, url, html, originalUrl) => {
return applyHtmlTransforms(html, transformHooks, {
path: url,
filename: getHtmlFilename(url, server),
server,
originalUrl
});
};
}
function getHtmlFilename(url, server) {
if (url.startsWith(FS_PREFIX)) {
return decodeURIComponent(fsPathFromId(url));
} else {
return decodeURIComponent(
normalizePath$3(path$n.join(server.config.root, url.slice(1)))
);
}
}
function shouldPreTransform(url, config) {
return !checkPublicFile(url, config) && (isJSRequest(url) || isCSSRequest(url));
}
const wordCharRE = /\w/;
function isBareRelative(url) {
return wordCharRE.test(url[0]) && !url.includes(":");
}
const isSrcSet = (attr) => attr.name === "srcset" && attr.prefix === void 0;
const processNodeUrl = (url, useSrcSetReplacer, config, htmlPath, originalUrl, server, isClassicScriptLink) => {
const replacer = (url2) => {
if (server?.moduleGraph) {
const mod = server.moduleGraph.urlToModuleMap.get(url2);
if (mod && mod.lastHMRTimestamp > 0) {
url2 = injectQuery(url2, `t=${mod.lastHMRTimestamp}`);
}
}
if (url2[0] === "/" && url2[1] !== "/" || // #3230 if some request url (localhost:3000/a/b) return to fallback html, the relative assets
// path will add `/a/` prefix, it will caused 404.
//
// skip if url contains `:` as it implies a url protocol or Windows path that we don't want to replace.
//
// rewrite `./index.js` -> `localhost:5173/a/index.js`.
// rewrite `../index.js` -> `localhost:5173/index.js`.
// rewrite `relative/index.js` -> `localhost:5173/a/relative/index.js`.
(url2[0] === "." || isBareRelative(url2)) && originalUrl && originalUrl !== "/" && htmlPath === "/index.html") {
url2 = path$n.posix.join(config.base, url2);
}
if (server && !isClassicScriptLink && shouldPreTransform(url2, config)) {
let preTransformUrl;
if (url2[0] === "/" && url2[1] !== "/") {
preTransformUrl = url2;
} else if (url2[0] === "." || isBareRelative(url2)) {
preTransformUrl = path$n.posix.join(
config.base,
path$n.posix.dirname(htmlPath),
url2
);
}
if (preTransformUrl) {
try {
preTransformUrl = decodeURI(preTransformUrl);
} catch (err) {
return url2;
}
preTransformRequest(server, preTransformUrl, config.decodedBase);
}
}
return url2;
};
const processedUrl = useSrcSetReplacer ? processSrcSetSync(url, ({ url: url2 }) => replacer(url2)) : replacer(url);
return processedUrl;
};
const devHtmlHook = async (html, { path: htmlPath, filename, server, originalUrl }) => {
const { config, moduleGraph, watcher } = server;
const base = config.base || "/";
const decodedBase = config.decodedBase || "/";
let proxyModulePath;
let proxyModuleUrl;
const trailingSlash = htmlPath.endsWith("/");
if (!trailingSlash && getFsUtils(config).existsSync(filename)) {
proxyModulePath = htmlPath;
proxyModuleUrl = proxyModulePath;
} else {
const validPath = `${htmlPath}${trailingSlash ? "index.html" : ""}`;
proxyModulePath = `\0${validPath}`;
proxyModuleUrl = wrapId$1(proxyModulePath);
}
proxyModuleUrl = joinUrlSegments(decodedBase, proxyModuleUrl);
const s = new MagicString(html);
let inlineModuleIndex = -1;
const proxyCacheUrl = decodeURI(
cleanUrl(proxyModulePath).replace(normalizePath$3(config.root), "")
);
const styleUrl = [];
const inlineStyles = [];
const addInlineModule = (node, ext) => {
inlineModuleIndex++;
const contentNode = node.childNodes[0];
const code = contentNode.value;
let map;
if (proxyModulePath[0] !== "\0") {
map = new MagicString(html).snip(
contentNode.sourceCodeLocation.startOffset,
contentNode.sourceCodeLocation.endOffset
).generateMap({ hires: "boundary" });
map.sources = [filename];
map.file = filename;
}
addToHTMLProxyCache(config, proxyCacheUrl, inlineModuleIndex, { code, map });
const modulePath = `${proxyModuleUrl}?html-proxy&index=${inlineModuleIndex}.${ext}`;
const module = server?.moduleGraph.getModuleById(modulePath);
if (module) {
server?.moduleGraph.invalidateModule(module);
}
s.update(
node.sourceCodeLocation.startOffset,
node.sourceCodeLocation.endOffset,
`<script type="module" src="${modulePath}"><\/script>`
);
preTransformRequest(server, modulePath, decodedBase);
};
await traverseHtml(html, filename, (node) => {
if (!nodeIsElement(node)) {
return;
}
if (node.nodeName === "script") {
const { src, sourceCodeLocation, isModule } = getScriptInfo(node);
if (src) {
const processedUrl = processNodeUrl(
src.value,
isSrcSet(src),
config,
htmlPath,
originalUrl,
server,
!isModule
);
if (processedUrl !== src.value) {
overwriteAttrValue(s, sourceCodeLocation, processedUrl);
}
} else if (isModule && node.childNodes.length) {
addInlineModule(node, "js");
} else if (node.childNodes.length) {
const scriptNode = node.childNodes[node.childNodes.length - 1];
for (const {
url,
start,
end
} of extractImportExpressionFromClassicScript(scriptNode)) {
const processedUrl = processNodeUrl(
url,
false,
config,
htmlPath,
originalUrl
);
if (processedUrl !== url) {
s.update(start, end, processedUrl);
}
}
}
}
const inlineStyle = findNeedTransformStyleAttribute(node);
if (inlineStyle) {
inlineModuleIndex++;
inlineStyles.push({
index: inlineModuleIndex,
location: inlineStyle.location,
code: inlineStyle.attr.value
});
}
if (node.nodeName === "style" && node.childNodes.length) {
const children = node.childNodes[0];
styleUrl.push({
start: children.sourceCodeLocation.startOffset,
end: children.sourceCodeLocation.endOffset,
code: children.value
});
}
const assetAttrs = assetAttrsConfig[node.nodeName];
if (assetAttrs) {
for (const p of node.attrs) {
const attrKey = getAttrKey(p);
if (p.value && assetAttrs.includes(attrKey)) {
const processedUrl = processNodeUrl(
p.value,
isSrcSet(p),
config,
htmlPath,
originalUrl
);
if (processedUrl !== p.value) {
overwriteAttrValue(
s,
node.sourceCodeLocation.attrs[attrKey],
processedUrl
);
}
}
}
}
});
await Promise.all([
...styleUrl.map(async ({ start, end, code }, index) => {
const url = `${proxyModulePath}?html-proxy&direct&index=${index}.css`;
const mod = await moduleGraph.ensureEntryFromUrl(url, false);
ensureWatchedFile(watcher, mod.file, config.root);
const result = await server.pluginContainer.transform(code, mod.id);
let content = "";
if (result) {
if (result.map && "version" in result.map) {
if (result.map.mappings) {
await injectSourcesContent(
result.map,
proxyModulePath,
config.logger
);
}
content = getCodeWithSourcemap("css", result.code, result.map);
} else {
content = result.code;
}
}
s.overwrite(start, end, content);
}),
...inlineStyles.map(async ({ index, location, code }) => {
const url = `${proxyModulePath}?html-proxy&inline-css&style-attr&index=${index}.css`;
const mod = await moduleGraph.ensureEntryFromUrl(url, false);
ensureWatchedFile(watcher, mod.file, config.root);
await server?.pluginContainer.transform(code, mod.id);
const hash = getHash(cleanUrl(mod.id));
const result = htmlProxyResult.get(`${hash}_${index}`);
overwriteAttrValue(s, location, result ?? "");
})
]);
html = s.toString();
return {
html,
tags: [
{
tag: "script",
attrs: {
type: "module",
src: path$n.posix.join(base, CLIENT_PUBLIC_PATH)
},
injectTo: "head-prepend"
}
]
};
};
function indexHtmlMiddleware(root, server) {
const isDev = isDevServer(server);
const fsUtils = getFsUtils(server.config);
return async function viteIndexHtmlMiddleware(req, res, next) {
if (res.writableEnded) {
return next();
}
const url = req.url && cleanUrl(req.url);
if (url?.endsWith(".html") && req.headers["sec-fetch-dest"] !== "script") {
let filePath;
if (isDev && url.startsWith(FS_PREFIX)) {
filePath = decodeURIComponent(fsPathFromId(url));
} else {
filePath = path$n.join(root, decodeURIComponent(url));
}
if (fsUtils.existsSync(filePath)) {
const headers = isDev ? server.config.server.headers : server.config.preview.headers;
try {
let html = await fsp.readFile(filePath, "utf-8");
if (isDev) {
html = await server.transformIndexHtml(url, html, req.originalUrl);
}
return send(req, res, html, "html", { headers });
} catch (e) {
return next(e);
}
}
}
next();
};
}
function preTransformRequest(server, decodedUrl, decodedBase) {
if (!server.config.server.preTransformRequests) return;
decodedUrl = unwrapId$1(stripBase(decodedUrl, decodedBase));
server.warmupRequest(decodedUrl);
}
const logTime = createDebugger("vite:time");
function timeMiddleware(root) {
return function viteTimeMiddleware(req, res, next) {
const start = performance$1.now();
const end = res.end;
res.end = (...args) => {
logTime?.(`${timeFrom(start)} ${prettifyUrl(req.url, root)}`);
return end.call(res, ...args);
};
next();
};
}
class ModuleNode {
/**
* Public served url path, starts with /
*/
url;
/**
* Resolved file system path + query
*/
id = null;
file = null;
type;
info;
meta;
importers = /* @__PURE__ */ new Set();
clientImportedModules = /* @__PURE__ */ new Set();
ssrImportedModules = /* @__PURE__ */ new Set();
acceptedHmrDeps = /* @__PURE__ */ new Set();
acceptedHmrExports = null;
importedBindings = null;
isSelfAccepting;
transformResult = null;
ssrTransformResult = null;
ssrModule = null;
ssrError = null;
lastHMRTimestamp = 0;
/**
* `import.meta.hot.invalidate` is called by the client.
* If there are multiple clients, multiple `invalidate` requests are received.
* This property is used to dedupe those requests so the update only happens once.
* @internal
*/
lastHMRInvalidationReceived = false;
lastInvalidationTimestamp = 0;
/**
* If the module only needs to update its imports timestamp (e.g. within an HMR chain),
* it is considered soft-invalidated. In this state, its `transformResult` should exist,
* and the next `transformRequest` for this module will replace the timestamps.
*
* By default the value is `undefined` if it's not soft/hard-invalidated. If it gets
* soft-invalidated, this will contain the previous `transformResult` value. If it gets
* hard-invalidated, this will be set to `'HARD_INVALIDATED'`.
* @internal
*/
invalidationState;
/**
* @internal
*/
ssrInvalidationState;
/**
* The module urls that are statically imported in the code. This information is separated
* out from `importedModules` as only importers that statically import the module can be
* soft invalidated. Other imports (e.g. watched files) need the importer to be hard invalidated.
* @internal
*/
staticImportedUrls;
/**
* @param setIsSelfAccepting - set `false` to set `isSelfAccepting` later. e.g. #7870
*/
constructor(url, setIsSelfAccepting = true) {
this.url = url;
this.type = isDirectCSSRequest(url) ? "css" : "js";
if (setIsSelfAccepting) {
this.isSelfAccepting = false;
}
}
get importedModules() {
const importedModules = new Set(this.clientImportedModules);
for (const module of this.ssrImportedModules) {
importedModules.add(module);
}
return importedModules;
}
}
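// Illustrative sketch (added, not part of the original bundle; never invoked):
// how ModuleNode fields relate. The urls below are hypothetical.
function __moduleNodeSketch() {
  const styles = new ModuleNode("/src/theme.css?direct"); // type: "css"
  const util = new ModuleNode("/src/util.ts"); // type: "js"
  styles.clientImportedModules.add(util);
  util.importers.add(styles);
  // `importedModules` is a merged view over client + ssr imported modules:
  return styles.importedModules.has(util); // true
}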
class ModuleGraph {
constructor(resolveId) {
this.resolveId = resolveId;
}
urlToModuleMap = /* @__PURE__ */ new Map();
idToModuleMap = /* @__PURE__ */ new Map();
etagToModuleMap = /* @__PURE__ */ new Map();
// a single file may correspond to multiple modules with different queries
fileToModulesMap = /* @__PURE__ */ new Map();
safeModulesPath = /* @__PURE__ */ new Set();
/**
* @internal
*/
_unresolvedUrlToModuleMap = /* @__PURE__ */ new Map();
/**
* @internal
*/
_ssrUnresolvedUrlToModuleMap = /* @__PURE__ */ new Map();
/** @internal */
_hasResolveFailedErrorModules = /* @__PURE__ */ new Set();
async getModuleByUrl(rawUrl, ssr) {
rawUrl = removeImportQuery(removeTimestampQuery(rawUrl));
const mod = this._getUnresolvedUrlToModule(rawUrl, ssr);
if (mod) {
return mod;
}
const [url] = await this._resolveUrl(rawUrl, ssr);
return this.urlToModuleMap.get(url);
}
getModuleById(id) {
return this.idToModuleMap.get(removeTimestampQuery(id));
}
getModulesByFile(file) {
return this.fileToModulesMap.get(file);
}
onFileChange(file) {
const mods = this.getModulesByFile(file);
if (mods) {
const seen = /* @__PURE__ */ new Set();
mods.forEach((mod) => {
this.invalidateModule(mod, seen);
});
}
}
onFileDelete(file) {
const mods = this.getModulesByFile(file);
if (mods) {
mods.forEach((mod) => {
mod.importedModules.forEach((importedMod) => {
importedMod.importers.delete(mod);
});
});
}
}
invalidateModule(mod, seen = /* @__PURE__ */ new Set(), timestamp = Date.now(), isHmr = false, softInvalidate = false) {
const prevInvalidationState = mod.invalidationState;
const prevSsrInvalidationState = mod.ssrInvalidationState;
if (softInvalidate) {
mod.invalidationState ??= mod.transformResult ?? "HARD_INVALIDATED";
mod.ssrInvalidationState ??= mod.ssrTransformResult ?? "HARD_INVALIDATED";
} else {
mod.invalidationState = "HARD_INVALIDATED";
mod.ssrInvalidationState = "HARD_INVALIDATED";
}
if (seen.has(mod) && prevInvalidationState === mod.invalidationState && prevSsrInvalidationState === mod.ssrInvalidationState) {
return;
}
seen.add(mod);
if (isHmr) {
mod.lastHMRTimestamp = timestamp;
mod.lastHMRInvalidationReceived = false;
} else {
mod.lastInvalidationTimestamp = timestamp;
}
const etag = mod.transformResult?.etag;
if (etag) this.etagToModuleMap.delete(etag);
mod.transformResult = null;
mod.ssrTransformResult = null;
mod.ssrModule = null;
mod.ssrError = null;
mod.importers.forEach((importer) => {
if (!importer.acceptedHmrDeps.has(mod)) {
const shouldSoftInvalidateImporter = importer.staticImportedUrls?.has(mod.url) || softInvalidate;
this.invalidateModule(
importer,
seen,
timestamp,
isHmr,
shouldSoftInvalidateImporter
);
}
});
this._hasResolveFailedErrorModules.delete(mod);
}
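// Added note (not in the original bundle): a plain invalidateModule(mod) call
// hard-invalidates, dropping the cached transformResult and recursively
// invalidating importers that do not HMR-accept this module (softly, when they
// only import it statically). Passing softInvalidate = true instead parks the
// previous transformResult in `invalidationState`, so the next transformRequest
// only has to refresh import timestamps rather than re-transform the module.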
invalidateAll() {
const timestamp = Date.now();
const seen = /* @__PURE__ */ new Set();
this.idToModuleMap.forEach((mod) => {
this.invalidateModule(mod, seen, timestamp);
});
}
/**
* Update the module graph based on a module's updated imports information.
* If there are dependencies that no longer have any importers, they are
* returned as a Set.
*
* @param staticImportedUrls Subset of `importedModules` whose entries are statically imported in code.
* This is only used for soft invalidations, so `undefined` is fine but may cause more runtime processing.
*/
async updateModuleInfo(mod, importedModules, importedBindings, acceptedModules, acceptedExports, isSelfAccepting, ssr, staticImportedUrls) {
mod.isSelfAccepting = isSelfAccepting;
const prevImports = ssr ? mod.ssrImportedModules : mod.clientImportedModules;
let noLongerImported;
let resolvePromises = [];
let resolveResults = new Array(importedModules.size);
let index = 0;
for (const imported of importedModules) {
const nextIndex = index++;
if (typeof imported === "string") {
resolvePromises.push(
this.ensureEntryFromUrl(imported, ssr).then((dep) => {
dep.importers.add(mod);
resolveResults[nextIndex] = dep;
})
);
} else {
imported.importers.add(mod);
resolveResults[nextIndex] = imported;
}
}
if (resolvePromises.length) {
await Promise.all(resolvePromises);
}
const nextImports = new Set(resolveResults);
if (ssr) {
mod.ssrImportedModules = nextImports;
} else {
mod.clientImportedModules = nextImports;
}
prevImports.forEach((dep) => {
if (!mod.clientImportedModules.has(dep) && !mod.ssrImportedModules.has(dep)) {
dep.importers.delete(mod);
if (!dep.importers.size) {
(noLongerImported || (noLongerImported = /* @__PURE__ */ new Set())).add(dep);
}
}
});
resolvePromises = [];
resolveResults = new Array(acceptedModules.size);
index = 0;
for (const accepted of acceptedModules) {
const nextIndex = index++;
if (typeof accepted === "string") {
resolvePromises.push(
this.ensureEntryFromUrl(accepted, ssr).then((dep) => {
resolveResults[nextIndex] = dep;
})
);
} else {
resolveResults[nextIndex] = accepted;
}
}
if (resolvePromises.length) {
await Promise.all(resolvePromises);
}
mod.acceptedHmrDeps = new Set(resolveResults);
mod.staticImportedUrls = staticImportedUrls;
mod.acceptedHmrExports = acceptedExports;
mod.importedBindings = importedBindings;
return noLongerImported;
}
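// Added note (not in the original bundle): callers such as the
// vite:import-analysis transform further down pass the freshly collected sets
// here, roughly
//   updateModuleInfo(mod, importedUrls, importedBindings, acceptedUrls,
//                    acceptedExports, isSelfAccepting, ssr, staticImportedUrls)
// and hand any modules returned in the "no longer imported" Set to
// handlePrunedModules.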
async ensureEntryFromUrl(rawUrl, ssr, setIsSelfAccepting = true) {
return this._ensureEntryFromUrl(rawUrl, ssr, setIsSelfAccepting);
}
/**
* @internal
*/
async _ensureEntryFromUrl(rawUrl, ssr, setIsSelfAccepting = true, resolved) {
rawUrl = removeImportQuery(removeTimestampQuery(rawUrl));
let mod = this._getUnresolvedUrlToModule(rawUrl, ssr);
if (mod) {
return mod;
}
const modPromise = (async () => {
const [url, resolvedId, meta] = await this._resolveUrl(
rawUrl,
ssr,
resolved
);
mod = this.idToModuleMap.get(resolvedId);
if (!mod) {
mod = new ModuleNode(url, setIsSelfAccepting);
if (meta) mod.meta = meta;
this.urlToModuleMap.set(url, mod);
mod.id = resolvedId;
this.idToModuleMap.set(resolvedId, mod);
const file = mod.file = cleanUrl(resolvedId);
let fileMappedModules = this.fileToModulesMap.get(file);
if (!fileMappedModules) {
fileMappedModules = /* @__PURE__ */ new Set();
this.fileToModulesMap.set(file, fileMappedModules);
}
fileMappedModules.add(mod);
} else if (!this.urlToModuleMap.has(url)) {
this.urlToModuleMap.set(url, mod);
}
this._setUnresolvedUrlToModule(rawUrl, mod, ssr);
return mod;
})();
this._setUnresolvedUrlToModule(rawUrl, modPromise, ssr);
return modPromise;
}
// some deps, like a css file referenced via @import, don't have their own
// url because they are inlined into the main css import. But they still
// need to be represented in the module graph so that they can trigger
// hmr in the importing css file.
createFileOnlyEntry(file) {
file = normalizePath$3(file);
let fileMappedModules = this.fileToModulesMap.get(file);
if (!fileMappedModules) {
fileMappedModules = /* @__PURE__ */ new Set();
this.fileToModulesMap.set(file, fileMappedModules);
}
const url = `${FS_PREFIX}${file}`;
for (const m of fileMappedModules) {
if (m.url === url || m.id === file) {
return m;
}
}
const mod = new ModuleNode(url);
mod.file = file;
fileMappedModules.add(mod);
return mod;
}
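// Added note (not in the original bundle): e.g. a hypothetical
// `/project/src/_colors.scss` pulled in via @import gets a file-only entry
// (url `${FS_PREFIX}/project/src/_colors.scss`, id left null), so a change to
// that file can still be mapped back to a module and trigger hmr in the
// importing css file.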
// for incoming urls, it is important to:
// 1. remove the HMR timestamp query (?t=xxxx) and the ?import query
// 2. resolve its extension so that urls with or without extension all map to
// the same module
async resolveUrl(url, ssr) {
url = removeImportQuery(removeTimestampQuery(url));
const mod = await this._getUnresolvedUrlToModule(url, ssr);
if (mod?.id) {
return [mod.url, mod.id, mod.meta];
}
return this._resolveUrl(url, ssr);
}
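// Added note (not in the original bundle): the extension handling in
// _resolveUrl means a hypothetical request for '/src/foo' that resolves to
// '<root>/src/foo.ts' is normalized to the url '/src/foo.ts', so requests with
// and without the extension end up sharing a single ModuleNode.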
updateModuleTransformResult(mod, result, ssr) {
if (ssr) {
mod.ssrTransformResult = result;
} else {
const prevEtag = mod.transformResult?.etag;
if (prevEtag) this.etagToModuleMap.delete(prevEtag);
mod.transformResult = result;
if (result?.etag) this.etagToModuleMap.set(result.etag, mod);
}
}
getModuleByEtag(etag) {
return this.etagToModuleMap.get(etag);
}
/**
* @internal
*/
_getUnresolvedUrlToModule(url, ssr) {
return (ssr ? this._ssrUnresolvedUrlToModuleMap : this._unresolvedUrlToModuleMap).get(url);
}
/**
* @internal
*/
_setUnresolvedUrlToModule(url, mod, ssr) {
(ssr ? this._ssrUnresolvedUrlToModuleMap : this._unresolvedUrlToModuleMap).set(url, mod);
}
/**
* @internal
*/
async _resolveUrl(url, ssr, alreadyResolved) {
const resolved = alreadyResolved ?? await this.resolveId(url, !!ssr);
const resolvedId = resolved?.id || url;
if (url !== resolvedId && !url.includes("\0") && !url.startsWith(`virtual:`)) {
const ext = extname$1(cleanUrl(resolvedId));
if (ext) {
const pathname = cleanUrl(url);
if (!pathname.endsWith(ext)) {
url = pathname + ext + url.slice(pathname.length);
}
}
}
return [url, resolvedId, resolved?.meta];
}
}
function notFoundMiddleware() {
return function vite404Middleware(_, res) {
res.statusCode = 404;
res.end();
};
}
const ROOT_FILES = [
// '.git',
// https://pnpm.io/workspaces/
"pnpm-workspace.yaml",
// https://rushjs.io/pages/advanced/config_files/
// 'rush.json',
// https://nx.dev/latest/react/getting-started/nx-setup
// 'workspace.json',
// 'nx.json',
// https://github.com/lerna/lerna#lernajson
"lerna.json"
];
function hasWorkspacePackageJSON(root) {
const path = join$2(root, "package.json");
if (!isFileReadable(path)) {
return false;
}
try {
const content = JSON.parse(fs__default.readFileSync(path, "utf-8")) || {};
return !!content.workspaces;
} catch {
return false;
}
}
function hasRootFile(root) {
return ROOT_FILES.some((file) => fs__default.existsSync(join$2(root, file)));
}
function hasPackageJSON(root) {
const path = join$2(root, "package.json");
return fs__default.existsSync(path);
}
function searchForPackageRoot(current, root = current) {
if (hasPackageJSON(current)) return current;
const dir = dirname$2(current);
if (!dir || dir === current) return root;
return searchForPackageRoot(dir, root);
}
function searchForWorkspaceRoot(current, root = searchForPackageRoot(current)) {
if (hasRootFile(current)) return current;
if (hasWorkspacePackageJSON(current)) return current;
const dir = dirname$2(current);
if (!dir || dir === current) return root;
return searchForWorkspaceRoot(dir, root);
}
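// Illustrative sketch (added, not part of the original bundle; never invoked):
// how the two searches behave in a hypothetical pnpm monorepo containing
// /repo/pnpm-workspace.yaml and /repo/packages/app/package.json.
function __workspaceRootSketch() {
  const packageRoot = searchForPackageRoot("/repo/packages/app/src"); // "/repo/packages/app"
  const workspaceRoot = searchForWorkspaceRoot("/repo/packages/app/src"); // "/repo"
  return { packageRoot, workspaceRoot };
}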
function warmupFiles(server) {
const options = server.config.server.warmup;
const root = server.config.root;
if (options?.clientFiles?.length) {
mapFiles(options.clientFiles, root).then((files) => {
for (const file of files) {
warmupFile(server, file, false);
}
});
}
if (options?.ssrFiles?.length) {
mapFiles(options.ssrFiles, root).then((files) => {
for (const file of files) {
warmupFile(server, file, true);
}
});
}
}
async function warmupFile(server, file, ssr) {
if (file.endsWith(".html")) {
const url = htmlFileToUrl(file, server.config.root);
if (url) {
try {
const html = await fsp.readFile(file, "utf-8");
await server.transformIndexHtml(url, html);
} catch (e) {
server.config.logger.error(
`Pre-transform error (${colors$1.cyan(file)}): ${e.message}`,
{
error: e,
timestamp: true
}
);
}
}
} else {
const url = fileToUrl(file, server.config.root);
await server.warmupRequest(url, { ssr });
}
}
function htmlFileToUrl(file, root) {
const url = path$n.relative(root, file);
if (url[0] === ".") return;
return "/" + normalizePath$3(url);
}
function fileToUrl(file, root) {
const url = path$n.relative(root, file);
if (url[0] === ".") {
return path$n.posix.join(FS_PREFIX, normalizePath$3(file));
}
return "/" + normalizePath$3(url);
}
function mapFiles(files, root) {
return glob(files, {
cwd: root,
absolute: true
});
}
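// Illustrative sketch (added, not part of the original bundle; never invoked):
// how warmup entries map to request urls. Paths are hypothetical and FS_PREFIX
// is assumed to be the usual "/@fs/" prefix.
function __warmupUrlSketch() {
  const inside = fileToUrl("/root/src/heavy.ts", "/root"); // "/src/heavy.ts"
  const outside = fileToUrl("/other/lib/shared.ts", "/root"); // "/@fs/other/lib/shared.ts"
  return { inside, outside };
}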
function createServer(inlineConfig = {}) {
return _createServer(inlineConfig, { hotListen: true });
}
async function _createServer(inlineConfig = {}, options) {
const config = await resolveConfig(inlineConfig, "serve");
const initPublicFilesPromise = initPublicFiles(config);
const { root, server: serverConfig } = config;
const httpsOptions = await resolveHttpsConfig(config.server.https);
const { middlewareMode } = serverConfig;
const resolvedOutDirs = getResolvedOutDirs(
config.root,
config.build.outDir,
config.build.rollupOptions?.output
);
const emptyOutDir = resolveEmptyOutDir(
config.build.emptyOutDir,
config.root,
resolvedOutDirs
);
const resolvedWatchOptions = resolveChokidarOptions(
config,
{
disableGlobbing: true,
...serverConfig.watch
},
resolvedOutDirs,
emptyOutDir
);
const middlewares = connect$1();
const httpServer = middlewareMode ? null : await resolveHttpServer(serverConfig, middlewares, httpsOptions);
const ws = createWebSocketServer(httpServer, config, httpsOptions);
const hot = createHMRBroadcaster().addChannel(ws).addChannel(createServerHMRChannel());
if (typeof config.server.hmr === "object" && config.server.hmr.channels) {
config.server.hmr.channels.forEach((channel) => hot.addChannel(channel));
}
const publicFiles = await initPublicFilesPromise;
const { publicDir } = config;
if (httpServer) {
setClientErrorHandler(httpServer, config.logger);
}
const watchEnabled = serverConfig.watch !== null;
const watcher = watchEnabled ? chokidar.watch(
// config file dependencies and env file might be outside of root
[
root,
...config.configFileDependencies,
...getEnvFilesForMode(config.mode, config.envDir),
// Watch the public directory explicitly because it might be outside
// of the root directory.
...publicDir && publicFiles ? [publicDir] : []
],
resolvedWatchOptions
) : createNoopWatcher(resolvedWatchOptions);
const moduleGraph = new ModuleGraph(
(url, ssr) => container.resolveId(url, void 0, { ssr })
);
const container = await createPluginContainer(config, moduleGraph, watcher);
const closeHttpServer = createServerCloseFn(httpServer);
const devHtmlTransformFn = createDevHtmlTransformFn(config);
const onCrawlEndCallbacks = [];
const crawlEndFinder = setupOnCrawlEnd(() => {
onCrawlEndCallbacks.forEach((cb) => cb());
});
function waitForRequestsIdle(ignoredId) {
return crawlEndFinder.waitForRequestsIdle(ignoredId);
}
function _registerRequestProcessing(id, done) {
crawlEndFinder.registerRequestProcessing(id, done);
}
function _onCrawlEnd(cb) {
onCrawlEndCallbacks.push(cb);
}
let server = {
config,
middlewares,
httpServer,
watcher,
pluginContainer: container,
ws,
hot,
moduleGraph,
resolvedUrls: null,
// will be set on listen
ssrTransform(code, inMap, url, originalCode = code) {
return ssrTransform(code, inMap, url, originalCode, server.config);
},
transformRequest(url, options2) {
return transformRequest(url, server, options2);
},
async warmupRequest(url, options2) {
try {
await transformRequest(url, server, options2);
} catch (e) {
if (e?.code === ERR_OUTDATED_OPTIMIZED_DEP || e?.code === ERR_CLOSED_SERVER) {
return;
}
server.config.logger.error(`Pre-transform error: ${e.message}`, {
error: e,
timestamp: true
});
}
},
transformIndexHtml(url, html, originalUrl) {
return devHtmlTransformFn(server, url, html, originalUrl);
},
async ssrLoadModule(url, opts) {
return ssrLoadModule(url, server, opts?.fixStacktrace);
},
async ssrFetchModule(url, importer) {
return ssrFetchModule(server, url, importer);
},
ssrFixStacktrace(e) {
ssrFixStacktrace(e, moduleGraph);
},
ssrRewriteStacktrace(stack) {
return ssrRewriteStacktrace(stack, moduleGraph);
},
async reloadModule(module) {
if (serverConfig.hmr !== false && module.file) {
updateModules(module.file, [module], Date.now(), server);
}
},
async listen(port, isRestart) {
await startServer(server, port);
if (httpServer) {
server.resolvedUrls = await resolveServerUrls(
httpServer,
config.server,
config
);
if (!isRestart && config.server.open) server.openBrowser();
}
return server;
},
openBrowser() {
const options2 = server.config.server;
const url = server.resolvedUrls?.local[0] ?? server.resolvedUrls?.network[0];
if (url) {
const path2 = typeof options2.open === "string" ? new URL(options2.open, url).href : url;
if (server.config.server.preTransformRequests) {
setTimeout(() => {
const getMethod = path2.startsWith("https:") ? get$1 : get$2;
getMethod(
path2,
{
headers: {
// Allow the history middleware to redirect to /index.html
Accept: "text/html"
}
},
(res) => {
res.on("end", () => {
});
}
).on("error", () => {
}).end();
}, 0);
}
openBrowser(path2, true, server.config.logger);
} else {
server.config.logger.warn("No URL available to open in browser");
}
},
async close() {
if (!middlewareMode) {
teardownSIGTERMListener(closeServerAndExit);
}
await Promise.allSettled([
watcher.close(),
hot.close(),
container.close(),
crawlEndFinder?.cancel(),
getDepsOptimizer(server.config)?.close(),
getDepsOptimizer(server.config, true)?.close(),
closeHttpServer()
]);
while (server._pendingRequests.size > 0) {
await Promise.allSettled(
[...server._pendingRequests.values()].map(
(pending) => pending.request
)
);
}
server.resolvedUrls = null;
},
printUrls() {
if (server.resolvedUrls) {
printServerUrls(
server.resolvedUrls,
serverConfig.host,
config.logger.info
);
} else if (middlewareMode) {
throw new Error("cannot print server URLs in middleware mode.");
} else {
throw new Error(
"cannot print server URLs before server.listen is called."
);
}
},
bindCLIShortcuts(options2) {
bindCLIShortcuts(server, options2);
},
async restart(forceOptimize) {
if (!server._restartPromise) {
server._forceOptimizeOnRestart = !!forceOptimize;
server._restartPromise = restartServer(server).finally(() => {
server._restartPromise = null;
server._forceOptimizeOnRestart = false;
});
}
return server._restartPromise;
},
waitForRequestsIdle,
_registerRequestProcessing,
_onCrawlEnd,
_setInternalServer(_server) {
server = _server;
},
_restartPromise: null,
_importGlobMap: /* @__PURE__ */ new Map(),
_forceOptimizeOnRestart: false,
_pendingRequests: /* @__PURE__ */ new Map(),
_fsDenyGlob: picomatch$4(
// matchBase: true does not work as documented
// https://github.com/micromatch/picomatch/issues/89
// convert patterns without `/` on our side for now
config.server.fs.deny.map(
(pattern) => pattern.includes("/") ? pattern : `**/${pattern}`
),
{
matchBase: false,
nocase: true,
dot: true
}
),
_shortcutsOptions: void 0
};
const reflexServer = new Proxy(server, {
get: (_, property) => {
return server[property];
},
set: (_, property, value) => {
server[property] = value;
return true;
}
});
const closeServerAndExit = async () => {
try {
await server.close();
} finally {
process.exit();
}
};
if (!middlewareMode) {
setupSIGTERMListener(closeServerAndExit);
}
const onHMRUpdate = async (type, file) => {
if (serverConfig.hmr !== false) {
try {
await handleHMRUpdate(type, file, server);
} catch (err) {
hot.send({
type: "error",
err: prepareError(err)
});
}
}
};
const onFileAddUnlink = async (file, isUnlink) => {
file = normalizePath$3(file);
await container.watchChange(file, { event: isUnlink ? "delete" : "create" });
if (publicDir && publicFiles) {
if (file.startsWith(publicDir)) {
const path2 = file.slice(publicDir.length);
publicFiles[isUnlink ? "delete" : "add"](path2);
if (!isUnlink) {
const moduleWithSamePath = await moduleGraph.getModuleByUrl(path2);
const etag = moduleWithSamePath?.transformResult?.etag;
if (etag) {
moduleGraph.etagToModuleMap.delete(etag);
}
}
}
}
if (isUnlink) moduleGraph.onFileDelete(file);
await onHMRUpdate(isUnlink ? "delete" : "create", file);
};
watcher.on("change", async (file) => {
file = normalizePath$3(file);
await container.watchChange(file, { event: "update" });
moduleGraph.onFileChange(file);
await onHMRUpdate("update", file);
});
getFsUtils(config).initWatcher?.(watcher);
watcher.on("add", (file) => {
onFileAddUnlink(file, false);
});
watcher.on("unlink", (file) => {
onFileAddUnlink(file, true);
});
hot.on("vite:invalidate", async ({ path: path2, message }) => {
const mod = moduleGraph.urlToModuleMap.get(path2);
if (mod && mod.isSelfAccepting && mod.lastHMRTimestamp > 0 && !mod.lastHMRInvalidationReceived) {
mod.lastHMRInvalidationReceived = true;
config.logger.info(
colors$1.yellow(`hmr invalidate `) + colors$1.dim(path2) + (message ? ` ${message}` : ""),
{ timestamp: true }
);
const file = getShortName(mod.file, config.root);
updateModules(
file,
[...mod.importers],
mod.lastHMRTimestamp,
server,
true
);
}
});
if (!middlewareMode && httpServer) {
httpServer.once("listening", () => {
serverConfig.port = httpServer.address().port;
});
}
const postHooks = [];
for (const hook of config.getSortedPluginHooks("configureServer")) {
postHooks.push(await hook(reflexServer));
}
if (process.env.DEBUG) {
middlewares.use(timeMiddleware(root));
}
const { cors } = serverConfig;
if (cors !== false) {
middlewares.use(corsMiddleware(typeof cors === "boolean" ? {} : cors));
}
middlewares.use(cachedTransformMiddleware(server));
const { proxy } = serverConfig;
if (proxy) {
const middlewareServer = (isObject$1(middlewareMode) ? middlewareMode.server : null) || httpServer;
middlewares.use(proxyMiddleware(middlewareServer, proxy, config));
}
if (config.base !== "/") {
middlewares.use(baseMiddleware(config.rawBase, !!middlewareMode));
}
middlewares.use("/__open-in-editor", launchEditorMiddleware$1());
middlewares.use(function viteHMRPingMiddleware(req, res, next) {
if (req.headers["accept"] === "text/x-vite-ping") {
res.writeHead(204).end();
} else {
next();
}
});
if (publicDir) {
middlewares.use(servePublicMiddleware(server, publicFiles));
}
middlewares.use(transformMiddleware(server));
middlewares.use(serveRawFsMiddleware(server));
middlewares.use(serveStaticMiddleware(server));
if (config.appType === "spa" || config.appType === "mpa") {
middlewares.use(
htmlFallbackMiddleware(
root,
config.appType === "spa",
getFsUtils(config)
)
);
}
postHooks.forEach((fn) => fn && fn());
if (config.appType === "spa" || config.appType === "mpa") {
middlewares.use(indexHtmlMiddleware(root, server));
middlewares.use(notFoundMiddleware());
}
middlewares.use(errorMiddleware(server, !!middlewareMode));
let initingServer;
let serverInited = false;
const initServer = async () => {
if (serverInited) return;
if (initingServer) return initingServer;
initingServer = async function() {
await container.buildStart({});
if (isDepsOptimizerEnabled(config, false)) {
await initDepsOptimizer(config, server);
}
warmupFiles(server);
initingServer = void 0;
serverInited = true;
}();
return initingServer;
};
if (!middlewareMode && httpServer) {
const listen = httpServer.listen.bind(httpServer);
httpServer.listen = async (port, ...args) => {
try {
hot.listen();
await initServer();
} catch (e) {
httpServer.emit("error", e);
return;
}
return listen(port, ...args);
};
} else {
if (options.hotListen) {
hot.listen();
}
await initServer();
}
return server;
}
async function startServer(server, inlinePort) {
const httpServer = server.httpServer;
if (!httpServer) {
throw new Error("Cannot call server.listen in middleware mode.");
}
const options = server.config.server;
const hostname = await resolveHostname(options.host);
const configPort = inlinePort ?? options.port;
const port = (!configPort || configPort === server._configServerPort ? server._currentServerPort : configPort) ?? DEFAULT_DEV_PORT;
server._configServerPort = configPort;
const serverPort = await httpServerStart(httpServer, {
port,
strictPort: options.strictPort,
host: hostname.host,
logger: server.config.logger
});
server._currentServerPort = serverPort;
}
function createServerCloseFn(server) {
if (!server) {
return () => Promise.resolve();
}
let hasListened = false;
const openSockets = /* @__PURE__ */ new Set();
server.on("connection", (socket) => {
openSockets.add(socket);
socket.on("close", () => {
openSockets.delete(socket);
});
});
server.once("listening", () => {
hasListened = true;
});
return () => new Promise((resolve, reject) => {
openSockets.forEach((s) => s.destroy());
if (hasListened) {
server.close((err) => {
if (err) {
reject(err);
} else {
resolve();
}
});
} else {
resolve();
}
});
}
function resolvedAllowDir(root, dir) {
return normalizePath$3(path$n.resolve(root, dir));
}
function resolveServerOptions(root, raw, logger) {
const server = {
preTransformRequests: true,
...raw,
sourcemapIgnoreList: raw?.sourcemapIgnoreList === false ? () => false : raw?.sourcemapIgnoreList || isInNodeModules$1,
middlewareMode: raw?.middlewareMode || false
};
let allowDirs = server.fs?.allow;
const deny = server.fs?.deny || [".env", ".env.*", "*.{crt,pem}"];
if (!allowDirs) {
allowDirs = [searchForWorkspaceRoot(root)];
}
if (process.versions.pnp) {
try {
const enableGlobalCache = execSync("yarn config get enableGlobalCache", { cwd: root }).toString().trim() === "true";
const yarnCacheDir = execSync(
`yarn config get ${enableGlobalCache ? "globalFolder" : "cacheFolder"}`,
{ cwd: root }
).toString().trim();
allowDirs.push(yarnCacheDir);
} catch (e) {
logger.warn(`Get yarn cache dir error: ${e.message}`, {
timestamp: true
});
}
}
allowDirs = allowDirs.map((i) => resolvedAllowDir(root, i));
const resolvedClientDir = resolvedAllowDir(root, CLIENT_DIR);
if (!allowDirs.some((dir) => isParentDirectory(dir, resolvedClientDir))) {
allowDirs.push(resolvedClientDir);
}
server.fs = {
strict: server.fs?.strict ?? true,
allow: allowDirs,
deny,
cachedChecks: server.fs?.cachedChecks
};
if (server.origin?.endsWith("/")) {
server.origin = server.origin.slice(0, -1);
logger.warn(
colors$1.yellow(
`${colors$1.bold("(!)")} server.origin should not end with "/". Using "${server.origin}" instead.`
)
);
}
return server;
}
async function restartServer(server) {
global.__vite_start_time = performance$1.now();
const shortcutsOptions = server._shortcutsOptions;
let inlineConfig = server.config.inlineConfig;
if (server._forceOptimizeOnRestart) {
inlineConfig = mergeConfig(inlineConfig, {
optimizeDeps: {
force: true
}
});
}
{
let newServer = null;
try {
newServer = await _createServer(inlineConfig, { hotListen: false });
} catch (err) {
server.config.logger.error(err.message, {
timestamp: true
});
server.config.logger.error("server restart failed", { timestamp: true });
return;
}
await server.close();
const middlewares = server.middlewares;
newServer._configServerPort = server._configServerPort;
newServer._currentServerPort = server._currentServerPort;
Object.assign(server, newServer);
middlewares.stack = newServer.middlewares.stack;
server.middlewares = middlewares;
newServer._setInternalServer(server);
}
const {
logger,
server: { port, middlewareMode }
} = server.config;
if (!middlewareMode) {
await server.listen(port, true);
} else {
server.hot.listen();
}
logger.info("server restarted.", { timestamp: true });
if (shortcutsOptions) {
shortcutsOptions.print = false;
bindCLIShortcuts(server, shortcutsOptions);
}
}
async function restartServerWithUrls(server) {
if (server.config.server.middlewareMode) {
await server.restart();
return;
}
const { port: prevPort, host: prevHost } = server.config.server;
const prevUrls = server.resolvedUrls;
await server.restart();
const {
logger,
server: { port, host }
} = server.config;
if ((port ?? DEFAULT_DEV_PORT) !== (prevPort ?? DEFAULT_DEV_PORT) || host !== prevHost || diffDnsOrderChange(prevUrls, server.resolvedUrls)) {
logger.info("");
server.printUrls();
}
}
const callCrawlEndIfIdleAfterMs = 50;
function setupOnCrawlEnd(onCrawlEnd) {
const registeredIds = /* @__PURE__ */ new Set();
const seenIds = /* @__PURE__ */ new Set();
const onCrawlEndPromiseWithResolvers = promiseWithResolvers();
let timeoutHandle;
let cancelled = false;
function cancel() {
cancelled = true;
}
let crawlEndCalled = false;
function callOnCrawlEnd() {
if (!cancelled && !crawlEndCalled) {
crawlEndCalled = true;
onCrawlEnd();
}
onCrawlEndPromiseWithResolvers.resolve();
}
function registerRequestProcessing(id, done) {
if (!seenIds.has(id)) {
seenIds.add(id);
registeredIds.add(id);
done().catch(() => {
}).finally(() => markIdAsDone(id));
}
}
function waitForRequestsIdle(ignoredId) {
if (ignoredId) {
seenIds.add(ignoredId);
markIdAsDone(ignoredId);
}
return onCrawlEndPromiseWithResolvers.promise;
}
function markIdAsDone(id) {
if (registeredIds.has(id)) {
registeredIds.delete(id);
checkIfCrawlEndAfterTimeout();
}
}
function checkIfCrawlEndAfterTimeout() {
if (cancelled || registeredIds.size > 0) return;
if (timeoutHandle) clearTimeout(timeoutHandle);
timeoutHandle = setTimeout(
callOnCrawlEndWhenIdle,
callCrawlEndIfIdleAfterMs
);
}
async function callOnCrawlEndWhenIdle() {
if (cancelled || registeredIds.size > 0) return;
callOnCrawlEnd();
}
return {
registerRequestProcessing,
waitForRequestsIdle,
cancel
};
}
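// Illustrative sketch (added, not part of the original bundle; never invoked):
// driving the crawl-end finder directly. The id and the resolved promise are
// hypothetical stand-ins for a real transform request.
async function __crawlEndSketch() {
  const finder = setupOnCrawlEnd(() => {
    // fires at most once, after every registered request has settled and the
    // graph has stayed idle for callCrawlEndIfIdleAfterMs (50ms)
  });
  finder.registerRequestProcessing("/src/main.ts", () => Promise.resolve());
  await finder.waitForRequestsIdle();
}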
var index = {
__proto__: null,
_createServer: _createServer,
createServer: createServer,
createServerCloseFn: createServerCloseFn,
resolveServerOptions: resolveServerOptions,
restartServerWithUrls: restartServerWithUrls
};
const debugHmr = createDebugger("vite:hmr");
const whitespaceRE = /\s/;
const normalizedClientDir = normalizePath$3(CLIENT_DIR);
function getShortName(file, root) {
return file.startsWith(withTrailingSlash(root)) ? path$n.posix.relative(root, file) : file;
}
async function handleHMRUpdate(type, file, server) {
const { hot, config, moduleGraph } = server;
const shortFile = getShortName(file, config.root);
const isConfig = file === config.configFile;
const isConfigDependency = config.configFileDependencies.some(
(name) => file === name
);
const isEnv = config.inlineConfig.envFile !== false && getEnvFilesForMode(config.mode, config.envDir).includes(file);
if (isConfig || isConfigDependency || isEnv) {
debugHmr?.(`[config change] ${colors$1.dim(shortFile)}`);
config.logger.info(
colors$1.green(
`${normalizePath$3(
path$n.relative(process.cwd(), file)
)} changed, restarting server...`
),
{ clear: true, timestamp: true }
);
try {
await restartServerWithUrls(server);
} catch (e) {
config.logger.error(colors$1.red(e));
}
return;
}
debugHmr?.(`[file change] ${colors$1.dim(shortFile)}`);
if (file.startsWith(withTrailingSlash(normalizedClientDir))) {
hot.send({
type: "full-reload",
path: "*",
triggeredBy: path$n.resolve(config.root, file)
});
return;
}
const mods = new Set(moduleGraph.getModulesByFile(file));
if (type === "create") {
for (const mod of moduleGraph._hasResolveFailedErrorModules) {
mods.add(mod);
}
}
if (type === "create" || type === "delete") {
for (const mod of getAffectedGlobModules(file, server)) {
mods.add(mod);
}
}
const timestamp = Date.now();
const hmrContext = {
file,
timestamp,
modules: [...mods],
read: () => readModifiedFile(file),
server
};
if (type === "update") {
for (const hook of config.getSortedPluginHooks("handleHotUpdate")) {
const filteredModules = await hook(hmrContext);
if (filteredModules) {
hmrContext.modules = filteredModules;
}
}
}
if (!hmrContext.modules.length) {
if (file.endsWith(".html")) {
config.logger.info(colors$1.green(`page reload `) + colors$1.dim(shortFile), {
clear: true,
timestamp: true
});
hot.send({
type: "full-reload",
path: config.server.middlewareMode ? "*" : "/" + normalizePath$3(path$n.relative(config.root, file))
});
} else {
debugHmr?.(`[no modules matched] ${colors$1.dim(shortFile)}`);
}
return;
}
updateModules(shortFile, hmrContext.modules, timestamp, server);
}
function updateModules(file, modules, timestamp, { config, hot, moduleGraph }, afterInvalidation) {
const updates = [];
const invalidatedModules = /* @__PURE__ */ new Set();
const traversedModules = /* @__PURE__ */ new Set();
let needFullReload = modules.length === 0;
for (const mod of modules) {
const boundaries = [];
const hasDeadEnd = propagateUpdate(mod, traversedModules, boundaries);
moduleGraph.invalidateModule(mod, invalidatedModules, timestamp, true);
if (needFullReload) {
continue;
}
if (hasDeadEnd) {
needFullReload = hasDeadEnd;
continue;
}
updates.push(
...boundaries.map(
({ boundary, acceptedVia, isWithinCircularImport }) => ({
type: `${boundary.type}-update`,
timestamp,
path: normalizeHmrUrl(boundary.url),
acceptedPath: normalizeHmrUrl(acceptedVia.url),
explicitImportRequired: boundary.type === "js" ? isExplicitImportRequired(acceptedVia.url) : false,
isWithinCircularImport,
// browser modules are invalidated by changing ?t= query,
// but in ssr we control the module system, so we can directly remove them from the cache
ssrInvalidates: getSSRInvalidatedImporters(acceptedVia)
})
)
);
}
if (needFullReload) {
const reason = typeof needFullReload === "string" ? colors$1.dim(` (${needFullReload})`) : "";
config.logger.info(
colors$1.green(`page reload `) + colors$1.dim(file) + reason,
{ clear: !afterInvalidation, timestamp: true }
);
hot.send({
type: "full-reload",
triggeredBy: path$n.resolve(config.root, file)
});
return;
}
if (updates.length === 0) {
debugHmr?.(colors$1.yellow(`no update happened `) + colors$1.dim(file));
return;
}
config.logger.info(
colors$1.green(`hmr update `) + colors$1.dim([...new Set(updates.map((u) => u.path))].join(", ")),
{ clear: !afterInvalidation, timestamp: true }
);
hot.send({
type: "update",
updates
});
}
function populateSSRImporters(module, timestamp, seen = /* @__PURE__ */ new Set()) {
module.ssrImportedModules.forEach((importer) => {
if (seen.has(importer)) {
return;
}
if (importer.lastHMRTimestamp === timestamp || importer.lastInvalidationTimestamp === timestamp) {
seen.add(importer);
populateSSRImporters(importer, timestamp, seen);
}
});
return seen;
}
function getSSRInvalidatedImporters(module) {
return [...populateSSRImporters(module, module.lastHMRTimestamp)].map(
(m) => m.file
);
}
function areAllImportsAccepted(importedBindings, acceptedExports) {
for (const binding of importedBindings) {
if (!acceptedExports.has(binding)) {
return false;
}
}
return true;
}
function propagateUpdate(node, traversedModules, boundaries, currentChain = [node]) {
if (traversedModules.has(node)) {
return false;
}
traversedModules.add(node);
if (node.id && node.isSelfAccepting === void 0) {
debugHmr?.(
`[propagate update] stop propagation because not analyzed: ${colors$1.dim(
node.id
)}`
);
return false;
}
if (node.isSelfAccepting) {
boundaries.push({
boundary: node,
acceptedVia: node,
isWithinCircularImport: isNodeWithinCircularImports(node, currentChain)
});
for (const importer of node.importers) {
if (isCSSRequest(importer.url) && !currentChain.includes(importer)) {
propagateUpdate(
importer,
traversedModules,
boundaries,
currentChain.concat(importer)
);
}
}
return false;
}
if (node.acceptedHmrExports) {
boundaries.push({
boundary: node,
acceptedVia: node,
isWithinCircularImport: isNodeWithinCircularImports(node, currentChain)
});
} else {
if (!node.importers.size) {
return true;
}
if (!isCSSRequest(node.url) && [...node.importers].every((i) => isCSSRequest(i.url))) {
return true;
}
}
for (const importer of node.importers) {
const subChain = currentChain.concat(importer);
if (importer.acceptedHmrDeps.has(node)) {
boundaries.push({
boundary: importer,
acceptedVia: node,
isWithinCircularImport: isNodeWithinCircularImports(importer, subChain)
});
continue;
}
if (node.id && node.acceptedHmrExports && importer.importedBindings) {
const importedBindingsFromNode = importer.importedBindings.get(node.id);
if (importedBindingsFromNode && areAllImportsAccepted(importedBindingsFromNode, node.acceptedHmrExports)) {
continue;
}
}
if (!currentChain.includes(importer) && propagateUpdate(importer, traversedModules, boundaries, subChain)) {
return true;
}
}
return false;
}
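// Added walkthrough (not in the original bundle): for a hypothetical graph
// main.ts -> widget.ts -> data.ts where widget.ts calls
// `import.meta.hot.accept('./data', ...)`:
// - editing data.ts: propagation stops at widget.ts, which accepted it, so a
//   single js-update with { boundary: widget, acceptedVia: data } is sent;
// - editing main.ts: it accepts nothing and has no importers, so
//   propagateUpdate returns true (dead end) and a full page reload is sent.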
function isNodeWithinCircularImports(node, nodeChain, currentChain = [node], traversedModules = /* @__PURE__ */ new Set()) {
if (traversedModules.has(node)) {
return false;
}
traversedModules.add(node);
for (const importer of node.importers) {
if (importer === node) continue;
if (isCSSRequest(importer.url)) continue;
const importerIndex = nodeChain.indexOf(importer);
if (importerIndex > -1) {
if (debugHmr) {
const importChain = [
importer,
...[...currentChain].reverse(),
...nodeChain.slice(importerIndex, -1).reverse()
];
debugHmr(
colors$1.yellow(`circular imports detected: `) + importChain.map((m) => colors$1.dim(m.url)).join(" -> ")
);
}
return true;
}
if (!currentChain.includes(importer)) {
const result = isNodeWithinCircularImports(
importer,
nodeChain,
currentChain.concat(importer),
traversedModules
);
if (result) return result;
}
}
return false;
}
function handlePrunedModules(mods, { hot }) {
const t = Date.now();
mods.forEach((mod) => {
mod.lastHMRTimestamp = t;
mod.lastHMRInvalidationReceived = false;
debugHmr?.(`[dispose] ${colors$1.dim(mod.file)}`);
});
hot.send({
type: "prune",
paths: [...mods].map((m) => m.url)
});
}
function lexAcceptedHmrDeps(code, start, urls) {
let state = 0 /* inCall */;
let prevState = 0 /* inCall */;
let currentDep = "";
function addDep(index) {
urls.add({
url: currentDep,
start: index - currentDep.length - 1,
end: index + 1
});
currentDep = "";
}
for (let i = start; i < code.length; i++) {
const char = code.charAt(i);
switch (state) {
case 0 /* inCall */:
case 4 /* inArray */:
if (char === `'`) {
prevState = state;
state = 1 /* inSingleQuoteString */;
} else if (char === `"`) {
prevState = state;
state = 2 /* inDoubleQuoteString */;
} else if (char === "`") {
prevState = state;
state = 3 /* inTemplateString */;
} else if (whitespaceRE.test(char)) {
continue;
} else {
if (state === 0 /* inCall */) {
if (char === `[`) {
state = 4 /* inArray */;
} else {
return true;
}
} else if (state === 4 /* inArray */) {
if (char === `]`) {
return false;
} else if (char === ",") {
continue;
} else {
error(i);
}
}
}
break;
case 1 /* inSingleQuoteString */:
if (char === `'`) {
addDep(i);
if (prevState === 0 /* inCall */) {
return false;
} else {
state = prevState;
}
} else {
currentDep += char;
}
break;
case 2 /* inDoubleQuoteString */:
if (char === `"`) {
addDep(i);
if (prevState === 0 /* inCall */) {
return false;
} else {
state = prevState;
}
} else {
currentDep += char;
}
break;
case 3 /* inTemplateString */:
if (char === "`") {
addDep(i);
if (prevState === 0 /* inCall */) {
return false;
} else {
state = prevState;
}
} else if (char === "$" && code.charAt(i + 1) === "{") {
error(i);
} else {
currentDep += char;
}
break;
default:
throw new Error("unknown import.meta.hot lexer state");
}
}
return false;
}
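// Illustrative sketch (added, not part of the original bundle; never invoked):
// what lexAcceptedHmrDeps extracts. `code` is hypothetical source text; the
// scan starts right after the opening paren of `accept(`.
function __lexAcceptedHmrDepsSketch() {
  const code = "import.meta.hot.accept(['./dep-a', './dep-b'], () => {})";
  const urls = new Set();
  const selfAccepting = lexAcceptedHmrDeps(code, code.indexOf("(") + 1, urls);
  // urls now holds { url: "./dep-a", ... } and { url: "./dep-b", ... };
  // a bare `import.meta.hot.accept(() => {})` would instead return true,
  // marking the module as self-accepting.
  return { selfAccepting, urls };
}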
function lexAcceptedHmrExports(code, start, exportNames) {
const urls = /* @__PURE__ */ new Set();
lexAcceptedHmrDeps(code, start, urls);
for (const { url } of urls) {
exportNames.add(url);
}
return urls.size > 0;
}
function normalizeHmrUrl(url) {
if (url[0] !== "." && url[0] !== "/") {
url = wrapId$1(url);
}
return url;
}
function error(pos) {
const err = new Error(
`import.meta.hot.accept() can only accept string literals or an Array of string literals.`
);
err.pos = pos;
throw err;
}
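// Added note (not in the original bundle): readModifiedFile guards against the
// brief window where a watcher event fires before the new file content is
// fully written. If the first read comes back empty, it polls the mtime for up
// to ~100ms (10 x 10ms) before re-reading.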
async function readModifiedFile(file) {
const content = await fsp.readFile(file, "utf-8");
if (!content) {
const mtime = (await fsp.stat(file)).mtimeMs;
for (let n = 0; n < 10; n++) {
await new Promise((r) => setTimeout(r, 10));
const newMtime = (await fsp.stat(file)).mtimeMs;
if (newMtime !== mtime) {
break;
}
}
return await fsp.readFile(file, "utf-8");
} else {
return content;
}
}
function createHMRBroadcaster() {
const channels = [];
const readyChannels = /* @__PURE__ */ new WeakSet();
const broadcaster = {
get channels() {
return [...channels];
},
addChannel(channel) {
if (channels.some((c) => c.name === channel.name)) {
throw new Error(`HMR channel "${channel.name}" is already defined.`);
}
channels.push(channel);
return broadcaster;
},
on(event, listener) {
if (event === "connection") {
const channels2 = this.channels;
channels2.forEach(
(channel) => channel.on("connection", () => {
readyChannels.add(channel);
if (channels2.every((c) => readyChannels.has(c))) {
listener();
}
})
);
return;
}
channels.forEach((channel) => channel.on(event, listener));
return;
},
off(event, listener) {
channels.forEach((channel) => channel.off(event, listener));
return;
},
send(...args) {
channels.forEach((channel) => channel.send(...args));
},
listen() {
channels.forEach((channel) => channel.listen());
},
close() {
return Promise.all(channels.map((channel) => channel.close()));
}
};
return broadcaster;
}
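// Illustrative sketch (added, not part of the original bundle; never invoked):
// how the dev server above composes channels, so a single send() reaches both
// browser clients and in-process ssr listeners. `ws` stands for the websocket
// channel returned by createWebSocketServer.
function __hmrBroadcasterSketch(ws) {
  const hot = createHMRBroadcaster()
    .addChannel(ws)
    .addChannel(createServerHMRChannel());
  hot.send({ type: "full-reload", path: "*" });
  return hot;
}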
function createServerHMRChannel() {
const innerEmitter = new EventEmitter$4();
const outsideEmitter = new EventEmitter$4();
return {
name: "ssr",
send(...args) {
let payload;
if (typeof args[0] === "string") {
payload = {
type: "custom",
event: args[0],
data: args[1]
};
} else {
payload = args[0];
}
outsideEmitter.emit("send", payload);
},
off(event, listener) {
innerEmitter.off(event, listener);
},
on: (event, listener) => {
innerEmitter.on(event, listener);
},
close() {
innerEmitter.removeAllListeners();
outsideEmitter.removeAllListeners();
},
listen() {
innerEmitter.emit("connection");
},
api: {
innerEmitter,
outsideEmitter
}
};
}
const debug$1 = createDebugger("vite:import-analysis");
const clientDir = normalizePath$3(CLIENT_DIR);
const skipRE = /\.(?:map|json)(?:$|\?)/;
const canSkipImportAnalysis = (id) => skipRE.test(id) || isDirectCSSRequest(id);
const optimizedDepChunkRE = /\/chunk-[A-Z\d]{8}\.js/;
const optimizedDepDynamicRE = /-[A-Z\d]{8}\.js/;
const hasViteIgnoreRE = /\/\*\s*@vite-ignore\s*\*\//;
const urlIsStringRE = /^(?:'.*'|".*"|`.*`)$/;
const templateLiteralRE = /^\s*`(.*)`\s*$/;
function isExplicitImportRequired(url) {
return !isJSRequest(url) && !isCSSRequest(url);
}
function extractImportedBindings(id, source, importSpec, importedBindings) {
let bindings = importedBindings.get(id);
if (!bindings) {
bindings = /* @__PURE__ */ new Set();
importedBindings.set(id, bindings);
}
const isDynamic = importSpec.d > -1;
const isMeta = importSpec.d === -2;
if (isDynamic || isMeta) {
bindings.add("*");
return;
}
const exp = source.slice(importSpec.ss, importSpec.se);
ESM_STATIC_IMPORT_RE.lastIndex = 0;
const match = ESM_STATIC_IMPORT_RE.exec(exp);
if (!match) {
return;
}
const staticImport = {
type: "static",
code: match[0],
start: match.index,
end: match.index + match[0].length,
imports: match.groups.imports,
specifier: match.groups.specifier
};
const parsed = parseStaticImport(staticImport);
if (!parsed) {
return;
}
if (parsed.namespacedImport) {
bindings.add("*");
}
if (parsed.defaultImport) {
bindings.add("default");
}
if (parsed.namedImports) {
for (const name of Object.keys(parsed.namedImports)) {
bindings.add(name);
}
}
}
function importAnalysisPlugin(config) {
const { root, base } = config;
const fsUtils = getFsUtils(config);
const clientPublicPath = path$n.posix.join(base, CLIENT_PUBLIC_PATH);
const enablePartialAccept = config.experimental?.hmrPartialAccept;
const matchAlias = getAliasPatternMatcher(config.resolve.alias);
let server;
let _env;
let _ssrEnv;
function getEnv(ssr) {
if (!_ssrEnv || !_env) {
const importMetaEnvKeys = {};
const userDefineEnv = {};
for (const key in config.env) {
importMetaEnvKeys[key] = JSON.stringify(config.env[key]);
}
for (const key in config.define) {
if (key.startsWith("import.meta.env.")) {
userDefineEnv[key.slice(16)] = config.define[key];
}
}
const env = `import.meta.env = ${serializeDefine({
...importMetaEnvKeys,
SSR: "__vite_ssr__",
...userDefineEnv
})};`;
_ssrEnv = env.replace("__vite_ssr__", "true");
_env = env.replace("__vite_ssr__", "false");
}
return ssr ? _ssrEnv : _env;
}
return {
name: "vite:import-analysis",
configureServer(_server) {
server = _server;
},
async transform(source, importer, options) {
if (!server) {
return null;
}
const ssr = options?.ssr === true;
if (canSkipImportAnalysis(importer)) {
debug$1?.(colors$1.dim(`[skipped] ${prettifyUrl(importer, root)}`));
return null;
}
const msAtStart = debug$1 ? performance$1.now() : 0;
await init;
let imports;
let exports;
source = stripBomTag(source);
try {
[imports, exports] = parse$d(source);
} catch (_e) {
const e = _e;
const { message, showCodeFrame } = createParseErrorInfo(
importer,
source
);
this.error(message, showCodeFrame ? e.idx : void 0);
}
const depsOptimizer = getDepsOptimizer(config, ssr);
const { moduleGraph } = server;
const importerModule = moduleGraph.getModuleById(importer);
if (!importerModule) {
throwOutdatedRequest(importer);
}
if (!imports.length && !this._addedImports) {
importerModule.isSelfAccepting = false;
debug$1?.(
`${timeFrom(msAtStart)} ${colors$1.dim(
`[no imports] ${prettifyUrl(importer, root)}`
)}`
);
return source;
}
let hasHMR = false;
let isSelfAccepting = false;
let hasEnv = false;
let needQueryInjectHelper = false;
let s;
const str = () => s || (s = new MagicString(source));
let isPartiallySelfAccepting = false;
const importedBindings = enablePartialAccept ? /* @__PURE__ */ new Map() : null;
const toAbsoluteUrl = (url) => path$n.posix.resolve(path$n.posix.dirname(importerModule.url), url);
const normalizeUrl = async (url, pos, forceSkipImportAnalysis = false) => {
url = stripBase(url, base);
let importerFile = importer;
const optimizeDeps = getDepOptimizationConfig(config, ssr);
if (moduleListContains(optimizeDeps?.exclude, url)) {
if (depsOptimizer) {
await depsOptimizer.scanProcessing;
for (const optimizedModule of depsOptimizer.metadata.depInfoList) {
if (!optimizedModule.src) continue;
if (optimizedModule.file === importerModule.file) {
importerFile = optimizedModule.src;
}
}
}
}
const resolved = await this.resolve(url, importerFile);
if (!resolved || resolved.meta?.["vite:alias"]?.noResolved) {
if (ssr) {
return [url, url];
}
importerModule.isSelfAccepting = false;
moduleGraph._hasResolveFailedErrorModules.add(importerModule);
return this.error(
`Failed to resolve import "${url}" from "${normalizePath$3(
path$n.relative(process.cwd(), importerFile)
)}". Does the file exist?`,
pos
);
}
if (isExternalUrl(resolved.id)) {
return [resolved.id, resolved.id];
}
const isRelative = url[0] === ".";
const isSelfImport = !isRelative && cleanUrl(url) === cleanUrl(importer);
if (resolved.id.startsWith(withTrailingSlash(root))) {
url = resolved.id.slice(root.length);
} else if (depsOptimizer?.isOptimizedDepFile(resolved.id) || // vite-plugin-react isn't following the leading \0 virtual module convention.
// This is a temporary hack to avoid expensive fs checks for React apps.
// We'll remove this as soon as we're able to fix the react plugins.
resolved.id !== "/@react-refresh" && path$n.isAbsolute(resolved.id) && fsUtils.existsSync(cleanUrl(resolved.id))) {
url = path$n.posix.join(FS_PREFIX, resolved.id);
} else {
url = resolved.id;
}
if (url[0] !== "." && url[0] !== "/") {
url = wrapId$1(resolved.id);
}
if (!ssr) {
if (isExplicitImportRequired(url)) {
url = injectQuery(url, "import");
} else if ((isRelative || isSelfImport) && !DEP_VERSION_RE.test(url)) {
const versionMatch = DEP_VERSION_RE.exec(importer);
if (versionMatch) {
url = injectQuery(url, versionMatch[1]);
}
}
try {
const depModule = await moduleGraph._ensureEntryFromUrl(
unwrapId$1(url),
ssr,
canSkipImportAnalysis(url) || forceSkipImportAnalysis,
resolved
);
if (depModule.lastHMRTimestamp > 0) {
url = injectQuery(url, `t=${depModule.lastHMRTimestamp}`);
}
} catch (e) {
e.pos = pos;
throw e;
}
url = joinUrlSegments(base, url);
}
return [url, resolved.id];
};
const orderedImportedUrls = new Array(imports.length);
const orderedAcceptedUrls = new Array(
imports.length
);
const orderedAcceptedExports = new Array(
imports.length
);
await Promise.all(
imports.map(async (importSpecifier, index) => {
const {
s: start,
e: end,
ss: expStart,
se: expEnd,
d: dynamicIndex,
a: attributeIndex
} = importSpecifier;
let specifier = importSpecifier.n;
const rawUrl = source.slice(start, end);
if (rawUrl === "import.meta") {
const prop = source.slice(end, end + 4);
if (prop === ".hot") {
hasHMR = true;
const endHot = end + 4 + (source[end + 4] === "?" ? 1 : 0);
if (source.slice(endHot, endHot + 7) === ".accept") {
if (source.slice(endHot, endHot + 14) === ".acceptExports") {
const importAcceptedExports = orderedAcceptedExports[index] = /* @__PURE__ */ new Set();
lexAcceptedHmrExports(
source,
source.indexOf("(", endHot + 14) + 1,
importAcceptedExports
);
isPartiallySelfAccepting = true;
} else {
const importAcceptedUrls = orderedAcceptedUrls[index] = /* @__PURE__ */ new Set();
if (lexAcceptedHmrDeps(
source,
source.indexOf("(", endHot + 7) + 1,
importAcceptedUrls
)) {
isSelfAccepting = true;
}
}
}
} else if (prop === ".env") {
hasEnv = true;
}
return;
} else if (templateLiteralRE.test(rawUrl)) {
if (!(rawUrl.includes("${") && rawUrl.includes("}"))) {
specifier = rawUrl.replace(templateLiteralRE, "$1");
}
}
const isDynamicImport = dynamicIndex > -1;
if (!isDynamicImport && attributeIndex > -1) {
str().remove(end + 1, expEnd);
}
if (specifier !== void 0) {
if (isExternalUrl(specifier) || isDataUrl(specifier)) {
return;
}
if (ssr && !matchAlias(specifier)) {
if (shouldExternalizeForSSR(specifier, importer, config)) {
return;
}
if (isBuiltin(specifier)) {
return;
}
}
if (specifier === clientPublicPath) {
return;
}
if (specifier[0] === "/" && !(config.assetsInclude(cleanUrl(specifier)) || urlRE.test(specifier)) && checkPublicFile(specifier, config)) {
throw new Error(
`Cannot import non-asset file ${specifier} which is inside /public. JS/CSS files inside /public are copied as-is on build and can only be referenced via <script src> or <link href> in html. If you want to get the URL of that file, use ${injectQuery(
specifier,
"url"
)} instead.`
);
}
const [url, resolvedId] = await normalizeUrl(specifier, start);
server?.moduleGraph.safeModulesPath.add(
fsPathFromUrl(stripBase(url, base))
);
if (url !== specifier) {
let rewriteDone = false;
if (depsOptimizer?.isOptimizedDepFile(resolvedId) && !optimizedDepChunkRE.test(resolvedId)) {
const file = cleanUrl(resolvedId);
const needsInterop = await optimizedDepNeedsInterop(
depsOptimizer.metadata,
file,
config,
ssr
);
if (needsInterop === void 0) {
if (!optimizedDepDynamicRE.test(file)) {
config.logger.error(
colors$1.red(
`Vite Error, ${url} optimized info should be defined`
)
);
}
} else if (needsInterop) {
debug$1?.(`${url} needs interop`);
interopNamedImports(
str(),
importSpecifier,
url,
index,
importer,
config
);
rewriteDone = true;
}
} else if (url.includes(browserExternalId) && source.slice(expStart, start).includes("{")) {
interopNamedImports(
str(),
importSpecifier,
url,
index,
importer,
config
);
rewriteDone = true;
}
if (!rewriteDone) {
const rewrittenUrl = JSON.stringify(url);
const s2 = isDynamicImport ? start : start - 1;
const e = isDynamicImport ? end : end + 1;
str().overwrite(s2, e, rewrittenUrl, {
contentOnly: true
});
}
}
const hmrUrl = unwrapId$1(stripBase(url, base));
const isLocalImport = !isExternalUrl(hmrUrl) && !isDataUrl(hmrUrl);
if (isLocalImport) {
orderedImportedUrls[index] = hmrUrl;
}
if (enablePartialAccept && importedBindings) {
extractImportedBindings(
resolvedId,
source,
importSpecifier,
importedBindings
);
}
if (!isDynamicImport && isLocalImport && config.server.preTransformRequests) {
const url2 = removeImportQuery(hmrUrl);
server.warmupRequest(url2, { ssr });
}
} else if (!importer.startsWith(withTrailingSlash(clientDir))) {
if (!isInNodeModules$1(importer)) {
const hasViteIgnore = hasViteIgnoreRE.test(
// complete expression inside parens
source.slice(dynamicIndex + 1, end)
);
if (!hasViteIgnore) {
this.warn(
`
` + colors$1.cyan(importerModule.file) + `
` + colors$1.reset(generateCodeFrame(source, start, end)) + colors$1.yellow(
`
The above dynamic import cannot be analyzed by Vite.
See ${colors$1.blue(
`https://github.com/rollup/plugins/tree/master/packages/dynamic-import-vars#limitations`
)} for supported dynamic import formats. If this is intended to be left as-is, you can use the /* @vite-ignore */ comment inside the import() call to suppress this warning.
`
)
);
}
}
if (!ssr) {
if (!urlIsStringRE.test(rawUrl) || isExplicitImportRequired(rawUrl.slice(1, -1))) {
needQueryInjectHelper = true;
str().overwrite(
start,
end,
`__vite__injectQuery(${rawUrl}, 'import')`,
{ contentOnly: true }
);
}
}
}
})
);
const _orderedImportedUrls = orderedImportedUrls.filter(isDefined);
const importedUrls = new Set(_orderedImportedUrls);
const staticImportedUrls = new Set(
_orderedImportedUrls.map((url) => removeTimestampQuery(url))
);
const acceptedUrls = mergeAcceptedUrls(orderedAcceptedUrls);
const acceptedExports = mergeAcceptedUrls(orderedAcceptedExports);
const isClassicWorker = importer.includes(WORKER_FILE_ID) && importer.includes("type=classic");
if (hasEnv && !isClassicWorker) {
str().prepend(getEnv(ssr));
}
if (hasHMR && !ssr && !isClassicWorker) {
debugHmr?.(
`${isSelfAccepting ? `[self-accepts]` : isPartiallySelfAccepting ? `[accepts-exports]` : acceptedUrls.size ? `[accepts-deps]` : `[detected api usage]`} ${prettifyUrl(importer, root)}`
);
str().prepend(
`import { createHotContext as __vite__createHotContext } from "${clientPublicPath}";import.meta.hot = __vite__createHotContext(${JSON.stringify(
normalizeHmrUrl(importerModule.url)
)});`
);
}
if (needQueryInjectHelper) {
if (isClassicWorker) {
str().append("\n" + __vite__injectQuery.toString());
} else {
str().prepend(
`import { injectQuery as __vite__injectQuery } from "${clientPublicPath}";`
);
}
}
const normalizedAcceptedUrls = /* @__PURE__ */ new Set();
for (const { url, start, end } of acceptedUrls) {
const [normalized] = await moduleGraph.resolveUrl(
toAbsoluteUrl(url),
ssr
);
normalizedAcceptedUrls.add(normalized);
str().overwrite(start, end, JSON.stringify(normalized), {
contentOnly: true
});
}
if (!isCSSRequest(importer) || SPECIAL_QUERY_RE.test(importer)) {
const pluginImports = this._addedImports;
if (pluginImports) {
(await Promise.all(
[...pluginImports].map((id) => normalizeUrl(id, 0, true))
)).forEach(([url]) => importedUrls.add(url));
}
if (ssr && importerModule.isSelfAccepting) {
isSelfAccepting = true;
}
if (!isSelfAccepting && isPartiallySelfAccepting && acceptedExports.size >= exports.length && exports.every((e) => acceptedExports.has(e.n))) {
isSelfAccepting = true;
}
const prunedImports = await moduleGraph.updateModuleInfo(
importerModule,
importedUrls,
importedBindings,
normalizedAcceptedUrls,
isPartiallySelfAccepting ? acceptedExports : null,
isSelfAccepting,
ssr,
staticImportedUrls
);
if (hasHMR && prunedImports) {
handlePrunedModules(prunedImports, server);
}
}
debug$1?.(
`${timeFrom(msAtStart)} ${colors$1.dim(
`[${importedUrls.size} imports rewritten] ${prettifyUrl(
importer,
root
)}`
)}`
);
if (s) {
return transformStableResult(s, importer, config);
} else {
return source;
}
}
};
}
function mergeAcceptedUrls(orderedUrls) {
const acceptedUrls = /* @__PURE__ */ new Set();
for (const urls of orderedUrls) {
if (!urls) continue;
for (const url of urls) acceptedUrls.add(url);
}
return acceptedUrls;
}
function createParseErrorInfo(importer, source) {
const isVue = importer.endsWith(".vue");
const isJsx = importer.endsWith(".jsx") || importer.endsWith(".tsx");
const maybeJSX = !isVue && isJSRequest(importer);
const probablyBinary = source.includes(
"\uFFFD"
);
const msg = isVue ? `Install @vitejs/plugin-vue to handle .vue files.` : maybeJSX ? isJsx ? `If you use tsconfig.json, make sure to not set jsx to preserve.` : `If you are using JSX, make sure to name the file with the .jsx or .tsx extension.` : `You may need to install appropriate plugins to handle the ${path$n.extname(
importer
)} file format, or if it's an asset, add "**/*${path$n.extname(
importer
)}" to \`assetsInclude\` in your configuration.`;
return {
message: `Failed to parse source for import analysis because the content contains invalid JS syntax. ` + msg,
showCodeFrame: !probablyBinary
};
}
const interopHelper = (m) => m?.__esModule ? m : { ...typeof m === "object" && !Array.isArray(m) || typeof m === "function" ? m : {}, default: m };
function interopNamedImports(str, importSpecifier, rewrittenUrl, importIndex, importer, config) {
const source = str.original;
const {
s: start,
e: end,
ss: expStart,
se: expEnd,
d: dynamicIndex
} = importSpecifier;
const exp = source.slice(expStart, expEnd);
if (dynamicIndex > -1) {
str.overwrite(
expStart,
expEnd,
`import('${rewrittenUrl}').then(m => (${interopHelper.toString()})(m.default))` + getLineBreaks(exp),
{ contentOnly: true }
);
} else {
const rawUrl = source.slice(start, end);
const rewritten = transformCjsImport(
exp,
rewrittenUrl,
rawUrl,
importIndex,
importer,
config
);
if (rewritten) {
str.overwrite(expStart, expEnd, rewritten + getLineBreaks(exp), {
contentOnly: true
});
} else {
str.overwrite(
start,
end,
rewrittenUrl + getLineBreaks(source.slice(start, end)),
{
contentOnly: true
}
);
}
}
}
function getLineBreaks(str) {
return str.includes("\n") ? "\n".repeat(str.split("\n").length - 1) : "";
}
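// Note: getLineBreaks preserves the original number of line breaks when an import
// expression is overwritten, so the line numbering of the code that follows (and thus
// sourcemaps and error stacks) stays aligned with the original source.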
function transformCjsImport(importExp, url, rawUrl, importIndex, importer, config) {
const node = parseAst(importExp).body[0];
if (config.command === "serve" && node.type === "ExportAllDeclaration" && !node.exported) {
config.logger.warn(
colors$1.yellow(
`
Unable to interop \`${importExp}\` in ${importer}; this may lose module exports. Please export "${rawUrl}" as ESM or use named exports instead, e.g. \`export { A, B } from "${rawUrl}"\``

)
);
} else if (node.type === "ImportDeclaration" || node.type === "ExportNamedDeclaration") {
if (!node.specifiers.length) {
return `import "${url}"`;
}
const importNames = [];
const exportNames = [];
let defaultExports = "";
for (const spec of node.specifiers) {
if (spec.type === "ImportSpecifier" && spec.imported.type === "Identifier") {
const importedName = spec.imported.name;
const localName = spec.local.name;
importNames.push({ importedName, localName });
} else if (spec.type === "ImportDefaultSpecifier") {
importNames.push({
importedName: "default",
localName: spec.local.name
});
} else if (spec.type === "ImportNamespaceSpecifier") {
importNames.push({ importedName: "*", localName: spec.local.name });
} else if (spec.type === "ExportSpecifier" && spec.exported.type === "Identifier") {
const importedName = spec.local.name;
const exportedName = spec.exported.name;
if (exportedName === "default") {
defaultExports = makeLegalIdentifier(
`__vite__cjsExportDefault_${importIndex}`
);
importNames.push({ importedName, localName: defaultExports });
} else {
const localName = makeLegalIdentifier(
`__vite__cjsExport_${exportedName}`
);
importNames.push({ importedName, localName });
exportNames.push(`${localName} as ${exportedName}`);
}
}
}
const cjsModuleName = makeLegalIdentifier(
`__vite__cjsImport${importIndex}_${rawUrl}`
);
const lines = [`import ${cjsModuleName} from "${url}"`];
importNames.forEach(({ importedName, localName }) => {
if (importedName === "*") {
lines.push(
`const ${localName} = (${interopHelper.toString()})(${cjsModuleName})`
);
} else if (importedName === "default") {
lines.push(
`const ${localName} = ${cjsModuleName}.__esModule ? ${cjsModuleName}.default : ${cjsModuleName}`
);
} else {
lines.push(`const ${localName} = ${cjsModuleName}["${importedName}"]`);
}
});
if (defaultExports) {
lines.push(`export default ${defaultExports}`);
}
if (exportNames.length) {
lines.push(`export { ${exportNames.join(", ")} }`);
}
return lines.join("; ");
}
}
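// Illustrative example of the rewrite above (the optimized URL and import index are made up):
//   transformCjsImport('import React, { useState } from "react"', '/deps/react.js', 'react', 0, importer, config)
// returns a single statement string along the lines of:
//   import __vite__cjsImport0_react from "/deps/react.js"; const React = __vite__cjsImport0_react.__esModule ? __vite__cjsImport0_react.default : __vite__cjsImport0_react; const useState = __vite__cjsImport0_react["useState"]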
function __vite__injectQuery(url, queryToInject) {
if (url[0] !== "." && url[0] !== "/") {
return url;
}
const pathname = url.replace(/[?#].*$/, "");
const { search, hash } = new URL(url, "http://vitejs.dev");
return `${pathname}?${queryToInject}${search ? `&` + search.slice(1) : ""}${hash || ""}`;
}
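// Illustrative example (made-up URL): __vite__injectQuery('./mod.js?v=abc#frag', 'import')
// returns './mod.js?import&v=abc#frag'. Specifiers that are neither relative ('.') nor
// absolute ('/') are returned unchanged.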
const isModernFlag = `__VITE_IS_MODERN__`;
const preloadMethod = `__vitePreload`;
const preloadMarker = `__VITE_PRELOAD__`;
const preloadHelperId = "\0vite/preload-helper.js";
const preloadMarkerRE = new RegExp(preloadMarker, "g");
const dynamicImportPrefixRE = /import\s*\(/;
const dynamicImportTreeshakenRE = /((?:\bconst\s+|\blet\s+|\bvar\s+|,\s*)(\{[^{}.=]+\})\s*=\s*await\s+import\([^)]+\))|(\(\s*await\s+import\([^)]+\)\s*\)(\??\.[\w$]+))|\bimport\([^)]+\)(\s*\.then\(\s*(?:function\s*)?\(\s*\{([^{}.=]+)\}\))/g;
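// The regex above matches the three dynamic-import shapes that can be tree-shaken:
//   1. const { foo } = await import('...')      (destructured await)
//   2. (await import('...')).foo                (property access on the awaited namespace)
//   3. import('...').then(({ foo }) => ...)     (destructuring inside a .then callback)
// The captured positions and binding names are used below to wrap these imports with the
// preload helper while keeping the destructured bindings intact.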
function toRelativePath(filename, importer) {
const relPath = path$n.posix.relative(path$n.posix.dirname(importer), filename);
return relPath[0] === "." ? relPath : `./${relPath}`;
}
function indexOfMatchInSlice(str, reg, pos = 0) {
reg.lastIndex = pos;
const result = reg.exec(str);
return result?.index ?? -1;
}
function detectScriptRel() {
const relList = typeof document !== "undefined" && document.createElement("link").relList;
return relList && relList.supports && relList.supports("modulepreload") ? "modulepreload" : "preload";
}
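// detectScriptRel is stringified into the preload helper: at runtime it feature-detects
// <link rel="modulepreload"> support and falls back to rel="preload" in older browsers.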
function preload(baseModule, deps, importerUrl) {
let promise = Promise.resolve();
if (__VITE_IS_MODERN__ && deps && deps.length > 0) {
const links = document.getElementsByTagName("link");
const cspNonceMeta = document.querySelector(
"meta[property=csp-nonce]"
);
const cspNonce = cspNonceMeta?.nonce || cspNonceMeta?.getAttribute("nonce");
promise = Promise.all(
deps.map((dep) => {
dep = assetsURL(dep, importerUrl);
if (dep in seen) return;
seen[dep] = true;
const isCss = dep.endsWith(".css");
const cssSelector = isCss ? '[rel="stylesheet"]' : "";
const isBaseRelative = !!importerUrl;
if (isBaseRelative) {
for (let i = links.length - 1; i >= 0; i--) {
const link2 = links[i];
if (link2.href === dep && (!isCss || link2.rel === "stylesheet")) {
return;
}
}
} else if (document.querySelector(`link[href="${dep}"]${cssSelector}`)) {
return;
}
const link = document.createElement("link");
link.rel = isCss ? "stylesheet" : scriptRel;
if (!isCss) {
link.as = "script";
link.crossOrigin = "";
}
link.href = dep;
if (cspNonce) {
link.setAttribute("nonce", cspNonce);
}
document.head.appendChild(link);
if (isCss) {
return new Promise((res, rej) => {
link.addEventListener("load", res);
link.addEventListener(
"error",
() => rej(new Error(`Unable to preload CSS for ${dep}`))
);
});
}
})
);
}
return promise.then(() => baseModule()).catch((err) => {
const e = new Event("vite:preloadError", {
cancelable: true
});
e.payload = err;
window.dispatchEvent(e);
if (!e.defaultPrevented) {
throw err;
}
});
}
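// preload() is likewise stringified into the generated preload helper module. At runtime it:
//   - resolves each dep URL via assetsURL, skipping deps already seen or already linked,
//   - appends <link rel="stylesheet"> for CSS and <link rel={scriptRel}> for JS deps,
//   - waits for CSS links to load (rejecting with "Unable to preload CSS for ..." on error),
//   - then runs the wrapped dynamic import and, on failure, dispatches a cancelable
//     "vite:preloadError" event, rethrowing unless the event was defaultPrevented.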
function buildImportAnalysisPlugin(config) {
const ssr = !!config.build.ssr;
const isWorker = config.isWorker;
const insertPreload = !(ssr || !!config.build.lib || isWorker);
const renderBuiltUrl = config.experimental.renderBuiltUrl;
const isRelativeBase = config.base === "./" || config.base === "";
const { modulePreload } = config.build;
const scriptRel2 = modulePreload && modulePreload.polyfill ? `'modulepreload'` : `(${detectScriptRel.toString()})()`;
const assetsURL2 = renderBuiltUrl || isRelativeBase ? (
// If `experimental.renderBuiltUrl` is used, the dependencies might be relative to the current chunk.
// If relative base is used, the dependencies are relative to the current chunk.
// The importerUrl is passed as third parameter to __vitePreload in this case
`function(dep, importerUrl) { return new URL(dep, importerUrl).href }`
) : (
// If the base isn't relative, then the deps are relative to the projects `outDir` and the base
// is appended inside __vitePreload too.
`function(dep) { return ${JSON.stringify(config.base)}+dep }`
);
const preloadCode = `const scriptRel = ${scriptRel2};const assetsURL = ${assetsURL2};const seen = {};export const ${preloadMethod} = ${preload.toString()}`;
return {
name: "vite:build-import-analysis",
resolveId(id) {
if (id === preloadHelperId) {
return id;
}
},
load(id) {
if (id === preloadHelperId) {
return preloadCode;
}
},
async transform(source, importer) {
if (isInNodeModules$1(importer) && !dynamicImportPrefixRE.test(source)) {
return;
}
await init;
let imports = [];
try {
imports = parse$d(source)[0];
} catch (_e) {
const e = _e;
const { message, showCodeFrame } = createParseErrorInfo(
importer,
source
);
this.error(message, showCodeFrame ? e.idx : void 0);
}
if (!imports.length) {
return null;
}
const dynamicImports = {};
if (insertPreload) {
let match;
while (match = dynamicImportTreeshakenRE.exec(source)) {
if (match[1]) {
dynamicImports[dynamicImportTreeshakenRE.lastIndex] = {
declaration: `const ${match[2]}`,
names: match[2]?.trim()
};
continue;
}
if (match[3]) {
let names2 = /\.([^.?]+)/.exec(match[4])?.[1] || "";
if (names2 === "default") {
names2 = "default: __vite_default__";
}
dynamicImports[dynamicImportTreeshakenRE.lastIndex - match[4]?.length - 1] = { declaration: `const {${names2}}`, names: `{ ${names2} }` };
continue;
}
const names = match[6]?.trim();
dynamicImports[dynamicImportTreeshakenRE.lastIndex - match[5]?.length] = { declaration: `const {${names}}`, names: `{ ${names} }` };
}
}
let s;
const str = () => s || (s = new MagicString(source));
let needPreloadHelper = false;
for (let index = 0; index < imports.length; index++) {
const {
s: start,
e: end,
ss: expStart,
se: expEnd,
d: dynamicIndex,
a: attributeIndex
} = imports[index];
const isDynamicImport = dynamicIndex > -1;
if (!isDynamicImport && attributeIndex > -1) {
str().remove(end + 1, expEnd);
}
if (isDynamicImport && insertPreload && // Only preload static urls
(source[start] === '"' || source[start] === "'" || source[start] === "`")) {
needPreloadHelper = true;
const { declaration, names } = dynamicImports[expEnd] || {};
if (names) {
str().prependLeft(
expStart,
`${preloadMethod}(async () => { ${declaration} = await `
);
str().appendRight(expEnd, `;return ${names}}`);
} else {
str().prependLeft(expStart, `${preloadMethod}(() => `);
}
str().appendRight(
expEnd,
`,${isModernFlag}?${preloadMarker}:void 0${renderBuiltUrl || isRelativeBase ? ",import.meta.url" : ""})`
);
}
}
if (needPreloadHelper && insertPreload && !source.includes(`const ${preloadMethod} =`)) {
str().prepend(`import { ${preloadMethod} } from "${preloadHelperId}";`);
}
if (s) {
return {
code: s.toString(),
map: config.build.sourcemap ? s.generateMap({ hires: "boundary" }) : null
};
}
},
renderChunk(code, _, { format }) {
if (code.indexOf(isModernFlag) > -1) {
const re = new RegExp(isModernFlag, "g");
const isModern = String(format === "es");
if (config.build.sourcemap) {
const s = new MagicString(code);
let match;
while (match = re.exec(code)) {
s.update(match.index, match.index + isModernFlag.length, isModern);
}
return {
code: s.toString(),
map: s.generateMap({ hires: "boundary" })
};
} else {
return code.replace(re, isModern);
}
}
return null;
},
generateBundle({ format }, bundle) {
if (format !== "es") {
return;
}
if (!insertPreload) {
const removedPureCssFiles = removedPureCssFilesCache.get(config);
if (removedPureCssFiles && removedPureCssFiles.size > 0) {
for (const file in bundle) {
const chunk = bundle[file];
if (chunk.type === "chunk" && chunk.code.includes("import")) {
const code = chunk.code;
let imports;
try {
imports = parse$d(code)[0].filter((i) => i.d > -1);
} catch (e) {
const loc = numberToPos(code, e.idx);
this.error({
name: e.name,
message: e.message,
stack: e.stack,
cause: e.cause,
pos: e.idx,
loc: { ...loc, file: chunk.fileName },
frame: generateCodeFrame(code, loc)
});
}
for (const imp of imports) {
const {
n: name,
s: start,
e: end,
ss: expStart,
se: expEnd
} = imp;
let url = name;
if (!url) {
const rawUrl = code.slice(start, end);
if (rawUrl[0] === `"` && rawUrl[rawUrl.length - 1] === `"`)
url = rawUrl.slice(1, -1);
}
if (!url) continue;
const normalizedFile = path$n.posix.join(
path$n.posix.dirname(chunk.fileName),
url
);
if (removedPureCssFiles.has(normalizedFile)) {
chunk.code = chunk.code.slice(0, expStart) + `Promise.resolve({${"".padEnd(expEnd - expStart - 19, " ")}})` + chunk.code.slice(expEnd);
}
}
}
}
}
return;
}
for (const file in bundle) {
const chunk = bundle[file];
if (chunk.type === "chunk" && chunk.code.indexOf(preloadMarker) > -1) {
const code = chunk.code;
let imports;
try {
imports = parse$d(code)[0].filter((i) => i.d > -1);
} catch (e) {
const loc = numberToPos(code, e.idx);
this.error({
name: e.name,
message: e.message,
stack: e.stack,
cause: e.cause,
pos: e.idx,
loc: { ...loc, file: chunk.fileName },
frame: generateCodeFrame(code, loc)
});
}
const s = new MagicString(code);
const rewroteMarkerStartPos = /* @__PURE__ */ new Set();
const fileDeps = [];
const addFileDep = (url, runtime = false) => {
const index = fileDeps.findIndex((dep) => dep.url === url);
if (index === -1) {
return fileDeps.push({ url, runtime }) - 1;
} else {
return index;
}
};
if (imports.length) {
for (let index = 0; index < imports.length; index++) {
const {
n: name,
s: start,
e: end,
ss: expStart,
se: expEnd
} = imports[index];
let url = name;
if (!url) {
const rawUrl = code.slice(start, end);
if (rawUrl[0] === `"` && rawUrl[rawUrl.length - 1] === `"`)
url = rawUrl.slice(1, -1);
}
const deps = /* @__PURE__ */ new Set();
let hasRemovedPureCssChunk = false;
let normalizedFile = void 0;
if (url) {
normalizedFile = path$n.posix.join(
path$n.posix.dirname(chunk.fileName),
url
);
const ownerFilename = chunk.fileName;
const analyzed = /* @__PURE__ */ new Set();
const addDeps = (filename) => {
if (filename === ownerFilename) return;
if (analyzed.has(filename)) return;
analyzed.add(filename);
const chunk2 = bundle[filename];
if (chunk2) {
deps.add(chunk2.fileName);
if (chunk2.type === "chunk") {
chunk2.imports.forEach(addDeps);
chunk2.viteMetadata.importedCss.forEach((file2) => {
deps.add(file2);
});
}
} else {
const removedPureCssFiles = removedPureCssFilesCache.get(config);
const chunk3 = removedPureCssFiles.get(filename);
if (chunk3) {
if (chunk3.viteMetadata.importedCss.size) {
chunk3.viteMetadata.importedCss.forEach((file2) => {
deps.add(file2);
});
hasRemovedPureCssChunk = true;
}
s.update(expStart, expEnd, "Promise.resolve({})");
}
}
};
addDeps(normalizedFile);
}
let markerStartPos2 = indexOfMatchInSlice(
code,
preloadMarkerRE,
end
);
if (markerStartPos2 === -1 && imports.length === 1) {
markerStartPos2 = indexOfMatchInSlice(code, preloadMarkerRE);
}
if (markerStartPos2 > 0) {
let depsArray = deps.size > 1 || // main chunk is removed
hasRemovedPureCssChunk && deps.size > 0 ? modulePreload === false ? (
// CSS deps use the same mechanism as module preloads, so even if disabled,
// we still need to pass these deps to the preload helper in dynamic imports.
[...deps].filter((d) => d.endsWith(".css"))
) : [...deps] : [];
const resolveDependencies = modulePreload ? modulePreload.resolveDependencies : void 0;
if (resolveDependencies && normalizedFile) {
const cssDeps = [];
const otherDeps = [];
for (const dep of depsArray) {
(dep.endsWith(".css") ? cssDeps : otherDeps).push(dep);
}
depsArray = [
...resolveDependencies(normalizedFile, otherDeps, {
hostId: file,
hostType: "js"
}),
...cssDeps
];
}
let renderedDeps;
if (renderBuiltUrl) {
renderedDeps = depsArray.map((dep) => {
const replacement = toOutputFilePathInJS(
dep,
"asset",
chunk.fileName,
"js",
config,
toRelativePath
);
if (typeof replacement === "string") {
return addFileDep(replacement);
}
return addFileDep(replacement.runtime, true);
});
} else {
renderedDeps = depsArray.map(
(d) => (
// Don't include the assets dir if the default asset file names
// are used, the path will be reconstructed by the import preload helper
isRelativeBase ? addFileDep(toRelativePath(d, file)) : addFileDep(d)
)
);
}
s.update(
markerStartPos2,
markerStartPos2 + preloadMarker.length,
renderedDeps.length > 0 ? `__vite__mapDeps([${renderedDeps.join(",")}])` : `[]`
);
rewroteMarkerStartPos.add(markerStartPos2);
}
}
}
if (fileDeps.length > 0) {
const fileDepsCode = `[${fileDeps.map(
(fileDep) => fileDep.runtime ? fileDep.url : JSON.stringify(fileDep.url)
).join(",")}]`;
const mapDepsCode = `const __vite__mapDeps=(i,m=__vite__mapDeps,d=(m.f||(m.f=${fileDepsCode})))=>i.map(i=>d[i]);
`;
if (code.startsWith("#!")) {
s.prependLeft(code.indexOf("\n") + 1, mapDepsCode);
} else {
s.prepend(mapDepsCode);
}
}
let markerStartPos = indexOfMatchInSlice(code, preloadMarkerRE);
while (markerStartPos >= 0) {
if (!rewroteMarkerStartPos.has(markerStartPos)) {
s.update(
markerStartPos,
markerStartPos + preloadMarker.length,
"void 0"
);
}
markerStartPos = indexOfMatchInSlice(
code,
preloadMarkerRE,
markerStartPos + preloadMarker.length
);
}
if (s.hasChanged()) {
chunk.code = s.toString();
if (config.build.sourcemap && chunk.map) {
const nextMap = s.generateMap({
source: chunk.fileName,
hires: "boundary"
});
const map = combineSourcemaps(chunk.fileName, [
nextMap,
chunk.map
]);
map.toUrl = () => genSourceMapUrl(map);
chunk.map = map;
if (config.build.sourcemap === "inline") {
chunk.code = chunk.code.replace(
convertSourceMap.mapFileCommentRegex,
""
);
chunk.code += `
//# sourceMappingURL=${genSourceMapUrl(map)}`;
} else if (config.build.sourcemap) {
const mapAsset = bundle[chunk.fileName + ".map"];
if (mapAsset && mapAsset.type === "asset") {
mapAsset.source = map.toString();
}
}
}
}
}
}
}
};
}
function ssrManifestPlugin(config) {
const ssrManifest = {};
const base = config.base;
return {
name: "vite:ssr-manifest",
generateBundle(_options, bundle) {
for (const file in bundle) {
const chunk = bundle[file];
if (chunk.type === "chunk") {
for (const id in chunk.modules) {
const normalizedId = normalizePath$3(relative$2(config.root, id));
const mappedChunks = ssrManifest[normalizedId] ?? (ssrManifest[normalizedId] = []);
if (!chunk.isEntry) {
mappedChunks.push(joinUrlSegments(base, chunk.fileName));
chunk.viteMetadata.importedCss.forEach((file2) => {
mappedChunks.push(joinUrlSegments(base, file2));
});
}
chunk.viteMetadata.importedAssets.forEach((file2) => {
mappedChunks.push(joinUrlSegments(base, file2));
});
}
if (chunk.code.includes(preloadMethod)) {
const code = chunk.code;
let imports = [];
try {
imports = parse$d(code)[0].filter((i) => i.n && i.d > -1);
} catch (_e) {
const e = _e;
const loc = numberToPos(code, e.idx);
this.error({
name: e.name,
message: e.message,
stack: e.stack,
cause: e.cause,
pos: e.idx,
loc: { ...loc, file: chunk.fileName },
frame: generateCodeFrame(code, loc)
});
}
if (imports.length) {
for (let index = 0; index < imports.length; index++) {
const { s: start, e: end, n: name } = imports[index];
const url = code.slice(start, end);
const deps = [];
const ownerFilename = chunk.fileName;
const analyzed = /* @__PURE__ */ new Set();
const addDeps = (filename) => {
if (filename === ownerFilename) return;
if (analyzed.has(filename)) return;
analyzed.add(filename);
const chunk2 = bundle[filename];
if (chunk2) {
chunk2.viteMetadata.importedCss.forEach((file2) => {
deps.push(joinUrlSegments(base, file2));
});
chunk2.imports.forEach(addDeps);
}
};
const normalizedFile = normalizePath$3(
join$2(dirname$2(chunk.fileName), url.slice(1, -1))
);
addDeps(normalizedFile);
ssrManifest[basename$2(name)] = deps;
}
}
}
}
}
this.emitFile({
fileName: typeof config.build.ssrManifest === "string" ? config.build.ssrManifest : ".vite/ssr-manifest.json",
type: "asset",
source: JSON.stringify(sortObjectKeys(ssrManifest), void 0, 2)
});
}
};
}
function loadFallbackPlugin() {
return {
name: "vite:load-fallback",
async load(id) {
try {
const cleanedId = cleanUrl(id);
const content = await fsp.readFile(cleanedId, "utf-8");
this.addWatchFile(cleanedId);
return content;
} catch (e) {
const content = await fsp.readFile(id, "utf-8");
this.addWatchFile(id);
return content;
}
}
};
}
function completeSystemWrapPlugin() {
const SystemJSWrapRE = /System.register\(.*?(\(exports\)|\(\))/g;
return {
name: "vite:force-systemjs-wrap-complete",
renderChunk(code, chunk, opts) {
if (opts.format === "system") {
return {
code: code.replace(
SystemJSWrapRE,
(s, s1) => s.replace(s1, "(exports, module)")
),
map: null
};
}
}
};
}
function resolveBuildOptions(raw, logger, root) {
const deprecatedPolyfillModulePreload = raw?.polyfillModulePreload;
if (raw) {
const { polyfillModulePreload, ...rest } = raw;
raw = rest;
if (deprecatedPolyfillModulePreload !== void 0) {
logger.warn(
"polyfillModulePreload is deprecated. Use modulePreload.polyfill instead."
);
}
if (deprecatedPolyfillModulePreload === false && raw.modulePreload === void 0) {
raw.modulePreload = { polyfill: false };
}
}
const modulePreload = raw?.modulePreload;
const defaultModulePreload = {
polyfill: true
};
const defaultBuildOptions = {
outDir: "dist",
assetsDir: "assets",
assetsInlineLimit: DEFAULT_ASSETS_INLINE_LIMIT,
cssCodeSplit: !raw?.lib,
sourcemap: false,
rollupOptions: {},
minify: raw?.ssr ? false : "esbuild",
terserOptions: {},
write: true,
emptyOutDir: null,
copyPublicDir: true,
manifest: false,
lib: false,
ssr: false,
ssrManifest: false,
ssrEmitAssets: false,
reportCompressedSize: true,
chunkSizeWarningLimit: 500,
watch: null
};
const userBuildOptions = raw ? mergeConfig(defaultBuildOptions, raw) : defaultBuildOptions;
const resolved = {
target: "modules",
cssTarget: false,
...userBuildOptions,
commonjsOptions: {
include: [/node_modules/],
extensions: [".js", ".cjs"],
...userBuildOptions.commonjsOptions
},
dynamicImportVarsOptions: {
warnOnError: true,
exclude: [/node_modules/],
...userBuildOptions.dynamicImportVarsOptions
},
// Resolve to false | object
modulePreload: modulePreload === false ? false : typeof modulePreload === "object" ? {
...defaultModulePreload,
...modulePreload
} : defaultModulePreload
};
if (resolved.target === "modules") {
resolved.target = ESBUILD_MODULES_TARGET;
} else if (resolved.target === "esnext" && resolved.minify === "terser") {
try {
const terserPackageJsonPath = requireResolveFromRootWithFallback(
root,
"terser/package.json"
);
const terserPackageJson = JSON.parse(
fs__default.readFileSync(terserPackageJsonPath, "utf-8")
);
const v = terserPackageJson.version.split(".");
if (v[0] === "5" && v[1] < 16) {
resolved.target = "es2021";
}
} catch {
}
}
if (!resolved.cssTarget) {
resolved.cssTarget = resolved.target;
}
if (resolved.minify === "false") {
resolved.minify = false;
} else if (resolved.minify === true) {
resolved.minify = "esbuild";
}
if (resolved.cssMinify == null) {
resolved.cssMinify = !!resolved.minify;
}
return resolved;
}
async function resolveBuildPlugins(config) {
const options = config.build;
const { commonjsOptions } = options;
const usePluginCommonjs = !Array.isArray(commonjsOptions?.include) || commonjsOptions?.include.length !== 0;
const rollupOptionsPlugins = options.rollupOptions.plugins;
return {
pre: [
completeSystemWrapPlugin(),
...usePluginCommonjs ? [commonjs(options.commonjsOptions)] : [],
dataURIPlugin(),
...(await asyncFlatten(arraify(rollupOptionsPlugins))).filter(
Boolean
),
...config.isWorker ? [webWorkerPostPlugin()] : []
],
post: [
buildImportAnalysisPlugin(config),
...config.esbuild !== false ? [buildEsbuildPlugin(config)] : [],
...options.minify ? [terserPlugin(config)] : [],
...!config.isWorker ? [
...options.manifest ? [manifestPlugin(config)] : [],
...options.ssrManifest ? [ssrManifestPlugin(config)] : [],
buildReporterPlugin(config)
] : [],
loadFallbackPlugin()
]
};
}
async function build(inlineConfig = {}) {
const config = await resolveConfig(
inlineConfig,
"build",
"production",
"production"
);
const options = config.build;
const { logger } = config;
const ssr = !!options.ssr;
const libOptions = options.lib;
logger.info(
colors$1.cyan(
`vite v${VERSION} ${colors$1.green(
`building ${ssr ? `SSR bundle ` : ``}for ${config.mode}...`
)}`
)
);
const resolve = (p) => path$n.resolve(config.root, p);
const input = libOptions ? options.rollupOptions?.input || (typeof libOptions.entry === "string" ? resolve(libOptions.entry) : Array.isArray(libOptions.entry) ? libOptions.entry.map(resolve) : Object.fromEntries(
Object.entries(libOptions.entry).map(([alias, file]) => [
alias,
resolve(file)
])
)) : typeof options.ssr === "string" ? resolve(options.ssr) : options.rollupOptions?.input || resolve("index.html");
if (ssr && typeof input === "string" && input.endsWith(".html")) {
throw new Error(
`rollupOptions.input should not be an html file when building for SSR. Please specify a dedicated SSR entry.`
);
}
if (config.build.cssCodeSplit === false) {
const inputs = typeof input === "string" ? [input] : Array.isArray(input) ? input : Object.values(input);
if (inputs.some((input2) => input2.endsWith(".css"))) {
throw new Error(
`When "build.cssCodeSplit: false" is set, "rollupOptions.input" should not include CSS files.`
);
}
}
const outDir = resolve(options.outDir);
const plugins = ssr ? config.plugins.map((p) => injectSsrFlagToHooks(p)) : config.plugins;
const rollupOptions = {
preserveEntrySignatures: ssr ? "allow-extension" : libOptions ? "strict" : false,
cache: config.build.watch ? void 0 : false,
...options.rollupOptions,
input,
plugins,
external: options.rollupOptions?.external,
onwarn(warning, warn) {
onRollupWarning(warning, warn, config);
}
};
function extractStack(e) {
const { stack, name = "Error", message } = e;
if (!stack) {
return stack;
}
const expectedPrefix = `${name}: ${message}
`;
if (stack.startsWith(expectedPrefix)) {
return stack.slice(expectedPrefix.length);
}
return stack;
}
const normalizeCodeFrame = (frame) => {
const trimmedPadding = frame.replace(/^\n|\n$/g, "");
return `
${trimmedPadding}
`;
};
const enhanceRollupError = (e) => {
const stackOnly = extractStack(e);
let msg = colors$1.red((e.plugin ? `[${e.plugin}] ` : "") + e.message);
if (e.id) {
msg += `
file: ${colors$1.cyan(
e.id + (e.loc ? `:${e.loc.line}:${e.loc.column}` : "")
)}`;
}
if (e.frame) {
msg += `
` + colors$1.yellow(normalizeCodeFrame(e.frame));
}
e.message = msg;
if (stackOnly !== void 0) {
e.stack = `${e.message}
${stackOnly}`;
}
};
const outputBuildError = (e) => {
enhanceRollupError(e);
clearLine();
logger.error(e.message, { error: e });
};
let bundle;
let startTime;
try {
const buildOutputOptions = (output = {}) => {
if (output.output) {
logger.warn(
`You've set "rollupOptions.output.output" in your config. This is deprecated and will override all Vite.js default output options. Please use "rollupOptions.output" instead.`
);
}
if (output.file) {
throw new Error(
`Vite does not support "rollupOptions.output.file". Please use "rollupOptions.output.dir" and "rollupOptions.output.entryFileNames" instead.`
);
}
if (output.sourcemap) {
logger.warnOnce(
colors$1.yellow(
`Vite does not support "rollupOptions.output.sourcemap". Please use "build.sourcemap" instead.`
)
);
}
const ssrNodeBuild = ssr && config.ssr.target === "node";
const ssrWorkerBuild = ssr && config.ssr.target === "webworker";
const format = output.format || "es";
const jsExt = ssrNodeBuild || libOptions ? resolveOutputJsExtension(
format,
findNearestPackageData(config.root, config.packageCache)?.data.type
) : "js";
return {
dir: outDir,
// Default format is 'es' for regular and for SSR builds
format,
exports: "auto",
sourcemap: options.sourcemap,
name: libOptions ? libOptions.name : void 0,
hoistTransitiveImports: libOptions ? false : void 0,
// es2015 enables `generatedCode.symbols`
// - #764 add `Symbol.toStringTag` when build es module into cjs chunk
// - #1048 add `Symbol.toStringTag` for module default export
generatedCode: "es2015",
entryFileNames: ssr ? `[name].${jsExt}` : libOptions ? ({ name }) => resolveLibFilename(
libOptions,
format,
name,
config.root,
jsExt,
config.packageCache
) : path$n.posix.join(options.assetsDir, `[name]-[hash].${jsExt}`),
chunkFileNames: libOptions ? `[name]-[hash].${jsExt}` : path$n.posix.join(options.assetsDir, `[name]-[hash].${jsExt}`),
assetFileNames: libOptions ? `[name].[ext]` : path$n.posix.join(options.assetsDir, `[name]-[hash].[ext]`),
inlineDynamicImports: output.format === "umd" || output.format === "iife" || ssrWorkerBuild && (typeof input === "string" || Object.keys(input).length === 1),
...output
};
};
const outputs = resolveBuildOutputs(
options.rollupOptions?.output,
libOptions,
logger
);
const normalizedOutputs = [];
if (Array.isArray(outputs)) {
for (const resolvedOutput of outputs) {
normalizedOutputs.push(buildOutputOptions(resolvedOutput));
}
} else {
normalizedOutputs.push(buildOutputOptions(outputs));
}
const resolvedOutDirs = getResolvedOutDirs(
config.root,
options.outDir,
options.rollupOptions?.output
);
const emptyOutDir = resolveEmptyOutDir(
options.emptyOutDir,
config.root,
resolvedOutDirs,
logger
);
if (config.build.watch) {
logger.info(colors$1.cyan(`
watching for file changes...`));
const resolvedChokidarOptions = resolveChokidarOptions(
config,
config.build.watch.chokidar,
resolvedOutDirs,
emptyOutDir
);
const { watch } = await import('rollup');
const watcher = watch({
...rollupOptions,
output: normalizedOutputs,
watch: {
...config.build.watch,
chokidar: resolvedChokidarOptions
}
});
watcher.on("event", (event) => {
if (event.code === "BUNDLE_START") {
logger.info(colors$1.cyan(`
build started...`));
if (options.write) {
prepareOutDir(resolvedOutDirs, emptyOutDir, config);
}
} else if (event.code === "BUNDLE_END") {
event.result.close();
logger.info(colors$1.cyan(`built in ${event.duration}ms.`));
} else if (event.code === "ERROR") {
outputBuildError(event.error);
}
});
return watcher;
}
const { rollup } = await import('rollup');
startTime = Date.now();
bundle = await rollup(rollupOptions);
if (options.write) {
prepareOutDir(resolvedOutDirs, emptyOutDir, config);
}
const res = [];
for (const output of normalizedOutputs) {
res.push(await bundle[options.write ? "write" : "generate"](output));
}
logger.info(
`${colors$1.green(`\u2713 built in ${displayTime(Date.now() - startTime)}`)}`
);
return Array.isArray(outputs) ? res : res[0];
} catch (e) {
enhanceRollupError(e);
clearLine();
if (startTime) {
logger.error(
`${colors$1.red("x")} Build failed in ${displayTime(Date.now() - startTime)}`
);
startTime = void 0;
}
throw e;
} finally {
if (bundle) await bundle.close();
}
}
function prepareOutDir(outDirs, emptyOutDir, config) {
const outDirsArray = [...outDirs];
for (const outDir of outDirs) {
if (emptyOutDir !== false && fs__default.existsSync(outDir)) {
const skipDirs = outDirsArray.map((dir) => {
const relative = path$n.relative(outDir, dir);
if (relative && !relative.startsWith("..") && !path$n.isAbsolute(relative)) {
return relative;
}
return "";
}).filter(Boolean);
emptyDir(outDir, [...skipDirs, ".git"]);
}
if (config.build.copyPublicDir && config.publicDir && fs__default.existsSync(config.publicDir)) {
if (!areSeparateFolders(outDir, config.publicDir)) {
config.logger.warn(
colors$1.yellow(
`
${colors$1.bold(
`(!)`
)} The public directory feature may not work correctly. outDir ${colors$1.white(
colors$1.dim(outDir)
)} and publicDir ${colors$1.white(
colors$1.dim(config.publicDir)
)} are not separate folders.
`
)
);
}
copyDir(config.publicDir, outDir);
}
}
}
function getPkgName(name) {
return name?.[0] === "@" ? name.split("/")[1] : name;
}
function resolveOutputJsExtension(format, type = "commonjs") {
if (type === "module") {
return format === "cjs" || format === "umd" ? "cjs" : "js";
} else {
return format === "es" ? "mjs" : "js";
}
}
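// Extension matrix for resolveOutputJsExtension:
//   package type "module":   cjs/umd -> "cjs", everything else -> "js"
//   package type "commonjs": es      -> "mjs", everything else -> "js"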
function resolveLibFilename(libOptions, format, entryName, root, extension, packageCache) {
if (typeof libOptions.fileName === "function") {
return libOptions.fileName(format, entryName);
}
const packageJson = findNearestPackageData(root, packageCache)?.data;
const name = libOptions.fileName || (packageJson && typeof libOptions.entry === "string" ? getPkgName(packageJson.name) : entryName);
if (!name)
throw new Error(
'Name in package.json is required if option "build.lib.fileName" is not provided.'
);
extension ??= resolveOutputJsExtension(format, packageJson?.type);
if (format === "cjs" || format === "es") {
return `${name}.${extension}`;
}
return `${name}.${format}.${extension}`;
}
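// Illustrative example (names are made up): for a CommonJS package named "my-lib" with a
// single string entry and no explicit build.lib.fileName, format "es" yields "my-lib.mjs"
// while format "umd" yields "my-lib.umd.js".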
function resolveBuildOutputs(outputs, libOptions, logger) {
if (libOptions) {
const libHasMultipleEntries = typeof libOptions.entry !== "string" && Object.values(libOptions.entry).length > 1;
const libFormats = libOptions.formats || (libHasMultipleEntries ? ["es", "cjs"] : ["es", "umd"]);
if (!Array.isArray(outputs)) {
if (libFormats.includes("umd") || libFormats.includes("iife")) {
if (libHasMultipleEntries) {
throw new Error(
'Multiple entry points are not supported when output formats include "umd" or "iife".'
);
}
if (!libOptions.name) {
throw new Error(
'Option "build.lib.name" is required when output formats include "umd" or "iife".'
);
}
}
return libFormats.map((format) => ({ ...outputs, format }));
}
if (libOptions.formats) {
logger.warn(
colors$1.yellow(
'"build.lib.formats" will be ignored because "build.rollupOptions.output" is already an array format.'
)
);
}
outputs.forEach((output) => {
if ((output.format === "umd" || output.format === "iife") && !output.name) {
throw new Error(
'Entries in "build.rollupOptions.output" must specify "name" when the format is "umd" or "iife".'
);
}
});
}
return outputs;
}
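// Default lib formats when none are configured: ["es", "cjs"] for multi-entry libraries and
// ["es", "umd"] for single-entry ones; "umd"/"iife" additionally require build.lib.name and a
// single entry, which is what the checks above enforce.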
const warningIgnoreList = [`CIRCULAR_DEPENDENCY`, `THIS_IS_UNDEFINED`];
const dynamicImportWarningIgnoreList = [
`Unsupported expression`,
`statically analyzed`
];
function clearLine() {
const tty = process.stdout.isTTY && !process.env.CI;
if (tty) {
process.stdout.clearLine(0);
process.stdout.cursorTo(0);
}
}
function onRollupWarning(warning, warn, config) {
const viteWarn = (warnLog) => {
let warning2;
if (typeof warnLog === "function") {
warning2 = warnLog();
} else {
warning2 = warnLog;
}
if (typeof warning2 === "object") {
if (warning2.code === "UNRESOLVED_IMPORT") {
const id = warning2.id;
const exporter = warning2.exporter;
if (!id || !id.endsWith("?commonjs-external")) {
throw new Error(
`[vite]: Rollup failed to resolve import "${exporter}" from "${id}".
This is most likely unintended because it can break your application at runtime.
If you do want to externalize this module, explicitly add it to
\`build.rollupOptions.external\``
);
}
}
if (warning2.plugin === "rollup-plugin-dynamic-import-variables" && dynamicImportWarningIgnoreList.some(
(msg) => warning2.message.includes(msg)
)) {
return;
}
if (warningIgnoreList.includes(warning2.code)) {
return;
}
if (warning2.code === "PLUGIN_WARNING") {
config.logger.warn(
`${colors$1.bold(
colors$1.yellow(`[plugin:${warning2.plugin}]`)
)} ${colors$1.yellow(warning2.message)}`
);
return;
}
}
warn(warnLog);
};
clearLine();
const userOnWarn = config.build.rollupOptions?.onwarn;
if (userOnWarn) {
userOnWarn(warning, viteWarn);
} else {
viteWarn(warning);
}
}
function resolveUserExternal(user, id, parentId, isResolved) {
if (typeof user === "function") {
return user(id, parentId, isResolved);
} else if (Array.isArray(user)) {
return user.some((test) => isExternal(id, test));
} else {
return isExternal(id, user);
}
}
function isExternal(id, test) {
if (typeof test === "string") {
return id === test;
} else {
return test.test(id);
}
}
function injectSsrFlagToHooks(plugin) {
const { resolveId, load, transform } = plugin;
return {
...plugin,
resolveId: wrapSsrResolveId(resolveId),
load: wrapSsrLoad(load),
transform: wrapSsrTransform(transform)
};
}
function wrapSsrResolveId(hook) {
if (!hook) return;
const fn = getHookHandler(hook);
const handler = function(id, importer, options) {
return fn.call(this, id, importer, injectSsrFlag(options));
};
if ("handler" in hook) {
return {
...hook,
handler
};
} else {
return handler;
}
}
function wrapSsrLoad(hook) {
if (!hook) return;
const fn = getHookHandler(hook);
const handler = function(id, ...args) {
return fn.call(this, id, injectSsrFlag(args[0]));
};
if ("handler" in hook) {
return {
...hook,
handler
};
} else {
return handler;
}
}
function wrapSsrTransform(hook) {
if (!hook) return;
const fn = getHookHandler(hook);
const handler = function(code, importer, ...args) {
return fn.call(this, code, importer, injectSsrFlag(args[0]));
};
if ("handler" in hook) {
return {
...hook,
handler
};
} else {
return handler;
}
}
function injectSsrFlag(options) {
return { ...options ?? {}, ssr: true };
}
const needsEscapeRegEx = /[\n\r'\\\u2028\u2029]/;
const quoteNewlineRegEx = /([\n\r'\u2028\u2029])/g;
const backSlashRegEx = /\\/g;
function escapeId(id) {
if (!needsEscapeRegEx.test(id)) return id;
return id.replace(backSlashRegEx, "\\\\").replace(quoteNewlineRegEx, "\\$1");
}
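// escapeId doubles backslashes and backslash-escapes quotes and newline characters so the id
// can be embedded safely inside the single-quoted string literals generated by the
// relative-URL mechanisms below.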
const getResolveUrl = (path2, URL = "URL") => `new ${URL}(${path2}).href`;
const getRelativeUrlFromDocument = (relativePath, umd = false) => getResolveUrl(
`'${escapeId(partialEncodeURIPath(relativePath))}', ${umd ? `typeof document === 'undefined' ? location.href : ` : ""}document.currentScript && document.currentScript.src || document.baseURI`
);
const getFileUrlFromFullPath = (path2) => `require('u' + 'rl').pathToFileURL(${path2}).href`;
const getFileUrlFromRelativePath = (path2) => getFileUrlFromFullPath(`__dirname + '/${escapeId(path2)}'`);
const relativeUrlMechanisms = {
amd: (relativePath) => {
if (relativePath[0] !== ".") relativePath = "./" + relativePath;
return getResolveUrl(
`require.toUrl('${escapeId(relativePath)}'), document.baseURI`
);
},
cjs: (relativePath) => `(typeof document === 'undefined' ? ${getFileUrlFromRelativePath(
relativePath
)} : ${getRelativeUrlFromDocument(relativePath)})`,
es: (relativePath) => getResolveUrl(
`'${escapeId(partialEncodeURIPath(relativePath))}', import.meta.url`
),
iife: (relativePath) => getRelativeUrlFromDocument(relativePath),
// NOTE: make sure rollup generate `module` params
system: (relativePath) => getResolveUrl(
`'${escapeId(partialEncodeURIPath(relativePath))}', module.meta.url`
),
umd: (relativePath) => `(typeof document === 'undefined' && typeof location === 'undefined' ? ${getFileUrlFromRelativePath(
relativePath
)} : ${getRelativeUrlFromDocument(relativePath, true)})`
};
const customRelativeUrlMechanisms = {
...relativeUrlMechanisms,
"worker-iife": (relativePath) => getResolveUrl(
`'${escapeId(partialEncodeURIPath(relativePath))}', self.location.href`
)
};
function toOutputFilePathInJS(filename, type, hostId, hostType, config, toRelative) {
const { renderBuiltUrl } = config.experimental;
let relative = config.base === "" || config.base === "./";
if (renderBuiltUrl) {
const result = renderBuiltUrl(filename, {
hostId,
hostType,
type,
ssr: !!config.build.ssr
});
if (typeof result === "object") {
if (result.runtime) {
return { runtime: result.runtime };
}
if (typeof result.relative === "boolean") {
relative = result.relative;
}
} else if (result) {
return result;
}
}
if (relative && !config.build.ssr) {
return toRelative(filename, hostId);
}
return joinUrlSegments(config.decodedBase, filename);
}
function createToImportMetaURLBasedRelativeRuntime(format, isWorker) {
const formatLong = isWorker && format === "iife" ? "worker-iife" : format;
const toRelativePath = customRelativeUrlMechanisms[formatLong];
return (filename, importer) => ({
runtime: toRelativePath(
path$n.posix.relative(path$n.dirname(importer), filename)
)
});
}
function toOutputFilePathWithoutRuntime(filename, type, hostId, hostType, config, toRelative) {
const { renderBuiltUrl } = config.experimental;
let relative = config.base === "" || config.base === "./";
if (renderBuiltUrl) {
const result = renderBuiltUrl(filename, {
hostId,
hostType,
type,
ssr: !!config.build.ssr
});
if (typeof result === "object") {
if (result.runtime) {
throw new Error(
`{ runtime: "${result.runtime}" } is not supported for assets in ${hostType} files: ${filename}`
);
}
if (typeof result.relative === "boolean") {
relative = result.relative;
}
} else if (result) {
return result;
}
}
if (relative && !config.build.ssr) {
return toRelative(filename, hostId);
} else {
return joinUrlSegments(config.decodedBase, filename);
}
}
const toOutputFilePathInCss = toOutputFilePathWithoutRuntime;
const toOutputFilePathInHtml = toOutputFilePathWithoutRuntime;
function areSeparateFolders(a, b) {
const na = normalizePath$3(a);
const nb = normalizePath$3(b);
return na !== nb && !na.startsWith(withTrailingSlash(nb)) && !nb.startsWith(withTrailingSlash(na));
}
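// Illustrative example (made-up paths): areSeparateFolders('/app/dist', '/app/public') -> true,
// while areSeparateFolders('/app', '/app/public') -> false because one contains the other.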
var build$1 = {
__proto__: null,
build: build,
createToImportMetaURLBasedRelativeRuntime: createToImportMetaURLBasedRelativeRuntime,
onRollupWarning: onRollupWarning,
resolveBuildOptions: resolveBuildOptions,
resolveBuildOutputs: resolveBuildOutputs,
resolveBuildPlugins: resolveBuildPlugins,
resolveLibFilename: resolveLibFilename,
resolveUserExternal: resolveUserExternal,
toOutputFilePathInCss: toOutputFilePathInCss,
toOutputFilePathInHtml: toOutputFilePathInHtml,
toOutputFilePathInJS: toOutputFilePathInJS,
toOutputFilePathWithoutRuntime: toOutputFilePathWithoutRuntime
};
// NOTE: supports Node 6.x
const NOOP = () => {};
const MIMES = /text|javascript|\/json|xml/i;
/**
* @param {any} chunk
* @param {BufferEncoding} enc
* @returns {number}
*/
function getChunkSize(chunk, enc) {
return chunk ? Buffer.byteLength(chunk, enc) : 0;
}
/**
* @param {import('./index.d.mts').Options} [options]
* @returns {import('./index.d.mts').Middleware}
*/
function compression ({ threshold = 1024, level = -1, brotli = false, gzip = true, mimes = MIMES } = {}) {
const brotliOpts = (typeof brotli === 'object' && brotli) || {};
const gzipOpts = (typeof gzip === 'object' && gzip) || {};
// disable Brotli on Node<12.7 where it is unsupported:
if (!zlib$1.createBrotliCompress) brotli = false;
return (req, res, next = NOOP) => {
const accept = req.headers['accept-encoding'] + '';
const encoding = ((brotli && accept.match(/\bbr\b/)) || (gzip && accept.match(/\bgzip\b/)) || [])[0];
// skip if no response body or no supported encoding:
if (req.method === 'HEAD' || !encoding) return next();
/** @type {zlib.Gzip | zlib.BrotliCompress} */
let compress;
/** @type {Array<[string, function]>?} */
let pendingListeners = [];
let pendingStatus = 0;
let started = false;
let size = 0;
function start() {
started = true;
// @ts-ignore
size = res.getHeader('Content-Length') | 0 || size;
const compressible = mimes.test(
String(res.getHeader('Content-Type') || 'text/plain')
);
const cleartext = !res.getHeader('Content-Encoding');
const listeners = pendingListeners || [];
if (compressible && cleartext && size >= threshold) {
res.setHeader('Content-Encoding', encoding);
res.removeHeader('Content-Length');
if (encoding === 'br') {
compress = zlib$1.createBrotliCompress({
params: Object.assign({
[zlib$1.constants.BROTLI_PARAM_QUALITY]: level,
[zlib$1.constants.BROTLI_PARAM_SIZE_HINT]: size,
}, brotliOpts)
});
} else {
compress = zlib$1.createGzip(
Object.assign({ level }, gzipOpts)
);
}
// backpressure
compress.on('data', chunk => write.call(res, chunk) || compress.pause());
on.call(res, 'drain', () => compress.resume());
compress.on('end', () => end.call(res));
listeners.forEach(p => compress.on.apply(compress, p));
} else {
pendingListeners = null;
listeners.forEach(p => on.apply(res, p));
}
writeHead.call(res, pendingStatus || res.statusCode);
}
const { end, write, on, writeHead } = res;
res.writeHead = function (status, reason, headers) {
if (typeof reason !== 'string') [headers, reason] = [reason, headers];
if (headers) for (let k in headers) res.setHeader(k, headers[k]);
pendingStatus = status;
return this;
};
res.write = function (chunk, enc) {
size += getChunkSize(chunk, enc);
if (!started) start();
if (!compress) return write.apply(this, arguments);
return compress.write.apply(compress, arguments);
};
res.end = function (chunk, enc) {
if (arguments.length > 0 && typeof chunk !== 'function') {
size += getChunkSize(chunk, enc);
}
if (!started) start();
if (!compress) return end.apply(this, arguments);
return compress.end.apply(compress, arguments);
};
res.on = function (type, listener) {
if (!pendingListeners) on.call(this, type, listener);
else if (compress) compress.on(type, listener);
else pendingListeners.push([type, listener]);
return this;
};
next();
};
}
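// The compression middleware above defers the real writeHead/write/end calls: the first
// write (or end) triggers start(), which decides from Accept-Encoding, Content-Type,
// Content-Encoding and the declared/observed size vs. `threshold` whether to stream the
// response through createBrotliCompress/createGzip or pass it through unmodified; HEAD
// requests and clients without a supported encoding skip compression entirely.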
function resolvePreviewOptions(preview2, server) {
return {
port: preview2?.port,
strictPort: preview2?.strictPort ?? server.strictPort,
host: preview2?.host ?? server.host,
https: preview2?.https ?? server.https,
open: preview2?.open ?? server.open,
proxy: preview2?.proxy ?? server.proxy,
cors: preview2?.cors ?? server.cors,
headers: preview2?.headers ?? server.headers
};
}
async function preview(inlineConfig = {}) {
const config = await resolveConfig(
inlineConfig,
"serve",
"production",
"production",
true
);
const distDir = path$n.resolve(config.root, config.build.outDir);
if (!fs__default.existsSync(distDir) && // error if no plugins implement `configurePreviewServer`
config.plugins.every((plugin) => !plugin.configurePreviewServer) && // error if called in CLI only. programmatic usage could access `httpServer`
// and affect file serving
process.argv[1]?.endsWith(path$n.normalize("bin/vite.js")) && process.argv[2] === "preview") {
throw new Error(
`The directory "${config.build.outDir}" does not exist. Did you build your project?`
);
}
const app = connect$1();
const httpServer = await resolveHttpServer(
config.preview,
app,
await resolveHttpsConfig(config.preview?.https)
);
setClientErrorHandler(httpServer, config.logger);
const options = config.preview;
const logger = config.logger;
const closeHttpServer = createServerCloseFn(httpServer);
const server = {
config,
middlewares: app,
httpServer,
async close() {
teardownSIGTERMListener(closeServerAndExit);
await closeHttpServer();
},
resolvedUrls: null,
printUrls() {
if (server.resolvedUrls) {
printServerUrls(server.resolvedUrls, options.host, logger.info);
} else {
throw new Error("cannot print server URLs before server is listening.");
}
},
bindCLIShortcuts(options2) {
bindCLIShortcuts(server, options2);
}
};
const closeServerAndExit = async () => {
try {
await server.close();
} finally {
process.exit();
}
};
setupSIGTERMListener(closeServerAndExit);
const postHooks = [];
for (const hook of config.getSortedPluginHooks("configurePreviewServer")) {
postHooks.push(await hook(server));
}
const { cors } = config.preview;
if (cors !== false) {
app.use(corsMiddleware(typeof cors === "boolean" ? {} : cors));
}
const { proxy } = config.preview;
if (proxy) {
app.use(proxyMiddleware(httpServer, proxy, config));
}
app.use(compression());
if (config.base !== "/") {
app.use(baseMiddleware(config.rawBase, false));
}
const headers = config.preview.headers;
const viteAssetMiddleware = (...args) => sirv(distDir, {
etag: true,
dev: true,
extensions: [],
ignores: false,
setHeaders(res) {
if (headers) {
for (const name in headers) {
res.setHeader(name, headers[name]);
}
}
},
shouldServe(filePath) {
return shouldServeFile(filePath, distDir);
}
})(...args);
app.use(viteAssetMiddleware);
if (config.appType === "spa" || config.appType === "mpa") {
app.use(htmlFallbackMiddleware(distDir, config.appType === "spa"));
}
postHooks.forEach((fn) => fn && fn());
if (config.appType === "spa" || config.appType === "mpa") {
app.use(indexHtmlMiddleware(distDir, server));
app.use(notFoundMiddleware());
}
const hostname = await resolveHostname(options.host);
const port = options.port ?? DEFAULT_PREVIEW_PORT;
await httpServerStart(httpServer, {
port,
strictPort: options.strictPort,
host: hostname.host,
logger
});
server.resolvedUrls = await resolveServerUrls(
httpServer,
config.preview,
config
);
if (options.open) {
const url = server.resolvedUrls?.local[0] ?? server.resolvedUrls?.network[0];
if (url) {
const path2 = typeof options.open === "string" ? new URL(options.open, url).href : url;
openBrowser(path2, true, logger);
}
}
return server;
}
var preview$1 = {
__proto__: null,
preview: preview,
resolvePreviewOptions: resolvePreviewOptions
};
function resolveSSROptions(ssr, preserveSymlinks) {
ssr ??= {};
const optimizeDeps = ssr.optimizeDeps ?? {};
const target = "node";
return {
target,
...ssr,
optimizeDeps: {
...optimizeDeps,
noDiscovery: true,
// always true for ssr
esbuildOptions: {
preserveSymlinks,
...optimizeDeps.esbuildOptions
}
}
};
}
const debug = createDebugger("vite:config");
const promisifiedRealpath = promisify$4(fs__default.realpath);
function defineConfig(config) {
return config;
}
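// defineConfig is an identity helper: it exists only so editors and TypeScript can infer the
// UserConfig type of the object (or function) passed to it in vite.config files.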
function checkBadCharactersInPath(path2, logger) {
const badChars = [];
if (path2.includes("#")) {
badChars.push("#");
}
if (path2.includes("?")) {
badChars.push("?");
}
if (badChars.length > 0) {
const charString = badChars.map((c) => `"${c}"`).join(" and ");
const inflectedChars = badChars.length > 1 ? "characters" : "character";
logger.warn(
colors$1.yellow(
`The project root contains the ${charString} ${inflectedChars} (${colors$1.cyan(
path2
)}), which may not work when running Vite. Consider renaming the directory to remove the characters.`
)
);
}
}
async function resolveConfig(inlineConfig, command, defaultMode = "development", defaultNodeEnv = "development", isPreview = false) {
let config = inlineConfig;
let configFileDependencies = [];
let mode = inlineConfig.mode || defaultMode;
const isNodeEnvSet = !!process.env.NODE_ENV;
const packageCache = /* @__PURE__ */ new Map();
if (!isNodeEnvSet) {
process.env.NODE_ENV = defaultNodeEnv;
}
const configEnv = {
mode,
command,
isSsrBuild: command === "build" && !!config.build?.ssr,
isPreview
};
let { configFile } = config;
if (configFile !== false) {
const loadResult = await loadConfigFromFile(
configEnv,
configFile,
config.root,
config.logLevel,
config.customLogger
);
if (loadResult) {
config = mergeConfig(loadResult.config, config);
configFile = loadResult.path;
configFileDependencies = loadResult.dependencies;
}
}
mode = inlineConfig.mode || config.mode || mode;
configEnv.mode = mode;
const filterPlugin = (p) => {
if (!p) {
return false;
} else if (!p.apply) {
return true;
} else if (typeof p.apply === "function") {
return p.apply({ ...config, mode }, configEnv);
} else {
return p.apply === command;
}
};
const rawUserPlugins = (await asyncFlatten(config.plugins || [])).filter(filterPlugin);
const [prePlugins, normalPlugins, postPlugins] = sortUserPlugins(rawUserPlugins);
const userPlugins = [...prePlugins, ...normalPlugins, ...postPlugins];
config = await runConfigHook(config, userPlugins, configEnv);
const logger = createLogger(config.logLevel, {
allowClearScreen: config.clearScreen,
customLogger: config.customLogger
});
const resolvedRoot = normalizePath$3(
config.root ? path$n.resolve(config.root) : process.cwd()
);
checkBadCharactersInPath(resolvedRoot, logger);
const clientAlias = [
{
find: /^\/?@vite\/env/,
replacement: path$n.posix.join(FS_PREFIX, normalizePath$3(ENV_ENTRY))
},
{
find: /^\/?@vite\/client/,
replacement: path$n.posix.join(FS_PREFIX, normalizePath$3(CLIENT_ENTRY))
}
];
const resolvedAlias = normalizeAlias(
mergeAlias(clientAlias, config.resolve?.alias || [])
);
const resolveOptions = {
mainFields: config.resolve?.mainFields ?? DEFAULT_MAIN_FIELDS,
conditions: config.resolve?.conditions ?? [],
extensions: config.resolve?.extensions ?? DEFAULT_EXTENSIONS,
dedupe: config.resolve?.dedupe ?? [],
preserveSymlinks: config.resolve?.preserveSymlinks ?? false,
alias: resolvedAlias
};
if (
// @ts-expect-error removed field
config.resolve?.browserField === false && resolveOptions.mainFields.includes("browser")
) {
logger.warn(
colors$1.yellow(
`\`resolve.browserField\` is set to false, but the option is removed in favour of the 'browser' string in \`resolve.mainFields\`. You may want to update \`resolve.mainFields\` to remove the 'browser' string and preserve the previous browser behaviour.`
)
);
}
const envDir = config.envDir ? normalizePath$3(path$n.resolve(resolvedRoot, config.envDir)) : resolvedRoot;
const userEnv = inlineConfig.envFile !== false && loadEnv(mode, envDir, resolveEnvPrefix(config));
const userNodeEnv = process.env.VITE_USER_NODE_ENV;
if (!isNodeEnvSet && userNodeEnv) {
if (userNodeEnv === "development") {
process.env.NODE_ENV = "development";
} else {
logger.warn(
`NODE_ENV=${userNodeEnv} is not supported in the .env file. Only NODE_ENV=development is supported to create a development build of your project. If you need to set process.env.NODE_ENV, you can set it in the Vite config instead.`
);
}
}
const isProduction = process.env.NODE_ENV === "production";
const isBuild = command === "build";
const relativeBaseShortcut = config.base === "" || config.base === "./";
const resolvedBase = relativeBaseShortcut ? !isBuild || config.build?.ssr ? "/" : "./" : resolveBaseUrl(config.base, isBuild, logger) ?? "/";
const resolvedBuildOptions = resolveBuildOptions(
config.build,
logger,
resolvedRoot
);
const pkgDir = findNearestPackageData(resolvedRoot, packageCache)?.dir;
const cacheDir = normalizePath$3(
config.cacheDir ? path$n.resolve(resolvedRoot, config.cacheDir) : pkgDir ? path$n.join(pkgDir, `node_modules/.vite`) : path$n.join(resolvedRoot, `.vite`)
);
const assetsFilter = config.assetsInclude && (!Array.isArray(config.assetsInclude) || config.assetsInclude.length) ? createFilter(config.assetsInclude) : () => false;
const createResolver = (options) => {
let aliasContainer;
let resolverContainer;
return async (id, importer, aliasOnly, ssr2) => {
let container;
if (aliasOnly) {
container = aliasContainer || (aliasContainer = await createPluginContainer({
...resolved,
plugins: [alias$1({ entries: resolved.resolve.alias })]
}));
} else {
container = resolverContainer || (resolverContainer = await createPluginContainer({
...resolved,
plugins: [
alias$1({ entries: resolved.resolve.alias }),
resolvePlugin({
...resolved.resolve,
root: resolvedRoot,
isProduction,
isBuild: command === "build",
ssrConfig: resolved.ssr,
asSrc: true,
preferRelative: false,
tryIndex: true,
...options,
idOnly: true,
fsUtils: getFsUtils(resolved)
})
]
}));
}
return (await container.resolveId(id, importer, {
ssr: ssr2,
scan: options?.scan
}))?.id;
};
};
const { publicDir } = config;
const resolvedPublicDir = publicDir !== false && publicDir !== "" ? normalizePath$3(
path$n.resolve(
resolvedRoot,
typeof publicDir === "string" ? publicDir : "public"
)
) : "";
const server = resolveServerOptions(resolvedRoot, config.server, logger);
const ssr = resolveSSROptions(config.ssr, resolveOptions.preserveSymlinks);
const optimizeDeps = config.optimizeDeps || {};
const BASE_URL = resolvedBase;
let resolved;
let createUserWorkerPlugins = config.worker?.plugins;
if (Array.isArray(createUserWorkerPlugins)) {
createUserWorkerPlugins = () => config.worker?.plugins;
logger.warn(
colors$1.yellow(
`worker.plugins is now a function that returns an array of plugins. Please update your Vite config accordingly.
`
)
);
}
const createWorkerPlugins = async function(bundleChain) {
const rawWorkerUserPlugins = (await asyncFlatten(createUserWorkerPlugins?.() || [])).filter(filterPlugin);
let workerConfig = mergeConfig({}, config);
const [workerPrePlugins, workerNormalPlugins, workerPostPlugins] = sortUserPlugins(rawWorkerUserPlugins);
const workerUserPlugins = [
...workerPrePlugins,
...workerNormalPlugins,
...workerPostPlugins
];
workerConfig = await runConfigHook(
workerConfig,
workerUserPlugins,
configEnv
);
const workerResolved = {
...workerConfig,
...resolved,
isWorker: true,
mainConfig: resolved,
bundleChain
};
const resolvedWorkerPlugins = await resolvePlugins(
workerResolved,
workerPrePlugins,
workerNormalPlugins,
workerPostPlugins
);
await Promise.all(
createPluginHookUtils(resolvedWorkerPlugins).getSortedPluginHooks("configResolved").map((hook) => hook(workerResolved))
);
return resolvedWorkerPlugins;
};
const resolvedWorkerOptions = {
format: config.worker?.format || "iife",
plugins: createWorkerPlugins,
rollupOptions: config.worker?.rollupOptions || {}
};
const base = withTrailingSlash(resolvedBase);
resolved = {
configFile: configFile ? normalizePath$3(configFile) : void 0,
configFileDependencies: configFileDependencies.map(
(name) => normalizePath$3(path$n.resolve(name))
),
inlineConfig,
root: resolvedRoot,
base,
decodedBase: decodeURI(base),
rawBase: resolvedBase,
resolve: resolveOptions,
publicDir: resolvedPublicDir,
cacheDir,
command,
mode,
ssr,
isWorker: false,
mainConfig: null,
bundleChain: [],
isProduction,
plugins: userPlugins,
css: resolveCSSOptions(config.css),
esbuild: config.esbuild === false ? false : {
jsxDev: !isProduction,
...config.esbuild
},
server,
build: resolvedBuildOptions,
preview: resolvePreviewOptions(config.preview, server),
envDir,
env: {
...userEnv,
BASE_URL,
MODE: mode,
DEV: !isProduction,
PROD: isProduction
},
assetsInclude(file) {
return DEFAULT_ASSETS_RE.test(file) || assetsFilter(file);
},
logger,
packageCache,
createResolver,
optimizeDeps: {
holdUntilCrawlEnd: true,
...optimizeDeps,
esbuildOptions: {
preserveSymlinks: resolveOptions.preserveSymlinks,
...optimizeDeps.esbuildOptions
}
},
worker: resolvedWorkerOptions,
appType: config.appType ?? "spa",
experimental: {
importGlobRestoreExtension: false,
hmrPartialAccept: false,
...config.experimental
},
getSortedPlugins: void 0,
getSortedPluginHooks: void 0
};
resolved = {
...config,
...resolved
};
resolved.plugins = await resolvePlugins(
resolved,
prePlugins,
normalPlugins,
postPlugins
);
Object.assign(resolved, createPluginHookUtils(resolved.plugins));
await Promise.all(
resolved.getSortedPluginHooks("configResolved").map((hook) => hook(resolved))
);
optimizeDepsDisabledBackwardCompatibility(resolved, resolved.optimizeDeps);
optimizeDepsDisabledBackwardCompatibility(
resolved,
resolved.ssr.optimizeDeps,
"ssr."
);
debug?.(`using resolved config: %O`, {
...resolved,
plugins: resolved.plugins.map((p) => p.name),
worker: {
...resolved.worker,
plugins: `() => plugins`
}
});
if (config.build?.terserOptions && config.build.minify && config.build.minify !== "terser") {
logger.warn(
colors$1.yellow(
`build.terserOptions is specified but build.minify is not set to use Terser. Note that Vite now defaults to esbuild for minification. If you still prefer Terser, set build.minify to "terser".`
)
);
}
const outputOption = config.build?.rollupOptions?.output ?? [];
if (Array.isArray(outputOption)) {
const assetFileNamesList = outputOption.map(
(output) => output.assetFileNames
);
if (assetFileNamesList.length > 1) {
const firstAssetFileNames = assetFileNamesList[0];
const hasDifferentReference = assetFileNamesList.some(
(assetFileNames) => assetFileNames !== firstAssetFileNames
);
if (hasDifferentReference) {
resolved.logger.warn(
colors$1.yellow(`
assetFileNames isn't the same for every build.rollupOptions.output. Vite only supports a single pattern across all outputs.
`)
);
}
}
}
if (
// @ts-expect-error Option removed
config.legacy?.buildSsrCjsExternalHeuristics || // @ts-expect-error Option removed
config.ssr?.format === "cjs"
) {
resolved.logger.warn(
colors$1.yellow(`
(!) Experimental legacy.buildSsrCjsExternalHeuristics and ssr.format were removed in Vite 5.
The only SSR output format is ESM. Find more information at https://github.com/vitejs/vite/discussions/13816.
`)
);
}
const resolvedBuildOutDir = normalizePath$3(
path$n.resolve(resolved.root, resolved.build.outDir)
);
if (isParentDirectory(resolvedBuildOutDir, resolved.root) || resolvedBuildOutDir === resolved.root) {
resolved.logger.warn(
colors$1.yellow(`
(!) build.outDir must not be the same directory as root or a parent directory of root, as this could cause Vite to overwrite source files with build outputs.
`)
);
}
return resolved;
}
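// resolveBaseUrl normalizes the user-supplied `base` option. Illustrative cases, derived
// from the logic below (not exhaustive):
//   resolveBaseUrl("./nested", true, logger)                          // -> "/"      (relative bases are rejected with a warning)
//   resolveBaseUrl("foo/", false, logger)                             // -> "/foo/"  (leading slash added via URL parsing)
//   resolveBaseUrl("https://cdn.example.com/assets/", true, logger)   // -> full URL kept as-is for builds
//   resolveBaseUrl("https://cdn.example.com/assets/", false, logger)  // -> "/assets/" (only the pathname is used in dev)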
function resolveBaseUrl(base = "/", isBuild, logger) {
if (base[0] === ".") {
logger.warn(
colors$1.yellow(
colors$1.bold(
`(!) invalid "base" option: "${base}". The value can only be an absolute URL, "./", or an empty string.`
)
)
);
return "/";
}
const isExternal = isExternalUrl(base);
if (!isExternal && base[0] !== "/") {
logger.warn(
colors$1.yellow(
colors$1.bold(`(!) "base" option should start with a slash.`)
)
);
}
if (!isBuild || !isExternal) {
base = new URL(base, "http://vitejs.dev").pathname;
if (base[0] !== "/") {
base = "/" + base;
}
}
return base;
}
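// sortUserPlugins splits a (possibly nested) plugin array into [pre, normal, post] buckets
// based on each plugin's `enforce` field, e.g.
//   sortUserPlugins([[{ name: "a", enforce: "pre" }], { name: "b" }])
//   // -> [[{ name: "a", enforce: "pre" }], [{ name: "b" }], []]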
function sortUserPlugins(plugins) {
const prePlugins = [];
const postPlugins = [];
const normalPlugins = [];
if (plugins) {
plugins.flat().forEach((p) => {
if (p.enforce === "pre") prePlugins.push(p);
else if (p.enforce === "post") postPlugins.push(p);
else normalPlugins.push(p);
});
}
return [prePlugins, normalPlugins, postPlugins];
}
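// loadConfigFromFile locates the config file (an explicit `configFile` path, or the first
// DEFAULT_CONFIG_FILES match under `configRoot`), bundles it with esbuild, evaluates it, and
// awaits a functional export against `configEnv`. Hypothetical caller-side sketch (not part
// of this bundle):
//   const loaded = await loadConfigFromFile({ command: "serve", mode: "development" });
//   if (loaded) {
//     const { path, config, dependencies } = loaded; // dependencies = files pulled into the config bundle
//   }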
async function loadConfigFromFile(configEnv, configFile, configRoot = process.cwd(), logLevel, customLogger) {
const start = performance$1.now();
const getTime = () => `${(performance$1.now() - start).toFixed(2)}ms`;
let resolvedPath;
if (configFile) {
resolvedPath = path$n.resolve(configFile);
} else {
for (const filename of DEFAULT_CONFIG_FILES) {
const filePath = path$n.resolve(configRoot, filename);
if (!fs__default.existsSync(filePath)) continue;
resolvedPath = filePath;
break;
}
}
if (!resolvedPath) {
debug?.("no config file found.");
return null;
}
const isESM = isFilePathESM(resolvedPath);
try {
const bundled = await bundleConfigFile(resolvedPath, isESM);
const userConfig = await loadConfigFromBundledFile(
resolvedPath,
bundled.code,
isESM
);
debug?.(`bundled config file loaded in ${getTime()}`);
const config = await (typeof userConfig === "function" ? userConfig(configEnv) : userConfig);
if (!isObject$1(config)) {
throw new Error(`config must export or return an object.`);
}
return {
path: normalizePath$3(resolvedPath),
config,
dependencies: bundled.dependencies
};
} catch (e) {
createLogger(logLevel, { customLogger }).error(
colors$1.red(`failed to load config from ${resolvedPath}`),
{
error: e
}
);
throw e;
}
}
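// bundleConfigFile bundles the config file with esbuild for the current Node version,
// externalizing bare imports and injecting stable replacements for __dirname, __filename
// and import.meta.url so the evaluated config behaves as if it ran from its original
// location rather than from the temporary bundle.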
async function bundleConfigFile(fileName, isESM) {
const dirnameVarName = "__vite_injected_original_dirname";
const filenameVarName = "__vite_injected_original_filename";
const importMetaUrlVarName = "__vite_injected_original_import_meta_url";
const result = await build$3({
absWorkingDir: process.cwd(),
entryPoints: [fileName],
write: false,
target: [`node${process.versions.node}`],
platform: "node",
bundle: true,
format: isESM ? "esm" : "cjs",
mainFields: ["main"],
sourcemap: "inline",
metafile: true,
define: {
__dirname: dirnameVarName,
__filename: filenameVarName,
"import.meta.url": importMetaUrlVarName,
"import.meta.dirname": dirnameVarName,
"import.meta.filename": filenameVarName
},
plugins: [
{
name: "externalize-deps",
setup(build2) {
const packageCache = /* @__PURE__ */ new Map();
const resolveByViteResolver = (id, importer, isRequire) => {
return tryNodeResolve(
id,
importer,
{
root: path$n.dirname(fileName),
isBuild: true,
isProduction: true,
preferRelative: false,
tryIndex: true,
mainFields: [],
conditions: [],
overrideConditions: ["node"],
dedupe: [],
extensions: DEFAULT_EXTENSIONS,
preserveSymlinks: false,
packageCache,
isRequire
},
false
)?.id;
};
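          // Externalize every bare import. Node builtins stay external untouched; other
          // packages are resolved with Vite's node resolver so that ESM-only packages
          // loaded via `require` fail with an actionable error instead of a cryptic
          // esbuild resolution error.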
build2.onResolve(
{ filter: /^[^.].*/ },
async ({ path: id, importer, kind }) => {
if (kind === "entry-point" || path$n.isAbsolute(id) || isNodeBuiltin(id)) {
return;
}
if (isBuiltin(id)) {
return { external: true };
}
const isImport = isESM || kind === "dynamic-import";
let idFsPath;
try {
idFsPath = resolveByViteResolver(id, importer, !isImport);
} catch (e) {
if (!isImport) {
let canResolveWithImport = false;
try {
canResolveWithImport = !!resolveByViteResolver(
id,
importer,
false
);
} catch {
}
if (canResolveWithImport) {
throw new Error(
`Failed to resolve ${JSON.stringify(
id
)}. This package is ESM only, but an attempt was made to load it with \`require\`. See https://vitejs.dev/guide/troubleshooting.html#this-package-is-esm-only for more details.`
);
}
}
throw e;
}
if (idFsPath && isImport) {
idFsPath = pathToFileURL(idFsPath).href;
}
if (idFsPath && !isImport && isFilePathESM(idFsPath, packageCache)) {
throw new Error(
`${JSON.stringify(
id
)} resolved to an ESM file. ESM files cannot be loaded by \`require\`. See https://vitejs.dev/guide/troubleshooting.html#this-package-is-esm-only for more details.`
);
}
return {
path: idFsPath,
external: true
};
}
);
}
},
{
name: "inject-file-scope-variables",
setup(build2) {
build2.onLoad({ filter: /\.[cm]?[jt]s$/ }, async (args) => {
const contents = await fsp.readFile(args.path, "utf-8");
const injectValues = `const ${dirnameVarName} = ${JSON.stringify(
path$n.dirname(args.path)
)};const ${filenameVarName} = ${JSON.stringify(args.path)};const ${importMetaUrlVarName} = ${JSON.stringify(
pathToFileURL(args.path).href
)};`;
return {
loader: args.path.endsWith("ts") ? "ts" : "js",
contents: injectValues + contents
};
});
}
}
]
});
const { text } = result.outputFiles[0];
return {
code: text,
dependencies: result.metafile ? Object.keys(result.metafile.inputs) : []
};
}
const _require = createRequire$1(import.meta.url);
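// loadConfigFromBundledFile evaluates the bundled config code. For ESM configs the code is
// written to a unique temporary `.mjs` file next to the original, imported via a file URL,
// and the temp file is removed afterwards. For CJS configs, require.extensions is patched
// temporarily so that requiring the original path compiles the bundled code in place,
// avoiding a temp file entirely.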
async function loadConfigFromBundledFile(fileName, bundledCode, isESM) {
if (isESM) {
const fileBase = `${fileName}.timestamp-${Date.now()}-${Math.random().toString(16).slice(2)}`;
const fileNameTmp = `${fileBase}.mjs`;
const fileUrl = `${pathToFileURL(fileBase)}.mjs`;
await fsp.writeFile(fileNameTmp, bundledCode);
try {
return (await import(fileUrl)).default;
} finally {
fs__default.unlink(fileNameTmp, () => {
});
}
} else {
const extension = path$n.extname(fileName);
const realFileName = await promisifiedRealpath(fileName);
const loaderExt = extension in _require.extensions ? extension : ".js";
const defaultLoader = _require.extensions[loaderExt];
_require.extensions[loaderExt] = (module, filename) => {
if (filename === realFileName) {
module._compile(bundledCode, filename);
} else {
defaultLoader(module, filename);
}
};
delete _require.cache[_require.resolve(fileName)];
const raw = _require(fileName);
_require.extensions[loaderExt] = defaultLoader;
return raw.__esModule ? raw.default : raw;
}
}
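// runConfigHook runs each plugin's `config` hook in enforce-sorted order against the current
// config and merges any returned partial config via mergeConfig, so later hooks see the
// result of earlier ones.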
async function runConfigHook(config, plugins, configEnv) {
let conf = config;
for (const p of getSortedPluginsByHook("config", plugins)) {
const hook = p.config;
const handler = getHookHandler(hook);
if (handler) {
const res = await handler(conf, configEnv);
if (res) {
conf = mergeConfig(conf, res);
}
}
}
return conf;
}
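// getDepOptimizationConfig picks the client or SSR dep-optimization options from the resolved config.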
function getDepOptimizationConfig(config, ssr) {
return ssr ? config.ssr.optimizeDeps : config.optimizeDeps;
}
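// The deps optimizer counts as enabled unless discovery is turned off and nothing is explicitly
// included: it is disabled only when optimizeDeps.noDiscovery is true and optimizeDeps.include
// is empty or undefined.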
function isDepsOptimizerEnabled(config, ssr) {
const optimizeDeps = getDepOptimizationConfig(config, ssr);
return !(optimizeDeps.noDiscovery && !optimizeDeps.include?.length);
}
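// Backward compatibility for the removed optimizeDeps.disabled option: `true`/"dev" is
// translated into noDiscovery plus an empty include (and an emptied build.commonjsOptions.include
// is reset to undefined), while `false`/"build" now has no effect; both cases emit a
// deprecation warning.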
function optimizeDepsDisabledBackwardCompatibility(resolved, optimizeDeps, optimizeDepsPath = "") {
const optimizeDepsDisabled = optimizeDeps.disabled;
if (optimizeDepsDisabled !== void 0) {
if (optimizeDepsDisabled === true || optimizeDepsDisabled === "dev") {
const commonjsOptionsInclude = resolved.build?.commonjsOptions?.include;
const commonjsPluginDisabled = Array.isArray(commonjsOptionsInclude) && commonjsOptionsInclude.length === 0;
optimizeDeps.noDiscovery = true;
optimizeDeps.include = void 0;
if (commonjsPluginDisabled) {
resolved.build.commonjsOptions.include = void 0;
}
resolved.logger.warn(
colors$1.yellow(`(!) Experimental ${optimizeDepsPath}optimizeDeps.disabled and deps pre-bundling during build were removed in Vite 5.1.
To disable the deps optimizer, set ${optimizeDepsPath}optimizeDeps.noDiscovery to true and leave ${optimizeDepsPath}optimizeDeps.include undefined or empty.
Please remove ${optimizeDepsPath}optimizeDeps.disabled from your config.
${commonjsPluginDisabled ? "An empty config.build.commonjsOptions.include will be ignored so that CJS is still supported during build. This config should also be removed." : ""}
`)
);
} else if (optimizeDepsDisabled === false || optimizeDepsDisabled === "build") {
resolved.logger.warn(
colors$1.yellow(`(!) Experimental ${optimizeDepsPath}optimizeDeps.disabled and deps pre-bundling during build were removed in Vite 5.1.
Setting it to ${optimizeDepsDisabled} now has no effect.
Please remove ${optimizeDepsPath}optimizeDeps.disabled from your config.
`)
);
}
}
}
export { colors$1 as A, getDefaultExportFromCjs as B, commonjsGlobal as C, index$1 as D, index as E, build$1 as F, preview$1 as G, arraify as a, build as b, createServer as c, defineConfig as d, preprocessCSS as e, formatPostcssSourceMap as f, buildErrorMessage as g, fetchModule as h, isInNodeModules$1 as i, mergeAlias as j, createFilter as k, loadConfigFromFile as l, mergeConfig as m, normalizePath$3 as n, optimizeDeps as o, preview as p, rollupVersion as q, resolveConfig as r, sortUserPlugins as s, transformWithEsbuild as t, send as u, createLogger as v, searchForWorkspaceRoot as w, isFileServingAllowed as x, loadEnv as y, resolveEnvPrefix as z };