update
This commit is contained in:
@@ -0,0 +1,800 @@
|
||||
/*
|
||||
MIT License http://www.opensource.org/licenses/mit-license.php
|
||||
Author Tobias Koppers @sokra
|
||||
*/
|
||||
|
||||
"use strict";
|
||||
|
||||
const { AsyncSeriesBailHook, AsyncSeriesHook, SyncHook } = require("tapable");
|
||||
const createInnerContext = require("./createInnerContext");
|
||||
const { parseIdentifier } = require("./util/identifier");
|
||||
const {
|
||||
normalize,
|
||||
cachedJoin: join,
|
||||
getType,
|
||||
PathType
|
||||
} = require("./util/path");
|
||||
|
||||
/** @typedef {import("./ResolverFactory").ResolveOptions} ResolveOptions */
|
||||
|
||||
/** @typedef {Error & { details?: string }} ErrorWithDetail */
|
||||
|
||||
/** @typedef {(err: ErrorWithDetail | null, res?: string | false, req?: ResolveRequest) => void} ResolveCallback */
|
||||
|
||||
/**
|
||||
* @typedef {Object} PossibleFileSystemError
|
||||
* @property {string=} code
|
||||
* @property {number=} errno
|
||||
* @property {string=} path
|
||||
* @property {string=} syscall
|
||||
*/
|
||||
|
||||
/**
|
||||
* @template T
|
||||
* @callback FileSystemCallback
|
||||
* @param {PossibleFileSystemError & Error | null} err
|
||||
* @param {T=} result
|
||||
*/
|
||||
|
||||
/**
|
||||
* @typedef {string | Buffer | URL} PathLike
|
||||
*/
|
||||
|
||||
/**
|
||||
* @typedef {PathLike | number} PathOrFileDescriptor
|
||||
*/
|
||||
|
||||
/**
|
||||
* @typedef {Object} ObjectEncodingOptions
|
||||
* @property {BufferEncoding | null | undefined} [encoding]
|
||||
*/
|
||||
|
||||
/** @typedef {function(NodeJS.ErrnoException | null, string=): void} StringCallback */
|
||||
/** @typedef {function(NodeJS.ErrnoException | null, Buffer=): void} BufferCallback */
|
||||
/** @typedef {function(NodeJS.ErrnoException | null, (string | Buffer)=): void} StringOrBufferCallback */
|
||||
/** @typedef {function(NodeJS.ErrnoException | null, IStats=): void} StatsCallback */
|
||||
/** @typedef {function(NodeJS.ErrnoException | null, IBigIntStats=): void} BigIntStatsCallback */
|
||||
/** @typedef {function(NodeJS.ErrnoException | null, (IStats | IBigIntStats)=): void} StatsOrBigIntStatsCallback */
|
||||
/** @typedef {function(NodeJS.ErrnoException | Error | null, JsonObject=): void} ReadJsonCallback */
|
||||
/** @typedef {function(NodeJS.ErrnoException | null, string[]=): void} ReaddirStringCallback */
|
||||
/** @typedef {function(NodeJS.ErrnoException | null, Buffer[]=): void} ReaddirBufferCallback */
|
||||
/** @typedef {function(NodeJS.ErrnoException | null, (string[] | Buffer[])=): void} ReaddirStringOrBufferCallback */
|
||||
/** @typedef {function(NodeJS.ErrnoException | null, Dirent[]=): void} ReaddirDirentCallback */
|
||||
|
||||
/**
|
||||
* @template T
|
||||
* @typedef {Object} IStatsBase
|
||||
* @property {() => boolean} isFile
|
||||
* @property {() => boolean} isDirectory
|
||||
* @property {() => boolean} isBlockDevice
|
||||
* @property {() => boolean} isCharacterDevice
|
||||
* @property {() => boolean} isSymbolicLink
|
||||
* @property {() => boolean} isFIFO
|
||||
* @property {() => boolean} isSocket
|
||||
* @property {T} dev
|
||||
* @property {T} ino
|
||||
* @property {T} mode
|
||||
* @property {T} nlink
|
||||
* @property {T} uid
|
||||
* @property {T} gid
|
||||
* @property {T} rdev
|
||||
* @property {T} size
|
||||
* @property {T} blksize
|
||||
* @property {T} blocks
|
||||
* @property {T} atimeMs
|
||||
* @property {T} mtimeMs
|
||||
* @property {T} ctimeMs
|
||||
* @property {T} birthtimeMs
|
||||
* @property {Date} atime
|
||||
* @property {Date} mtime
|
||||
* @property {Date} ctime
|
||||
* @property {Date} birthtime
|
||||
*/
|
||||
|
||||
/**
|
||||
* @typedef {IStatsBase<number>} IStats
|
||||
*/
|
||||
|
||||
/**
|
||||
* @typedef {IStatsBase<bigint> & { atimeNs: bigint, mtimeNs: bigint, ctimeNs: bigint, birthtimeNs: bigint }} IBigIntStats
|
||||
*/
|
||||
|
||||
/**
|
||||
* @typedef {Object} Dirent
|
||||
* @property {() => boolean} isFile
|
||||
* @property {() => boolean} isDirectory
|
||||
* @property {() => boolean} isBlockDevice
|
||||
* @property {() => boolean} isCharacterDevice
|
||||
* @property {() => boolean} isSymbolicLink
|
||||
* @property {() => boolean} isFIFO
|
||||
* @property {() => boolean} isSocket
|
||||
* @property {string} name
|
||||
* @property {string} path
|
||||
*/
|
||||
|
||||
/**
|
||||
* @typedef {Object} StatOptions
|
||||
* @property {(boolean | undefined)=} bigint
|
||||
*/
|
||||
|
||||
/**
|
||||
* @typedef {Object} StatSyncOptions
|
||||
* @property {(boolean | undefined)=} bigint
|
||||
* @property {(boolean | undefined)=} throwIfNoEntry
|
||||
*/
|
||||
|
||||
/**
|
||||
* @typedef {{
|
||||
* (path: PathOrFileDescriptor, options: ({ encoding?: null | undefined, flag?: string | undefined } & import("events").Abortable) | undefined | null, callback: BufferCallback): void;
|
||||
* (path: PathOrFileDescriptor, options: ({ encoding: BufferEncoding, flag?: string | undefined } & import("events").Abortable) | BufferEncoding, callback: StringCallback): void;
|
||||
* (path: PathOrFileDescriptor, options: (ObjectEncodingOptions & { flag?: string | undefined } & import("events").Abortable) | BufferEncoding | undefined | null, callback: StringOrBufferCallback): void;
|
||||
* (path: PathOrFileDescriptor, callback: BufferCallback): void;
|
||||
* }} ReadFile
|
||||
*/
|
||||
|
||||
/**
|
||||
* @typedef {ObjectEncodingOptions | BufferEncoding | undefined | null} EncodingOption
|
||||
*/
|
||||
|
||||
/**
|
||||
* @typedef {'buffer'| { encoding: 'buffer' }} BufferEncodingOption
|
||||
*/
|
||||
|
||||
/**
|
||||
* @typedef {{
|
||||
* (path: PathOrFileDescriptor, options?: { encoding?: null | undefined, flag?: string | undefined } | null): Buffer;
|
||||
* (path: PathOrFileDescriptor, options: { encoding: BufferEncoding, flag?: string | undefined } | BufferEncoding): string;
|
||||
* (path: PathOrFileDescriptor, options?: (ObjectEncodingOptions & { flag?: string | undefined }) | BufferEncoding | null): string | Buffer;
|
||||
* }} ReadFileSync
|
||||
*/
|
||||
|
||||
/**
|
||||
* @typedef {{
|
||||
* (path: PathLike, options: { encoding: BufferEncoding | null, withFileTypes?: false | undefined, recursive?: boolean | undefined } | BufferEncoding | undefined | null, callback: ReaddirStringCallback): void;
|
||||
* (path: PathLike, options: { encoding: 'buffer', withFileTypes?: false | undefined, recursive?: boolean | undefined } | 'buffer', callback: ReaddirBufferCallback): void;
|
||||
* (path: PathLike, callback: ReaddirStringCallback): void;
|
||||
* (path: PathLike, options: (ObjectEncodingOptions & { withFileTypes?: false | undefined, recursive?: boolean | undefined }) | BufferEncoding | undefined | null, callback: ReaddirStringOrBufferCallback): void;
|
||||
* (path: PathLike, options: ObjectEncodingOptions & { withFileTypes: true, recursive?: boolean | undefined }, callback: ReaddirDirentCallback): void;
|
||||
* }} Readdir
|
||||
*/
|
||||
|
||||
/**
|
||||
* @typedef {{
|
||||
* (path: PathLike, options?: { encoding: BufferEncoding | null, withFileTypes?: false | undefined, recursive?: boolean | undefined } | BufferEncoding | null): string[];
|
||||
* (path: PathLike, options: { encoding: 'buffer', withFileTypes?: false | undefined, recursive?: boolean | undefined } | 'buffer'): Buffer[];
|
||||
* (path: PathLike, options?: (ObjectEncodingOptions & { withFileTypes?: false | undefined, recursive?: boolean | undefined }) | BufferEncoding | null): string[] | Buffer[];
|
||||
* (path: PathLike, options: ObjectEncodingOptions & { withFileTypes: true, recursive?: boolean | undefined }): Dirent[];
|
||||
 * }} ReaddirSync
 */
|
||||
|
||||
/**
|
||||
* @typedef {function(PathOrFileDescriptor, ReadJsonCallback): void} ReadJson
|
||||
*/
|
||||
|
||||
/**
|
||||
* @typedef {function(PathOrFileDescriptor): JsonObject} ReadJsonSync
|
||||
*/
|
||||
|
||||
/**
|
||||
* @typedef {{
|
||||
* (path: PathLike, options: EncodingOption, callback: StringCallback): void;
|
||||
* (path: PathLike, options: BufferEncodingOption, callback: BufferCallback): void;
|
||||
* (path: PathLike, options: EncodingOption, callback: StringOrBufferCallback): void;
|
||||
* (path: PathLike, callback: StringCallback): void;
|
||||
* }} Readlink
|
||||
*/
|
||||
|
||||
/**
|
||||
* @typedef {{
|
||||
* (path: PathLike, options?: EncodingOption): string;
|
||||
* (path: PathLike, options: BufferEncodingOption): Buffer;
|
||||
* (path: PathLike, options?: EncodingOption): string | Buffer;
|
||||
* }} ReadlinkSync
|
||||
*/
|
||||
|
||||
/**
|
||||
* @typedef {{
|
||||
* (path: PathLike, callback: StatsCallback): void;
|
||||
* (path: PathLike, options: (StatOptions & { bigint?: false | undefined }) | undefined, callback: StatsCallback): void;
|
||||
* (path: PathLike, options: StatOptions & { bigint: true }, callback: BigIntStatsCallback): void;
|
||||
* (path: PathLike, options: StatOptions | undefined, callback: StatsOrBigIntStatsCallback): void;
|
||||
* }} LStat
|
||||
*/
|
||||
|
||||
/**
|
||||
* @typedef {{
|
||||
* (path: PathLike, options?: undefined): IStats;
|
||||
* (path: PathLike, options?: StatSyncOptions & { bigint?: false | undefined, throwIfNoEntry: false }): IStats | undefined;
|
||||
* (path: PathLike, options: StatSyncOptions & { bigint: true, throwIfNoEntry: false }): IBigIntStats | undefined;
|
||||
* (path: PathLike, options?: StatSyncOptions & { bigint?: false | undefined }): IStats;
|
||||
* (path: PathLike, options: StatSyncOptions & { bigint: true }): IBigIntStats;
|
||||
* (path: PathLike, options: StatSyncOptions & { bigint: boolean, throwIfNoEntry?: false | undefined }): IStats | IBigIntStats;
|
||||
* (path: PathLike, options?: StatSyncOptions): IStats | IBigIntStats | undefined;
|
||||
* }} LStatSync
|
||||
*/
|
||||
|
||||
/**
|
||||
* @typedef {{
|
||||
* (path: PathLike, callback: StatsCallback): void;
|
||||
* (path: PathLike, options: (StatOptions & { bigint?: false | undefined }) | undefined, callback: StatsCallback): void;
|
||||
* (path: PathLike, options: StatOptions & { bigint: true }, callback: BigIntStatsCallback): void;
|
||||
* (path: PathLike, options: StatOptions | undefined, callback: StatsOrBigIntStatsCallback): void;
|
||||
* }} Stat
|
||||
*/
|
||||
|
||||
/**
|
||||
* @typedef {{
|
||||
* (path: PathLike, options?: undefined): IStats;
|
||||
* (path: PathLike, options?: StatSyncOptions & { bigint?: false | undefined, throwIfNoEntry: false }): IStats | undefined;
|
||||
* (path: PathLike, options: StatSyncOptions & { bigint: true, throwIfNoEntry: false }): IBigIntStats | undefined;
|
||||
* (path: PathLike, options?: StatSyncOptions & { bigint?: false | undefined }): IStats;
|
||||
* (path: PathLike, options: StatSyncOptions & { bigint: true }): IBigIntStats;
|
||||
* (path: PathLike, options: StatSyncOptions & { bigint: boolean, throwIfNoEntry?: false | undefined }): IStats | IBigIntStats;
|
||||
* (path: PathLike, options?: StatSyncOptions): IStats | IBigIntStats | undefined;
|
||||
* }} StatSync
|
||||
*/
|
||||
|
||||
/**
|
||||
* @typedef {{
|
||||
* (path: PathLike, options: EncodingOption, callback: StringCallback): void;
|
||||
* (path: PathLike, options: BufferEncodingOption, callback: BufferCallback): void;
|
||||
* (path: PathLike, options: EncodingOption, callback: StringOrBufferCallback): void;
|
||||
* (path: PathLike, callback: StringCallback): void;
|
||||
* }} RealPath
|
||||
*/
|
||||
|
||||
/**
|
||||
* @typedef {{
|
||||
* (path: PathLike, options?: EncodingOption): string;
|
||||
* (path: PathLike, options: BufferEncodingOption): Buffer;
|
||||
* (path: PathLike, options?: EncodingOption): string | Buffer;
|
||||
* }} RealPathSync
|
||||
*/
|
||||
|
||||
/**
|
||||
* @typedef {Object} FileSystem
|
||||
* @property {ReadFile} readFile
|
||||
* @property {Readdir} readdir
|
||||
* @property {ReadJson=} readJson
|
||||
* @property {Readlink} readlink
|
||||
* @property {LStat=} lstat
|
||||
* @property {Stat} stat
|
||||
* @property {RealPath=} realpath
|
||||
*/
|
||||
|
||||
/**
|
||||
* @typedef {Object} SyncFileSystem
|
||||
* @property {ReadFileSync} readFileSync
|
||||
* @property {ReaddirSync} readdirSync
|
||||
* @property {ReadJsonSync=} readJsonSync
|
||||
* @property {ReadlinkSync} readlinkSync
|
||||
* @property {LStatSync=} lstatSync
|
||||
* @property {StatSync} statSync
|
||||
* @property {RealPathSync=} realpathSync
|
||||
*/
|
||||
|
||||
/**
|
||||
* @typedef {Object} ParsedIdentifier
|
||||
* @property {string} request
|
||||
* @property {string} query
|
||||
* @property {string} fragment
|
||||
* @property {boolean} directory
|
||||
* @property {boolean} module
|
||||
* @property {boolean} file
|
||||
* @property {boolean} internal
|
||||
*/
|
||||
|
||||
/** @typedef {string | number | boolean | null} JsonPrimitive */
|
||||
/** @typedef {JsonValue[]} JsonArray */
|
||||
/** @typedef {JsonPrimitive | JsonObject | JsonArray} JsonValue */
|
||||
/** @typedef {{[Key in string]: JsonValue} & {[Key in string]?: JsonValue | undefined}} JsonObject */
|
||||
|
||||
/**
|
||||
* @typedef {Object} BaseResolveRequest
|
||||
* @property {string | false} path
|
||||
* @property {object=} context
|
||||
* @property {string=} descriptionFilePath
|
||||
* @property {string=} descriptionFileRoot
|
||||
* @property {JsonObject=} descriptionFileData
|
||||
* @property {string=} relativePath
|
||||
* @property {boolean=} ignoreSymlinks
|
||||
* @property {boolean=} fullySpecified
|
||||
* @property {string=} __innerRequest
|
||||
* @property {string=} __innerRequest_request
|
||||
* @property {string=} __innerRequest_relativePath
|
||||
*/
|
||||
|
||||
/** @typedef {BaseResolveRequest & Partial<ParsedIdentifier>} ResolveRequest */
|
||||
|
||||
/**
|
||||
* String with special formatting
|
||||
* @typedef {string} StackEntry
|
||||
*/
|
||||
|
||||
/**
|
||||
* @template T
|
||||
* @typedef {{ add: (item: T) => void }} WriteOnlySet
|
||||
*/
|
||||
|
||||
/** @typedef {(function (ResolveRequest): void)} ResolveContextYield */
|
||||
|
||||
/**
|
||||
* Resolve context
|
||||
* @typedef {Object} ResolveContext
|
||||
* @property {WriteOnlySet<string>=} contextDependencies
|
||||
* @property {WriteOnlySet<string>=} fileDependencies files that was found on file system
|
||||
* @property {WriteOnlySet<string>=} missingDependencies dependencies that was not found on file system
|
||||
* @property {Set<StackEntry>=} stack set of hooks' calls. For instance, `resolve → parsedResolve → describedResolve`,
|
||||
* @property {(function(string): void)=} log log function
|
||||
* @property {ResolveContextYield=} yield yield result, if provided plugins can return several results
|
||||
*/
|
||||
|
||||
/** @typedef {AsyncSeriesBailHook<[ResolveRequest, ResolveContext], ResolveRequest | null>} ResolveStepHook */
|
||||
|
||||
/**
|
||||
* @typedef {Object} KnownHooks
|
||||
* @property {SyncHook<[ResolveStepHook, ResolveRequest], void>} resolveStep
|
||||
* @property {SyncHook<[ResolveRequest, Error]>} noResolve
|
||||
* @property {ResolveStepHook} resolve
|
||||
* @property {AsyncSeriesHook<[ResolveRequest, ResolveContext]>} result
|
||||
*/
|
||||
|
||||
/**
|
||||
* @typedef {{[key: string]: ResolveStepHook}} EnsuredHooks
|
||||
*/
|
||||
|
||||
/**
|
||||
* @param {string} str input string
|
||||
* @returns {string} in camel case
|
||||
*/
|
||||
function toCamelCase(str) {
|
||||
return str.replace(/-([a-z])/g, str => str.slice(1).toUpperCase());
|
||||
}
|
||||
|
||||
class Resolver {
|
||||
/**
|
||||
* @param {ResolveStepHook} hook hook
|
||||
* @param {ResolveRequest} request request
|
||||
* @returns {StackEntry} stack entry
|
||||
*/
|
||||
static createStackEntry(hook, request) {
|
||||
return (
|
||||
hook.name +
|
||||
": (" +
|
||||
request.path +
|
||||
") " +
|
||||
(request.request || "") +
|
||||
(request.query || "") +
|
||||
(request.fragment || "") +
|
||||
(request.directory ? " directory" : "") +
|
||||
(request.module ? " module" : "")
|
||||
);
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {FileSystem} fileSystem a filesystem
|
||||
* @param {ResolveOptions} options options
|
||||
*/
|
||||
constructor(fileSystem, options) {
|
||||
this.fileSystem = fileSystem;
|
||||
this.options = options;
|
||||
/** @type {KnownHooks} */
|
||||
this.hooks = {
|
||||
resolveStep: new SyncHook(["hook", "request"], "resolveStep"),
|
||||
noResolve: new SyncHook(["request", "error"], "noResolve"),
|
||||
resolve: new AsyncSeriesBailHook(
|
||||
["request", "resolveContext"],
|
||||
"resolve"
|
||||
),
|
||||
result: new AsyncSeriesHook(["result", "resolveContext"], "result")
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {string | ResolveStepHook} name hook name or hook itself
|
||||
* @returns {ResolveStepHook} the hook
|
||||
*/
|
||||
ensureHook(name) {
|
||||
if (typeof name !== "string") {
|
||||
return name;
|
||||
}
|
||||
name = toCamelCase(name);
|
||||
if (/^before/.test(name)) {
|
||||
return /** @type {ResolveStepHook} */ (
|
||||
this.ensureHook(name[6].toLowerCase() + name.slice(7)).withOptions({
|
||||
stage: -10
|
||||
})
|
||||
);
|
||||
}
|
||||
if (/^after/.test(name)) {
|
||||
return /** @type {ResolveStepHook} */ (
|
||||
this.ensureHook(name[5].toLowerCase() + name.slice(6)).withOptions({
|
||||
stage: 10
|
||||
})
|
||||
);
|
||||
}
|
||||
/** @type {ResolveStepHook} */
|
||||
const hook = /** @type {KnownHooks & EnsuredHooks} */ (this.hooks)[name];
|
||||
if (!hook) {
|
||||
/** @type {KnownHooks & EnsuredHooks} */
|
||||
(this.hooks)[name] = new AsyncSeriesBailHook(
|
||||
["request", "resolveContext"],
|
||||
name
|
||||
);
|
||||
|
||||
return /** @type {KnownHooks & EnsuredHooks} */ (this.hooks)[name];
|
||||
}
|
||||
return hook;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {string | ResolveStepHook} name hook name or hook itself
|
||||
* @returns {ResolveStepHook} the hook
|
||||
*/
|
||||
getHook(name) {
|
||||
if (typeof name !== "string") {
|
||||
return name;
|
||||
}
|
||||
name = toCamelCase(name);
|
||||
if (/^before/.test(name)) {
|
||||
return /** @type {ResolveStepHook} */ (
|
||||
this.getHook(name[6].toLowerCase() + name.slice(7)).withOptions({
|
||||
stage: -10
|
||||
})
|
||||
);
|
||||
}
|
||||
if (/^after/.test(name)) {
|
||||
return /** @type {ResolveStepHook} */ (
|
||||
this.getHook(name[5].toLowerCase() + name.slice(6)).withOptions({
|
||||
stage: 10
|
||||
})
|
||||
);
|
||||
}
|
||||
/** @type {ResolveStepHook} */
|
||||
const hook = /** @type {KnownHooks & EnsuredHooks} */ (this.hooks)[name];
|
||||
if (!hook) {
|
||||
throw new Error(`Hook ${name} doesn't exist`);
|
||||
}
|
||||
return hook;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {object} context context information object
|
||||
* @param {string} path context path
|
||||
* @param {string} request request string
|
||||
* @returns {string | false} result
|
||||
*/
|
||||
resolveSync(context, path, request) {
|
||||
/** @type {Error | null | undefined} */
|
||||
let err = undefined;
|
||||
/** @type {string | false | undefined} */
|
||||
let result = undefined;
|
||||
let sync = false;
|
||||
this.resolve(context, path, request, {}, (e, r) => {
|
||||
err = e;
|
||||
result = r;
|
||||
sync = true;
|
||||
});
|
||||
if (!sync) {
|
||||
throw new Error(
|
||||
"Cannot 'resolveSync' because the fileSystem is not sync. Use 'resolve'!"
|
||||
);
|
||||
}
|
||||
if (err) throw err;
|
||||
if (result === undefined) throw new Error("No result");
|
||||
return result;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {object} context context information object
|
||||
* @param {string} path context path
|
||||
* @param {string} request request string
|
||||
* @param {ResolveContext} resolveContext resolve context
|
||||
* @param {ResolveCallback} callback callback function
|
||||
* @returns {void}
|
||||
*/
|
||||
resolve(context, path, request, resolveContext, callback) {
|
||||
if (!context || typeof context !== "object")
|
||||
return callback(new Error("context argument is not an object"));
|
||||
if (typeof path !== "string")
|
||||
return callback(new Error("path argument is not a string"));
|
||||
if (typeof request !== "string")
|
||||
return callback(new Error("request argument is not a string"));
|
||||
if (!resolveContext)
|
||||
return callback(new Error("resolveContext argument is not set"));
|
||||
|
||||
/** @type {ResolveRequest} */
|
||||
const obj = {
|
||||
context: context,
|
||||
path: path,
|
||||
request: request
|
||||
};
|
||||
|
||||
/** @type {ResolveContextYield | undefined} */
|
||||
let yield_;
|
||||
let yieldCalled = false;
|
||||
/** @type {ResolveContextYield | undefined} */
|
||||
let finishYield;
|
||||
if (typeof resolveContext.yield === "function") {
|
||||
const old = resolveContext.yield;
|
||||
/**
|
||||
* @param {ResolveRequest} obj object
|
||||
*/
|
||||
yield_ = obj => {
|
||||
old(obj);
|
||||
yieldCalled = true;
|
||||
};
|
||||
/**
|
||||
* @param {ResolveRequest} result result
|
||||
* @returns {void}
|
||||
*/
|
||||
finishYield = result => {
|
||||
if (result) {
|
||||
/** @type {ResolveContextYield} */ (yield_)(result);
|
||||
}
|
||||
callback(null);
|
||||
};
|
||||
}
|
||||
|
||||
const message = `resolve '${request}' in '${path}'`;
|
||||
|
||||
/**
|
||||
* @param {ResolveRequest} result result
|
||||
* @returns {void}
|
||||
*/
|
||||
const finishResolved = result => {
|
||||
return callback(
|
||||
null,
|
||||
result.path === false
|
||||
? false
|
||||
: `${result.path.replace(/#/g, "\0#")}${
|
||||
result.query ? result.query.replace(/#/g, "\0#") : ""
|
||||
}${result.fragment || ""}`,
|
||||
result
|
||||
);
|
||||
};
|
||||
|
||||
/**
|
||||
* @param {string[]} log logs
|
||||
* @returns {void}
|
||||
*/
|
||||
const finishWithoutResolve = log => {
|
||||
/**
|
||||
* @type {ErrorWithDetail}
|
||||
*/
|
||||
const error = new Error("Can't " + message);
|
||||
error.details = log.join("\n");
|
||||
this.hooks.noResolve.call(obj, error);
|
||||
return callback(error);
|
||||
};
|
||||
|
||||
if (resolveContext.log) {
|
||||
// We need log anyway to capture it in case of an error
|
||||
const parentLog = resolveContext.log;
|
||||
/** @type {string[]} */
|
||||
const log = [];
|
||||
return this.doResolve(
|
||||
this.hooks.resolve,
|
||||
obj,
|
||||
message,
|
||||
{
|
||||
log: msg => {
|
||||
parentLog(msg);
|
||||
log.push(msg);
|
||||
},
|
||||
yield: yield_,
|
||||
fileDependencies: resolveContext.fileDependencies,
|
||||
contextDependencies: resolveContext.contextDependencies,
|
||||
missingDependencies: resolveContext.missingDependencies,
|
||||
stack: resolveContext.stack
|
||||
},
|
||||
(err, result) => {
|
||||
if (err) return callback(err);
|
||||
|
||||
if (yieldCalled || (result && yield_)) {
|
||||
return /** @type {ResolveContextYield} */ (finishYield)(
|
||||
/** @type {ResolveRequest} */ (result)
|
||||
);
|
||||
}
|
||||
|
||||
if (result) return finishResolved(result);
|
||||
|
||||
return finishWithoutResolve(log);
|
||||
}
|
||||
);
|
||||
} else {
|
||||
// Try to resolve assuming there is no error
|
||||
// We don't log stuff in this case
|
||||
return this.doResolve(
|
||||
this.hooks.resolve,
|
||||
obj,
|
||||
message,
|
||||
{
|
||||
log: undefined,
|
||||
yield: yield_,
|
||||
fileDependencies: resolveContext.fileDependencies,
|
||||
contextDependencies: resolveContext.contextDependencies,
|
||||
missingDependencies: resolveContext.missingDependencies,
|
||||
stack: resolveContext.stack
|
||||
},
|
||||
(err, result) => {
|
||||
if (err) return callback(err);
|
||||
|
||||
if (yieldCalled || (result && yield_)) {
|
||||
return /** @type {ResolveContextYield} */ (finishYield)(
|
||||
/** @type {ResolveRequest} */ (result)
|
||||
);
|
||||
}
|
||||
|
||||
if (result) return finishResolved(result);
|
||||
|
||||
// log is missing for the error details
|
||||
// so we redo the resolving for the log info
|
||||
// this is more expensive to the success case
|
||||
// is assumed by default
|
||||
/** @type {string[]} */
|
||||
const log = [];
|
||||
|
||||
return this.doResolve(
|
||||
this.hooks.resolve,
|
||||
obj,
|
||||
message,
|
||||
{
|
||||
log: msg => log.push(msg),
|
||||
yield: yield_,
|
||||
stack: resolveContext.stack
|
||||
},
|
||||
(err, result) => {
|
||||
if (err) return callback(err);
|
||||
|
||||
// In a case that there is a race condition and yield will be called
|
||||
if (yieldCalled || (result && yield_)) {
|
||||
return /** @type {ResolveContextYield} */ (finishYield)(
|
||||
/** @type {ResolveRequest} */ (result)
|
||||
);
|
||||
}
|
||||
|
||||
return finishWithoutResolve(log);
|
||||
}
|
||||
);
|
||||
}
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {ResolveStepHook} hook hook
|
||||
* @param {ResolveRequest} request request
|
||||
* @param {null|string} message string
|
||||
* @param {ResolveContext} resolveContext resolver context
|
||||
* @param {(err?: null|Error, result?: ResolveRequest) => void} callback callback
|
||||
* @returns {void}
|
||||
*/
|
||||
doResolve(hook, request, message, resolveContext, callback) {
|
||||
const stackEntry = Resolver.createStackEntry(hook, request);
|
||||
|
||||
/** @type {Set<string> | undefined} */
|
||||
let newStack;
|
||||
if (resolveContext.stack) {
|
||||
newStack = new Set(resolveContext.stack);
|
||||
if (resolveContext.stack.has(stackEntry)) {
|
||||
/**
|
||||
* Prevent recursion
|
||||
* @type {Error & {recursion?: boolean}}
|
||||
*/
|
||||
const recursionError = new Error(
|
||||
"Recursion in resolving\nStack:\n " +
|
||||
Array.from(newStack).join("\n ")
|
||||
);
|
||||
recursionError.recursion = true;
|
||||
if (resolveContext.log)
|
||||
resolveContext.log("abort resolving because of recursion");
|
||||
return callback(recursionError);
|
||||
}
|
||||
newStack.add(stackEntry);
|
||||
} else {
|
||||
// creating a set with new Set([item])
|
||||
// allocates a new array that has to be garbage collected
|
||||
// this is an EXTREMELY hot path, so let's avoid it
|
||||
newStack = new Set();
|
||||
newStack.add(stackEntry);
|
||||
}
|
||||
this.hooks.resolveStep.call(hook, request);
|
||||
|
||||
if (hook.isUsed()) {
|
||||
const innerContext = createInnerContext(
|
||||
{
|
||||
log: resolveContext.log,
|
||||
yield: resolveContext.yield,
|
||||
fileDependencies: resolveContext.fileDependencies,
|
||||
contextDependencies: resolveContext.contextDependencies,
|
||||
missingDependencies: resolveContext.missingDependencies,
|
||||
stack: newStack
|
||||
},
|
||||
message
|
||||
);
|
||||
return hook.callAsync(request, innerContext, (err, result) => {
|
||||
if (err) return callback(err);
|
||||
if (result) return callback(null, result);
|
||||
callback();
|
||||
});
|
||||
} else {
|
||||
callback();
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {string} identifier identifier
|
||||
* @returns {ParsedIdentifier} parsed identifier
|
||||
*/
|
||||
parse(identifier) {
|
||||
const part = {
|
||||
request: "",
|
||||
query: "",
|
||||
fragment: "",
|
||||
module: false,
|
||||
directory: false,
|
||||
file: false,
|
||||
internal: false
|
||||
};
|
||||
|
||||
const parsedIdentifier = parseIdentifier(identifier);
|
||||
|
||||
if (!parsedIdentifier) return part;
|
||||
|
||||
[part.request, part.query, part.fragment] = parsedIdentifier;
|
||||
|
||||
if (part.request.length > 0) {
|
||||
part.internal = this.isPrivate(identifier);
|
||||
part.module = this.isModule(part.request);
|
||||
part.directory = this.isDirectory(part.request);
|
||||
if (part.directory) {
|
||||
part.request = part.request.slice(0, -1);
|
||||
}
|
||||
}
|
||||
|
||||
return part;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {string} path path
|
||||
* @returns {boolean} true, if the path is a module
|
||||
*/
|
||||
isModule(path) {
|
||||
return getType(path) === PathType.Normal;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {string} path path
|
||||
* @returns {boolean} true, if the path is private
|
||||
*/
|
||||
isPrivate(path) {
|
||||
return getType(path) === PathType.Internal;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {string} path a path
|
||||
* @returns {boolean} true, if the path is a directory path
|
||||
*/
|
||||
isDirectory(path) {
|
||||
return path.endsWith("/");
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {string} path path
|
||||
* @param {string} request request
|
||||
* @returns {string} joined path
|
||||
*/
|
||||
join(path, request) {
|
||||
return join(path, request);
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {string} path path
|
||||
* @returns {string} normalized path
|
||||
*/
|
||||
normalize(path) {
|
||||
return normalize(path);
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = Resolver;
|
||||
@@ -0,0 +1,99 @@
|
||||
/**
|
||||
* @fileoverview Rule to flag use of an object property of the global object (Math and JSON) as a function
|
||||
* @author James Allardice
|
||||
*/
|
||||
|
||||
"use strict";
|
||||
|
||||
//------------------------------------------------------------------------------
|
||||
// Requirements
|
||||
//------------------------------------------------------------------------------
|
||||
|
||||
const {
|
||||
CALL,
|
||||
CONSTRUCT,
|
||||
ReferenceTracker,
|
||||
} = require("@eslint-community/eslint-utils");
|
||||
const getPropertyName = require("./utils/ast-utils").getStaticPropertyName;
|
||||
|
||||
//------------------------------------------------------------------------------
|
||||
// Helpers
|
||||
//------------------------------------------------------------------------------
|
||||
|
||||
const nonCallableGlobals = ["Atomics", "JSON", "Math", "Reflect", "Intl"];
|
||||
|
||||
/**
|
||||
* Returns the name of the node to report
|
||||
* @param {ASTNode} node A node to report
|
||||
* @returns {string} name to report
|
||||
*/
|
||||
function getReportNodeName(node) {
|
||||
if (node.type === "ChainExpression") {
|
||||
return getReportNodeName(node.expression);
|
||||
}
|
||||
if (node.type === "MemberExpression") {
|
||||
return getPropertyName(node);
|
||||
}
|
||||
return node.name;
|
||||
}
|
||||
|
||||
//------------------------------------------------------------------------------
|
||||
// Rule Definition
|
||||
//------------------------------------------------------------------------------
|
||||
|
||||
/** @type {import('../shared/types').Rule} */
|
||||
module.exports = {
|
||||
meta: {
|
||||
type: "problem",
|
||||
|
||||
docs: {
|
||||
description:
|
||||
"Disallow calling global object properties as functions",
|
||||
recommended: true,
|
||||
url: "https://eslint.org/docs/latest/rules/no-obj-calls",
|
||||
},
|
||||
|
||||
schema: [],
|
||||
|
||||
messages: {
|
||||
unexpectedCall: "'{{name}}' is not a function.",
|
||||
unexpectedRefCall:
|
||||
"'{{name}}' is reference to '{{ref}}', which is not a function.",
|
||||
},
|
||||
},
|
||||
|
||||
create(context) {
|
||||
const sourceCode = context.sourceCode;
|
||||
|
||||
return {
|
||||
Program(node) {
|
||||
const scope = sourceCode.getScope(node);
|
||||
const tracker = new ReferenceTracker(scope);
|
||||
const traceMap = {};
|
||||
|
||||
for (const g of nonCallableGlobals) {
|
||||
traceMap[g] = {
|
||||
[CALL]: true,
|
||||
[CONSTRUCT]: true,
|
||||
};
|
||||
}
|
||||
|
||||
for (const {
|
||||
node: refNode,
|
||||
path,
|
||||
} of tracker.iterateGlobalReferences(traceMap)) {
|
||||
const name = getReportNodeName(refNode.callee);
|
||||
const ref = path[0];
|
||||
const messageId =
|
||||
name === ref ? "unexpectedCall" : "unexpectedRefCall";
|
||||
|
||||
context.report({
|
||||
node: refNode,
|
||||
messageId,
|
||||
data: { name, ref },
|
||||
});
|
||||
}
|
||||
},
|
||||
};
|
||||
},
|
||||
};
|
||||
@@ -0,0 +1,3 @@
|
||||
const compareBuild = require('./compare-build')
|
||||
const rsort = (list, loose) => list.sort((a, b) => compareBuild(b, a, loose))
|
||||
module.exports = rsort
|
||||
@@ -0,0 +1,27 @@
|
||||
type VisitorKeys$1 = {
|
||||
readonly [type: string]: readonly string[];
|
||||
};
|
||||
/**
|
||||
* @typedef {{ readonly [type: string]: ReadonlyArray<string> }} VisitorKeys
|
||||
*/
|
||||
/**
|
||||
* @type {VisitorKeys}
|
||||
*/
|
||||
declare const KEYS: VisitorKeys$1;
|
||||
|
||||
/**
|
||||
* Get visitor keys of a given node.
|
||||
* @param {object} node The AST node to get keys.
|
||||
* @returns {readonly string[]} Visitor keys of the node.
|
||||
*/
|
||||
declare function getKeys(node: object): readonly string[];
|
||||
/**
|
||||
* Make the union set with `KEYS` and given keys.
|
||||
* @param {VisitorKeys} additionalKeys The additional keys.
|
||||
* @returns {VisitorKeys} The union set.
|
||||
*/
|
||||
declare function unionWith(additionalKeys: VisitorKeys): VisitorKeys;
|
||||
|
||||
type VisitorKeys = VisitorKeys$1;
|
||||
|
||||
export { KEYS, VisitorKeys, getKeys, unionWith };
|
||||
@@ -0,0 +1,197 @@
|
||||
"use strict";
|
||||
'use client';
|
||||
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
var desc = Object.getOwnPropertyDescriptor(m, k);
|
||||
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
|
||||
desc = { enumerable: true, get: function() { return m[k]; } };
|
||||
}
|
||||
Object.defineProperty(o, k2, desc);
|
||||
}) : (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
o[k2] = m[k];
|
||||
}));
|
||||
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
||||
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
||||
}) : function(o, v) {
|
||||
o["default"] = v;
|
||||
});
|
||||
var __importStar = (this && this.__importStar) || function (mod) {
|
||||
if (mod && mod.__esModule) return mod;
|
||||
var result = {};
|
||||
if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
|
||||
__setModuleDefault(result, mod);
|
||||
return result;
|
||||
};
|
||||
var __importDefault = (this && this.__importDefault) || function (mod) {
|
||||
return (mod && mod.__esModule) ? mod : { "default": mod };
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.default = TextLayer;
|
||||
const jsx_runtime_1 = require("react/jsx-runtime");
|
||||
const react_1 = require("react");
|
||||
const make_cancellable_promise_1 = __importDefault(require("make-cancellable-promise"));
|
||||
const clsx_1 = __importDefault(require("clsx"));
|
||||
const tiny_invariant_1 = __importDefault(require("tiny-invariant"));
|
||||
const warning_1 = __importDefault(require("warning"));
|
||||
const pdfjs = __importStar(require("pdfjs-dist"));
|
||||
const usePageContext_js_1 = __importDefault(require("../shared/hooks/usePageContext.js"));
|
||||
const useResolver_js_1 = __importDefault(require("../shared/hooks/useResolver.js"));
|
||||
const utils_js_1 = require("../shared/utils.js");
|
||||
function isTextItem(item) {
|
||||
return 'str' in item;
|
||||
}
|
||||
function TextLayer() {
|
||||
const pageContext = (0, usePageContext_js_1.default)();
|
||||
(0, tiny_invariant_1.default)(pageContext, 'Unable to find Page context.');
|
||||
const { customTextRenderer, onGetTextError, onGetTextSuccess, onRenderTextLayerError, onRenderTextLayerSuccess, page, pageIndex, pageNumber, rotate, scale, } = pageContext;
|
||||
(0, tiny_invariant_1.default)(page, 'Attempted to load page text content, but no page was specified.');
|
||||
const [textContentState, textContentDispatch] = (0, useResolver_js_1.default)();
|
||||
const { value: textContent, error: textContentError } = textContentState;
|
||||
const layerElement = (0, react_1.useRef)(null);
|
||||
(0, warning_1.default)(Number.parseInt(window.getComputedStyle(document.body).getPropertyValue('--react-pdf-text-layer'), 10) === 1, 'TextLayer styles not found. Read more: https://github.com/wojtekmaj/react-pdf#support-for-text-layer');
|
||||
/**
|
||||
* Called when a page text content is read successfully
|
||||
*/
|
||||
function onLoadSuccess() {
|
||||
if (!textContent) {
|
||||
// Impossible, but TypeScript doesn't know that
|
||||
return;
|
||||
}
|
||||
if (onGetTextSuccess) {
|
||||
onGetTextSuccess(textContent);
|
||||
}
|
||||
}
|
||||
/**
|
||||
* Called when a page text content failed to read successfully
|
||||
*/
|
||||
function onLoadError() {
|
||||
if (!textContentError) {
|
||||
// Impossible, but TypeScript doesn't know that
|
||||
return;
|
||||
}
|
||||
(0, warning_1.default)(false, textContentError.toString());
|
||||
if (onGetTextError) {
|
||||
onGetTextError(textContentError);
|
||||
}
|
||||
}
|
||||
// biome-ignore lint/correctness/useExhaustiveDependencies: useEffect intentionally triggered on page change
|
||||
(0, react_1.useEffect)(function resetTextContent() {
|
||||
textContentDispatch({ type: 'RESET' });
|
||||
}, [page, textContentDispatch]);
|
||||
(0, react_1.useEffect)(function loadTextContent() {
|
||||
if (!page) {
|
||||
return;
|
||||
}
|
||||
const cancellable = (0, make_cancellable_promise_1.default)(page.getTextContent());
|
||||
const runningTask = cancellable;
|
||||
cancellable.promise
|
||||
.then((nextTextContent) => {
|
||||
textContentDispatch({ type: 'RESOLVE', value: nextTextContent });
|
||||
})
|
||||
.catch((error) => {
|
||||
textContentDispatch({ type: 'REJECT', error });
|
||||
});
|
||||
return () => (0, utils_js_1.cancelRunningTask)(runningTask);
|
||||
}, [page, textContentDispatch]);
|
||||
// biome-ignore lint/correctness/useExhaustiveDependencies: Ommitted callbacks so they are not called every time they change
|
||||
(0, react_1.useEffect)(() => {
|
||||
if (textContent === undefined) {
|
||||
return;
|
||||
}
|
||||
if (textContent === false) {
|
||||
onLoadError();
|
||||
return;
|
||||
}
|
||||
onLoadSuccess();
|
||||
}, [textContent]);
|
||||
/**
|
||||
* Called when a text layer is rendered successfully
|
||||
*/
|
||||
const onRenderSuccess = (0, react_1.useCallback)(() => {
|
||||
if (onRenderTextLayerSuccess) {
|
||||
onRenderTextLayerSuccess();
|
||||
}
|
||||
}, [onRenderTextLayerSuccess]);
|
||||
/**
|
||||
* Called when a text layer failed to render successfully
|
||||
*/
|
||||
const onRenderError = (0, react_1.useCallback)((error) => {
|
||||
(0, warning_1.default)(false, error.toString());
|
||||
if (onRenderTextLayerError) {
|
||||
onRenderTextLayerError(error);
|
||||
}
|
||||
}, [onRenderTextLayerError]);
|
||||
function onMouseDown() {
|
||||
const layer = layerElement.current;
|
||||
if (!layer) {
|
||||
return;
|
||||
}
|
||||
layer.classList.add('selecting');
|
||||
}
|
||||
function onMouseUp() {
|
||||
const layer = layerElement.current;
|
||||
if (!layer) {
|
||||
return;
|
||||
}
|
||||
layer.classList.remove('selecting');
|
||||
}
|
||||
const viewport = (0, react_1.useMemo)(() => page.getViewport({ scale, rotation: rotate }), [page, rotate, scale]);
|
||||
(0, react_1.useLayoutEffect)(function renderTextLayer() {
|
||||
if (!page || !textContent) {
|
||||
return;
|
||||
}
|
||||
const { current: layer } = layerElement;
|
||||
if (!layer) {
|
||||
return;
|
||||
}
|
||||
layer.innerHTML = '';
|
||||
const textContentSource = page.streamTextContent({ includeMarkedContent: true });
|
||||
const parameters = {
|
||||
container: layer,
|
||||
textContentSource,
|
||||
viewport,
|
||||
};
|
||||
const cancellable = new pdfjs.TextLayer(parameters);
|
||||
const runningTask = cancellable;
|
||||
cancellable
|
||||
.render()
|
||||
.then(() => {
|
||||
const end = document.createElement('div');
|
||||
end.className = 'endOfContent';
|
||||
layer.append(end);
|
||||
const layerChildren = layer.querySelectorAll('[role="presentation"]');
|
||||
if (customTextRenderer) {
|
||||
let index = 0;
|
||||
textContent.items.forEach((item, itemIndex) => {
|
||||
if (!isTextItem(item)) {
|
||||
return;
|
||||
}
|
||||
const child = layerChildren[index];
|
||||
if (!child) {
|
||||
return;
|
||||
}
|
||||
const content = customTextRenderer(Object.assign({ pageIndex,
|
||||
pageNumber,
|
||||
itemIndex }, item));
|
||||
child.innerHTML = content;
|
||||
index += item.str && item.hasEOL ? 2 : 1;
|
||||
});
|
||||
}
|
||||
// Intentional immediate callback
|
||||
onRenderSuccess();
|
||||
})
|
||||
.catch(onRenderError);
|
||||
return () => (0, utils_js_1.cancelRunningTask)(runningTask);
|
||||
}, [
|
||||
customTextRenderer,
|
||||
onRenderError,
|
||||
onRenderSuccess,
|
||||
page,
|
||||
pageIndex,
|
||||
pageNumber,
|
||||
textContent,
|
||||
viewport,
|
||||
]);
|
||||
return ((0, jsx_runtime_1.jsx)("div", { className: (0, clsx_1.default)('react-pdf__Page__textContent', 'textLayer'), onMouseUp: onMouseUp, onMouseDown: onMouseDown, ref: layerElement }));
|
||||
}
|
||||
@@ -0,0 +1,43 @@
|
||||
{
|
||||
"name": "yocto-queue",
|
||||
"version": "0.1.0",
|
||||
"description": "Tiny queue data structure",
|
||||
"license": "MIT",
|
||||
"repository": "sindresorhus/yocto-queue",
|
||||
"funding": "https://github.com/sponsors/sindresorhus",
|
||||
"author": {
|
||||
"name": "Sindre Sorhus",
|
||||
"email": "sindresorhus@gmail.com",
|
||||
"url": "https://sindresorhus.com"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=10"
|
||||
},
|
||||
"scripts": {
|
||||
"test": "xo && ava && tsd"
|
||||
},
|
||||
"files": [
|
||||
"index.js",
|
||||
"index.d.ts"
|
||||
],
|
||||
"keywords": [
|
||||
"queue",
|
||||
"data",
|
||||
"structure",
|
||||
"algorithm",
|
||||
"queues",
|
||||
"queuing",
|
||||
"list",
|
||||
"array",
|
||||
"linkedlist",
|
||||
"fifo",
|
||||
"enqueue",
|
||||
"dequeue",
|
||||
"data-structure"
|
||||
],
|
||||
"devDependencies": {
|
||||
"ava": "^2.4.0",
|
||||
"tsd": "^0.13.1",
|
||||
"xo": "^0.35.0"
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1 @@
|
||||
module.exports={C:{"4":0.00294,"58":0.00881,"115":0.61048,"125":0.00587,"127":0.00294,"128":0.00294,"130":0.00294,"133":0.21426,"134":0.00587,"135":0.56646,"136":1.17987,"137":0.01468,_:"2 3 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 114 116 117 118 119 120 121 122 123 124 126 129 131 132 138 139 140 3.5 3.6"},D:{"39":0.00587,"40":0.00294,"41":0.00294,"42":0.00294,"43":0.00294,"44":0.00294,"45":0.00587,"46":0.00294,"47":0.02055,"48":0.00294,"49":0.00587,"50":0.00294,"51":0.00294,"52":0.00294,"53":0.00294,"54":0.00294,"55":0.00294,"56":0.00587,"57":0.00294,"58":0.00587,"59":0.00587,"60":0.00587,"69":0.00294,"70":0.00294,"73":0.00881,"79":0.00587,"83":0.01468,"87":0.00587,"88":0.00294,"89":0.02055,"92":0.00587,"93":0.01761,"94":0.00881,"95":0.00587,"96":0.00294,"97":0.00294,"100":0.00294,"102":0.00294,"103":0.02055,"105":0.00294,"109":0.27589,"111":0.03522,"114":0.00587,"116":0.08805,"118":0.00294,"119":0.00294,"120":0.01174,"121":0.00881,"122":0.03522,"123":0.05577,"124":0.11153,"125":0.26415,"126":0.08805,"128":0.02055,"129":0.00587,"130":0.02055,"131":0.15262,"132":0.49895,"133":4.9807,"134":10.35468,"135":0.01468,_:"4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 61 62 63 64 65 66 67 68 71 72 74 75 76 77 78 80 81 84 85 86 90 91 98 99 101 104 106 107 108 110 112 113 115 117 127 136 137 138"},F:{"87":0.01174,"114":0.00294,"116":0.11447,"117":0.77484,_:"9 11 12 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 60 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 
107 108 109 110 111 112 113 115 9.5-9.6 10.0-10.1 10.5 10.6 11.1 11.5 11.6 12.1"},B:{"16":0.00294,"18":0.00294,"84":0.00294,"90":0.00294,"92":0.00881,"100":0.00294,"109":0.02055,"112":0.00294,"114":0.00881,"116":0.00294,"117":0.00294,"118":0.00294,"120":0.00294,"122":0.01468,"124":0.00294,"125":0.00587,"126":0.00294,"127":0.00294,"128":0.00587,"130":0.00294,"131":0.01468,"132":0.03522,"133":1.36478,"134":2.80586,_:"12 13 14 15 17 79 80 81 83 85 86 87 88 89 91 93 94 95 96 97 98 99 101 102 103 104 105 106 107 108 110 111 113 115 119 121 123 129"},E:{"14":0.00294,_:"0 4 5 6 7 8 9 10 11 12 13 15 3.1 3.2 5.1 6.1 7.1 9.1 10.1 11.1 15.4 15.5 16.2 17.3","12.1":0.00881,"13.1":0.01174,"14.1":0.00587,"15.1":0.00294,"15.2-15.3":0.00881,"15.6":0.06457,"16.0":0.02935,"16.1":0.00294,"16.3":0.00881,"16.4":0.00587,"16.5":0.00587,"16.6":0.12034,"17.0":0.00294,"17.1":0.03229,"17.2":0.00587,"17.4":0.00587,"17.5":0.02348,"17.6":0.09686,"18.0":0.00294,"18.1":0.01761,"18.2":0.01468,"18.3":1.0566,"18.4":0.01761},G:{"8":0,"3.2":0,"4.0-4.1":0,"4.2-4.3":0.00225,"5.0-5.1":0,"6.0-6.1":0.00675,"7.0-7.1":0.0045,"8.1-8.4":0,"9.0-9.2":0.00337,"9.3":0.01575,"10.0-10.2":0.00112,"10.3":0.02587,"11.0-11.2":0.11922,"11.3-11.4":0.00787,"12.0-12.1":0.0045,"12.2-12.5":0.11135,"13.0-13.1":0.00225,"13.2":0.00337,"13.3":0.0045,"13.4-13.7":0.01575,"14.0-14.4":0.03937,"14.5-14.8":0.04724,"15.0-15.1":0.02587,"15.2-15.3":0.02587,"15.4":0.03149,"15.5":0.03599,"15.6-15.8":0.44315,"16.0":0.06299,"16.1":0.12935,"16.2":0.06748,"16.3":0.11697,"16.4":0.02587,"16.5":0.04836,"16.6-16.7":0.52526,"17.0":0.03149,"17.1":0.05624,"17.2":0.04274,"17.3":0.05961,"17.4":0.11922,"17.5":0.26544,"17.6-17.7":0.77045,"18.0":0.21595,"18.1":0.70634,"18.2":0.31605,"18.3":6.60565,"18.4":0.09785},P:{"4":0.04152,"20":0.01038,"21":0.04152,"22":0.09342,"23":0.07266,"24":0.49826,"25":0.07266,"26":0.29065,"27":4.75426,_:"5.0-5.4 8.2 9.2 10.1 11.1-11.2 12.0 14.0 15.0 
18.0","6.2-6.4":0.03114,"7.2-7.4":0.06228,"13.0":0.01038,"16.0":0.02076,"17.0":0.04152,"19.0":0.04152},I:{"0":0.00705,"3":0,"4":0,"2.1":0,"2.2":0,"2.3":0,"4.1":0,"4.2-4.3":0,"4.4":0,"4.4.3-4.4.4":0.00001},K:{"0":0.31086,_:"10 11 12 11.1 11.5 12.1"},A:{"11":0.07338,_:"6 7 8 9 10 5.5"},S:{_:"2.5 3.0-3.1"},J:{_:"7 10"},N:{_:"10 11"},R:{_:"0"},M:{"0":0.05652},Q:{"14.9":0.24021},O:{"0":0.2826},H:{"0":0},L:{"0":54.16867}};
|
||||
@@ -0,0 +1 @@
|
||||
{"version":3,"names":["_setPrototypeOf","require","_inheritsLoose","subClass","superClass","prototype","Object","create","constructor","setPrototypeOf"],"sources":["../../src/helpers/inheritsLoose.ts"],"sourcesContent":["/* @minVersion 7.0.0-beta.0 */\n\nimport setPrototypeOf from \"./setPrototypeOf.ts\";\n\nexport default function _inheritsLoose(\n subClass: Function,\n superClass: Function,\n) {\n subClass.prototype = Object.create(superClass.prototype);\n subClass.prototype.constructor = subClass;\n setPrototypeOf(subClass, superClass);\n}\n"],"mappings":";;;;;;AAEA,IAAAA,eAAA,GAAAC,OAAA;AAEe,SAASC,cAAcA,CACpCC,QAAkB,EAClBC,UAAoB,EACpB;EACAD,QAAQ,CAACE,SAAS,GAAGC,MAAM,CAACC,MAAM,CAACH,UAAU,CAACC,SAAS,CAAC;EACxDF,QAAQ,CAACE,SAAS,CAACG,WAAW,GAAGL,QAAQ;EACzC,IAAAM,uBAAc,EAACN,QAAQ,EAAEC,UAAU,CAAC;AACtC","ignoreList":[]}
|
||||
@@ -0,0 +1,121 @@
|
||||
'use strict';
|
||||
|
||||
/*eslint-disable max-len*/
|
||||
|
||||
var YAMLException = require('./exception');
|
||||
var Type = require('./type');
|
||||
|
||||
|
||||
function compileList(schema, name) {
|
||||
var result = [];
|
||||
|
||||
schema[name].forEach(function (currentType) {
|
||||
var newIndex = result.length;
|
||||
|
||||
result.forEach(function (previousType, previousIndex) {
|
||||
if (previousType.tag === currentType.tag &&
|
||||
previousType.kind === currentType.kind &&
|
||||
previousType.multi === currentType.multi) {
|
||||
|
||||
newIndex = previousIndex;
|
||||
}
|
||||
});
|
||||
|
||||
result[newIndex] = currentType;
|
||||
});
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
|
||||
function compileMap(/* lists... */) {
|
||||
var result = {
|
||||
scalar: {},
|
||||
sequence: {},
|
||||
mapping: {},
|
||||
fallback: {},
|
||||
multi: {
|
||||
scalar: [],
|
||||
sequence: [],
|
||||
mapping: [],
|
||||
fallback: []
|
||||
}
|
||||
}, index, length;
|
||||
|
||||
function collectType(type) {
|
||||
if (type.multi) {
|
||||
result.multi[type.kind].push(type);
|
||||
result.multi['fallback'].push(type);
|
||||
} else {
|
||||
result[type.kind][type.tag] = result['fallback'][type.tag] = type;
|
||||
}
|
||||
}
|
||||
|
||||
for (index = 0, length = arguments.length; index < length; index += 1) {
|
||||
arguments[index].forEach(collectType);
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
|
||||
function Schema(definition) {
|
||||
return this.extend(definition);
|
||||
}
|
||||
|
||||
|
||||
Schema.prototype.extend = function extend(definition) {
|
||||
var implicit = [];
|
||||
var explicit = [];
|
||||
|
||||
if (definition instanceof Type) {
|
||||
// Schema.extend(type)
|
||||
explicit.push(definition);
|
||||
|
||||
} else if (Array.isArray(definition)) {
|
||||
// Schema.extend([ type1, type2, ... ])
|
||||
explicit = explicit.concat(definition);
|
||||
|
||||
} else if (definition && (Array.isArray(definition.implicit) || Array.isArray(definition.explicit))) {
|
||||
// Schema.extend({ explicit: [ type1, type2, ... ], implicit: [ type1, type2, ... ] })
|
||||
if (definition.implicit) implicit = implicit.concat(definition.implicit);
|
||||
if (definition.explicit) explicit = explicit.concat(definition.explicit);
|
||||
|
||||
} else {
|
||||
throw new YAMLException('Schema.extend argument should be a Type, [ Type ], ' +
|
||||
'or a schema definition ({ implicit: [...], explicit: [...] })');
|
||||
}
|
||||
|
||||
implicit.forEach(function (type) {
|
||||
if (!(type instanceof Type)) {
|
||||
throw new YAMLException('Specified list of YAML types (or a single Type object) contains a non-Type object.');
|
||||
}
|
||||
|
||||
if (type.loadKind && type.loadKind !== 'scalar') {
|
||||
throw new YAMLException('There is a non-scalar type in the implicit list of a schema. Implicit resolving of such types is not supported.');
|
||||
}
|
||||
|
||||
if (type.multi) {
|
||||
throw new YAMLException('There is a multi type in the implicit list of a schema. Multi tags can only be listed as explicit.');
|
||||
}
|
||||
});
|
||||
|
||||
explicit.forEach(function (type) {
|
||||
if (!(type instanceof Type)) {
|
||||
throw new YAMLException('Specified list of YAML types (or a single Type object) contains a non-Type object.');
|
||||
}
|
||||
});
|
||||
|
||||
var result = Object.create(Schema.prototype);
|
||||
|
||||
result.implicit = (this.implicit || []).concat(implicit);
|
||||
result.explicit = (this.explicit || []).concat(explicit);
|
||||
|
||||
result.compiledImplicit = compileList(result, 'implicit');
|
||||
result.compiledExplicit = compileList(result, 'explicit');
|
||||
result.compiledTypeMap = compileMap(result.compiledImplicit, result.compiledExplicit);
|
||||
|
||||
return result;
|
||||
};
|
||||
|
||||
|
||||
module.exports = Schema;
|
||||
@@ -0,0 +1,183 @@
|
||||
/*
|
||||
MIT License http://www.opensource.org/licenses/mit-license.php
|
||||
Author Tobias Koppers @sokra
|
||||
*/
|
||||
|
||||
"use strict";
|
||||
|
||||
const forEachBail = require("./forEachBail");
|
||||
const { PathType, getType } = require("./util/path");
|
||||
|
||||
/** @typedef {import("./Resolver")} Resolver */
|
||||
/** @typedef {import("./Resolver").ResolveRequest} ResolveRequest */
|
||||
/** @typedef {import("./Resolver").ResolveStepHook} ResolveStepHook */
|
||||
/** @typedef {string | Array<string> | false} Alias */
|
||||
/** @typedef {{alias: Alias, name: string, onlyModule?: boolean}} AliasOption */
|
||||
|
||||
module.exports = class AliasPlugin {
|
||||
/**
|
||||
* @param {string | ResolveStepHook} source source
|
||||
* @param {AliasOption | Array<AliasOption>} options options
|
||||
* @param {string | ResolveStepHook} target target
|
||||
*/
|
||||
constructor(source, options, target) {
|
||||
this.source = source;
|
||||
this.options = Array.isArray(options) ? options : [options];
|
||||
this.target = target;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {Resolver} resolver the resolver
|
||||
* @returns {void}
|
||||
*/
|
||||
apply(resolver) {
|
||||
const target = resolver.ensureHook(this.target);
|
||||
/**
|
||||
* @param {string} maybeAbsolutePath path
|
||||
* @returns {null|string} absolute path with slash ending
|
||||
*/
|
||||
const getAbsolutePathWithSlashEnding = maybeAbsolutePath => {
|
||||
const type = getType(maybeAbsolutePath);
|
||||
if (type === PathType.AbsolutePosix || type === PathType.AbsoluteWin) {
|
||||
return resolver.join(maybeAbsolutePath, "_").slice(0, -1);
|
||||
}
|
||||
return null;
|
||||
};
|
||||
/**
|
||||
* @param {string} path path
|
||||
* @param {string} maybeSubPath sub path
|
||||
* @returns {boolean} true, if path is sub path
|
||||
*/
|
||||
const isSubPath = (path, maybeSubPath) => {
|
||||
const absolutePath = getAbsolutePathWithSlashEnding(maybeSubPath);
|
||||
if (!absolutePath) return false;
|
||||
return path.startsWith(absolutePath);
|
||||
};
|
||||
resolver
|
||||
.getHook(this.source)
|
||||
.tapAsync("AliasPlugin", (request, resolveContext, callback) => {
|
||||
const innerRequest = request.request || request.path;
|
||||
if (!innerRequest) return callback();
|
||||
|
||||
forEachBail(
|
||||
this.options,
|
||||
(item, callback) => {
|
||||
/** @type {boolean} */
|
||||
let shouldStop = false;
|
||||
|
||||
const matchRequest =
|
||||
innerRequest === item.name ||
|
||||
(!item.onlyModule &&
|
||||
(request.request
|
||||
? innerRequest.startsWith(`${item.name}/`)
|
||||
: isSubPath(innerRequest, item.name)));
|
||||
|
||||
const splitName = item.name.split("*");
|
||||
const matchWildcard = !item.onlyModule && splitName.length === 2;
|
||||
|
||||
if (matchRequest || matchWildcard) {
|
||||
/**
|
||||
* @param {Alias} alias alias
|
||||
* @param {(err?: null|Error, result?: null|ResolveRequest) => void} callback callback
|
||||
* @returns {void}
|
||||
*/
|
||||
const resolveWithAlias = (alias, callback) => {
|
||||
if (alias === false) {
|
||||
/** @type {ResolveRequest} */
|
||||
const ignoreObj = {
|
||||
...request,
|
||||
path: false
|
||||
};
|
||||
if (typeof resolveContext.yield === "function") {
|
||||
resolveContext.yield(ignoreObj);
|
||||
return callback(null, null);
|
||||
}
|
||||
return callback(null, ignoreObj);
|
||||
}
|
||||
|
||||
let newRequestStr;
|
||||
|
||||
const [prefix, suffix] = splitName;
|
||||
if (
|
||||
matchWildcard &&
|
||||
innerRequest.startsWith(prefix) &&
|
||||
innerRequest.endsWith(suffix)
|
||||
) {
|
||||
const match = innerRequest.slice(
|
||||
prefix.length,
|
||||
innerRequest.length - suffix.length
|
||||
);
|
||||
newRequestStr = item.alias.toString().replace("*", match);
|
||||
}
|
||||
|
||||
if (
|
||||
matchRequest &&
|
||||
innerRequest !== alias &&
|
||||
!innerRequest.startsWith(alias + "/")
|
||||
) {
|
||||
/** @type {string} */
|
||||
const remainingRequest = innerRequest.slice(item.name.length);
|
||||
newRequestStr = alias + remainingRequest;
|
||||
}
|
||||
|
||||
if (newRequestStr !== undefined) {
|
||||
shouldStop = true;
|
||||
/** @type {ResolveRequest} */
|
||||
const obj = {
|
||||
...request,
|
||||
request: newRequestStr,
|
||||
fullySpecified: false
|
||||
};
|
||||
return resolver.doResolve(
|
||||
target,
|
||||
obj,
|
||||
"aliased with mapping '" +
|
||||
item.name +
|
||||
"': '" +
|
||||
alias +
|
||||
"' to '" +
|
||||
newRequestStr +
|
||||
"'",
|
||||
resolveContext,
|
||||
(err, result) => {
|
||||
if (err) return callback(err);
|
||||
if (result) return callback(null, result);
|
||||
return callback();
|
||||
}
|
||||
);
|
||||
}
|
||||
return callback();
|
||||
};
|
||||
|
||||
/**
|
||||
* @param {null|Error} [err] error
|
||||
* @param {null|ResolveRequest} [result] result
|
||||
* @returns {void}
|
||||
*/
|
||||
const stoppingCallback = (err, result) => {
|
||||
if (err) return callback(err);
|
||||
|
||||
if (result) return callback(null, result);
|
||||
// Don't allow other aliasing or raw request
|
||||
if (shouldStop) return callback(null, null);
|
||||
return callback();
|
||||
};
|
||||
|
||||
if (Array.isArray(item.alias)) {
|
||||
return forEachBail(
|
||||
item.alias,
|
||||
resolveWithAlias,
|
||||
stoppingCallback
|
||||
);
|
||||
} else {
|
||||
return resolveWithAlias(item.alias, stoppingCallback);
|
||||
}
|
||||
}
|
||||
|
||||
return callback();
|
||||
},
|
||||
callback
|
||||
);
|
||||
});
|
||||
}
|
||||
};
|
||||
@@ -0,0 +1,489 @@
|
||||
"use strict";
|
||||
|
||||
const promisify = require("util.promisify");
|
||||
const gensync = require("../");
|
||||
|
||||
const TEST_ERROR = new Error("TEST_ERROR");
|
||||
|
||||
const DID_ERROR = new Error("DID_ERROR");
|
||||
|
||||
const doSuccess = gensync({
|
||||
sync: () => 42,
|
||||
async: () => Promise.resolve(42),
|
||||
});
|
||||
|
||||
const doError = gensync({
|
||||
sync: () => {
|
||||
throw DID_ERROR;
|
||||
},
|
||||
async: () => Promise.reject(DID_ERROR),
|
||||
});
|
||||
|
||||
function throwTestError() {
|
||||
throw TEST_ERROR;
|
||||
}
|
||||
|
||||
async function expectResult(
|
||||
fn,
|
||||
arg,
|
||||
{ error, value, expectSync = false, syncErrback = expectSync }
|
||||
) {
|
||||
if (!expectSync) {
|
||||
expect(() => fn.sync(arg)).toThrow(TEST_ERROR);
|
||||
} else if (error) {
|
||||
expect(() => fn.sync(arg)).toThrow(error);
|
||||
} else {
|
||||
expect(fn.sync(arg)).toBe(value);
|
||||
}
|
||||
|
||||
if (error) {
|
||||
await expect(fn.async(arg)).rejects.toBe(error);
|
||||
} else {
|
||||
await expect(fn.async(arg)).resolves.toBe(value);
|
||||
}
|
||||
|
||||
await new Promise((resolve, reject) => {
|
||||
let sync = true;
|
||||
fn.errback(arg, (err, val) => {
|
||||
try {
|
||||
expect(err).toBe(error);
|
||||
expect(val).toBe(value);
|
||||
expect(sync).toBe(syncErrback);
|
||||
|
||||
resolve();
|
||||
} catch (e) {
|
||||
reject(e);
|
||||
}
|
||||
});
|
||||
sync = false;
|
||||
});
|
||||
}
|
||||
|
||||
describe("gensync({})", () => {
|
||||
describe("option validation", () => {
|
||||
test("disallow async and errback handler together", () => {
|
||||
try {
|
||||
gensync({
|
||||
sync: throwTestError,
|
||||
async: throwTestError,
|
||||
errback: throwTestError,
|
||||
});
|
||||
|
||||
throwTestError();
|
||||
} catch (err) {
|
||||
expect(err.message).toMatch(
|
||||
/Expected one of either opts.async or opts.errback, but got _both_\./
|
||||
);
|
||||
expect(err.code).toBe("GENSYNC_OPTIONS_ERROR");
|
||||
}
|
||||
});
|
||||
|
||||
test("disallow missing sync handler", () => {
|
||||
try {
|
||||
gensync({
|
||||
async: throwTestError,
|
||||
});
|
||||
|
||||
throwTestError();
|
||||
} catch (err) {
|
||||
expect(err.message).toMatch(/Expected opts.sync to be a function./);
|
||||
expect(err.code).toBe("GENSYNC_OPTIONS_ERROR");
|
||||
}
|
||||
});
|
||||
|
||||
test("errback callback required", () => {
|
||||
const fn = gensync({
|
||||
sync: throwTestError,
|
||||
async: throwTestError,
|
||||
});
|
||||
|
||||
try {
|
||||
fn.errback();
|
||||
|
||||
throwTestError();
|
||||
} catch (err) {
|
||||
expect(err.message).toMatch(/function called without callback/);
|
||||
expect(err.code).toBe("GENSYNC_ERRBACK_NO_CALLBACK");
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
describe("generator function metadata", () => {
|
||||
test("automatic naming", () => {
|
||||
expect(
|
||||
gensync({
|
||||
sync: function readFileSync() {},
|
||||
async: () => {},
|
||||
}).name
|
||||
).toBe("readFile");
|
||||
expect(
|
||||
gensync({
|
||||
sync: function readFile() {},
|
||||
async: () => {},
|
||||
}).name
|
||||
).toBe("readFile");
|
||||
expect(
|
||||
gensync({
|
||||
sync: function readFileAsync() {},
|
||||
async: () => {},
|
||||
}).name
|
||||
).toBe("readFileAsync");
|
||||
|
||||
expect(
|
||||
gensync({
|
||||
sync: () => {},
|
||||
async: function readFileSync() {},
|
||||
}).name
|
||||
).toBe("readFileSync");
|
||||
expect(
|
||||
gensync({
|
||||
sync: () => {},
|
||||
async: function readFile() {},
|
||||
}).name
|
||||
).toBe("readFile");
|
||||
expect(
|
||||
gensync({
|
||||
sync: () => {},
|
||||
async: function readFileAsync() {},
|
||||
}).name
|
||||
).toBe("readFile");
|
||||
|
||||
expect(
|
||||
gensync({
|
||||
sync: () => {},
|
||||
errback: function readFileSync() {},
|
||||
}).name
|
||||
).toBe("readFileSync");
|
||||
expect(
|
||||
gensync({
|
||||
sync: () => {},
|
||||
errback: function readFile() {},
|
||||
}).name
|
||||
).toBe("readFile");
|
||||
expect(
|
||||
gensync({
|
||||
sync: () => {},
|
||||
errback: function readFileAsync() {},
|
||||
}).name
|
||||
).toBe("readFileAsync");
|
||||
});
|
||||
|
||||
test("explicit naming", () => {
|
||||
expect(
|
||||
gensync({
|
||||
name: "readFile",
|
||||
sync: () => {},
|
||||
async: () => {},
|
||||
}).name
|
||||
).toBe("readFile");
|
||||
});
|
||||
|
||||
test("default arity", () => {
|
||||
expect(
|
||||
gensync({
|
||||
sync: function(a, b, c, d, e, f, g) {
|
||||
throwTestError();
|
||||
},
|
||||
async: throwTestError,
|
||||
}).length
|
||||
).toBe(7);
|
||||
});
|
||||
|
||||
test("explicit arity", () => {
|
||||
expect(
|
||||
gensync({
|
||||
arity: 3,
|
||||
sync: throwTestError,
|
||||
async: throwTestError,
|
||||
}).length
|
||||
).toBe(3);
|
||||
});
|
||||
});
|
||||
|
||||
describe("'sync' handler", async () => {
|
||||
test("success", async () => {
|
||||
const fn = gensync({
|
||||
sync: (...args) => JSON.stringify(args),
|
||||
});
|
||||
|
||||
await expectResult(fn, 42, { value: "[42]", expectSync: true });
|
||||
});
|
||||
|
||||
test("failure", async () => {
|
||||
const fn = gensync({
|
||||
sync: (...args) => {
|
||||
throw JSON.stringify(args);
|
||||
},
|
||||
});
|
||||
|
||||
await expectResult(fn, 42, { error: "[42]", expectSync: true });
|
||||
});
|
||||
});
|
||||
|
||||
describe("'async' handler", async () => {
|
||||
test("success", async () => {
|
||||
const fn = gensync({
|
||||
sync: throwTestError,
|
||||
async: (...args) => Promise.resolve(JSON.stringify(args)),
|
||||
});
|
||||
|
||||
await expectResult(fn, 42, { value: "[42]" });
|
||||
});
|
||||
|
||||
test("failure", async () => {
|
||||
const fn = gensync({
|
||||
sync: throwTestError,
|
||||
async: (...args) => Promise.reject(JSON.stringify(args)),
|
||||
});
|
||||
|
||||
await expectResult(fn, 42, { error: "[42]" });
|
||||
});
|
||||
});
|
||||
|
||||
describe("'errback' sync handler", async () => {
|
||||
test("success", async () => {
|
||||
const fn = gensync({
|
||||
sync: throwTestError,
|
||||
errback: (...args) => args.pop()(null, JSON.stringify(args)),
|
||||
});
|
||||
|
||||
await expectResult(fn, 42, { value: "[42]", syncErrback: true });
|
||||
});
|
||||
|
||||
test("failure", async () => {
|
||||
const fn = gensync({
|
||||
sync: throwTestError,
|
||||
errback: (...args) => args.pop()(JSON.stringify(args)),
|
||||
});
|
||||
|
||||
await expectResult(fn, 42, { error: "[42]", syncErrback: true });
|
||||
});
|
||||
});
|
||||
|
||||
describe("'errback' async handler", async () => {
|
||||
test("success", async () => {
|
||||
const fn = gensync({
|
||||
sync: throwTestError,
|
||||
errback: (...args) =>
|
||||
process.nextTick(() => args.pop()(null, JSON.stringify(args))),
|
||||
});
|
||||
|
||||
await expectResult(fn, 42, { value: "[42]" });
|
||||
});
|
||||
|
||||
test("failure", async () => {
|
||||
const fn = gensync({
|
||||
sync: throwTestError,
|
||||
errback: (...args) =>
|
||||
process.nextTick(() => args.pop()(JSON.stringify(args))),
|
||||
});
|
||||
|
||||
await expectResult(fn, 42, { error: "[42]" });
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
// Tests for calling gensync() directly with a generator function, instead of
// a { sync, async, errback } options object.
describe("gensync(function* () {})", () => {
  test("sync throw before body", async () => {
    // The default-parameter initializer throws before the generator body is
    // entered, so the error surfaces synchronously (syncErrback: true).
    const fn = gensync(function*(arg = throwTestError()) {});

    await expectResult(fn, undefined, {
      error: TEST_ERROR,
      syncErrback: true,
    });
  });

  test("sync throw inside body", async () => {
    // A throw before any yield is still synchronous from the caller's view.
    const fn = gensync(function*() {
      throwTestError();
    });

    await expectResult(fn, undefined, {
      error: TEST_ERROR,
      syncErrback: true,
    });
  });

  test("async throw inside body", async () => {
    const fn = gensync(function*() {
      // The delegated yield runs first, so the subsequent throw is reported
      // without the syncErrback flag.
      const val = yield* doSuccess();
      throwTestError();
    });

    await expectResult(fn, undefined, {
      error: TEST_ERROR,
    });
  });

  test("error inside body", async () => {
    // Errors produced by a delegated gensync operation propagate out.
    const fn = gensync(function*() {
      yield* doError();
    });

    await expectResult(fn, undefined, {
      error: DID_ERROR,
      expectSync: true,
      syncErrback: false,
    });
  });

  test("successful return value", async () => {
    const fn = gensync(function*() {
      // Delegated operations produce their value via yield*.
      const value = yield* doSuccess();

      expect(value).toBe(42);

      return 84;
    });

    await expectResult(fn, undefined, {
      value: 84,
      expectSync: true,
      syncErrback: false,
    });
  });

  test("successful final value", async () => {
    const fn = gensync(function*() {
      return 42;
    });

    await expectResult(fn, undefined, {
      value: 42,
      expectSync: true,
    });
  });

  test("yield unexpected object", async () => {
    // Yielding anything other than the gensync protocol values is an error.
    const fn = gensync(function*() {
      yield {};
    });

    try {
      await fn.async();

      // Reaching this line means no error was thrown — fail loudly.
      throwTestError();
    } catch (err) {
      expect(err.message).toMatch(
        /Got unexpected yielded value in gensync generator/
      );
      expect(err.code).toBe("GENSYNC_EXPECTED_START");
    }
  });

  test("yield suspend yield", async () => {
    const fn = gensync(function*() {
      yield Symbol.for("gensync:v1:start");

      // Should be "yield*" for no error.
      yield {};
    });

    try {
      await fn.async();

      // Reaching this line means no error was thrown — fail loudly.
      throwTestError();
    } catch (err) {
      expect(err.message).toMatch(/Expected GENSYNC_SUSPEND, got {}/);
      expect(err.code).toBe("GENSYNC_EXPECTED_SUSPEND");
    }
  });

  test("yield suspend return", async () => {
    const fn = gensync(function*() {
      yield Symbol.for("gensync:v1:start");

      // Should be "yield*" for no error.
      return {};
    });

    try {
      await fn.async();

      // Reaching this line means no error was thrown — fail loudly.
      throwTestError();
    } catch (err) {
      expect(err.message).toMatch(/Unexpected generator completion/);
      expect(err.code).toBe("GENSYNC_EXPECTED_SUSPEND");
    }
  });
});
|
||||
|
||||
// Tests for gensync.all(), the Promise.all analogue for gensync operations.
describe("gensync.all()", () => {
  test("success", async () => {
    const fn = gensync(function*() {
      const result = yield* gensync.all([doSuccess(), doSuccess()]);

      // Results are returned as an array, in input order.
      expect(result).toEqual([42, 42]);
    });

    await expectResult(fn, undefined, {
      value: undefined,
      expectSync: true,
      syncErrback: false,
    });
  });

  test("error first", async () => {
    // An error in the first operation rejects the combined result.
    const fn = gensync(function*() {
      yield* gensync.all([doError(), doSuccess()]);
    });

    await expectResult(fn, undefined, {
      error: DID_ERROR,
      expectSync: true,
      syncErrback: false,
    });
  });

  test("error last", async () => {
    // An error in the final operation must also reject the combined result.
    const fn = gensync(function*() {
      yield* gensync.all([doSuccess(), doError()]);
    });

    await expectResult(fn, undefined, {
      error: DID_ERROR,
      expectSync: true,
      syncErrback: false,
    });
  });

  test("empty list", async () => {
    // An empty input list completes cleanly with no value.
    const fn = gensync(function*() {
      yield* gensync.all([]);
    });

    await expectResult(fn, undefined, {
      value: undefined,
      expectSync: true,
      syncErrback: false,
    });
  });
});
|
||||
|
||||
// Tests for gensync.race(), the Promise.race analogue for gensync operations.
describe("gensync.race()", () => {
  test("success", async () => {
    const fn = gensync(function*() {
      // With the successful operation listed first, its value wins the race.
      const result = yield* gensync.race([doSuccess(), doError()]);

      expect(result).toEqual(42);
    });

    await expectResult(fn, undefined, {
      value: undefined,
      expectSync: true,
      syncErrback: false,
    });
  });

  test("error", async () => {
    // With the erroring operation listed first, the race rejects.
    const fn = gensync(function*() {
      yield* gensync.race([doError(), doSuccess()]);
    });

    await expectResult(fn, undefined, {
      error: DID_ERROR,
      expectSync: true,
      syncErrback: false,
    });
  });
});
|
||||
@@ -0,0 +1,16 @@
|
||||
import { useMatch } from "./useMatch.js";
|
||||
/**
 * Hook that selects the path params of a matched route via useMatch.
 *
 * Forwards `from`, `strict`, `shouldThrow` and `structuralSharing` untouched,
 * and narrows the resulting match down to its `params` object — optionally
 * post-processed by the caller-supplied `opts.select`.
 */
function useParams(opts) {
  const pickParams = (match) =>
    opts.select ? opts.select(match.params) : match.params;

  return useMatch({
    from: opts.from,
    strict: opts.strict,
    shouldThrow: opts.shouldThrow,
    structuralSharing: opts.structuralSharing,
    select: pickParams,
  });
}
export {
  useParams
};
//# sourceMappingURL=useParams.js.map
|
||||
@@ -0,0 +1,43 @@
|
||||
/**
|
||||
* Utility for resolving a module relative to another module
|
||||
* @author Teddy Katz
|
||||
*/
|
||||
|
||||
import Module from "node:module";
|
||||
|
||||
/*
|
||||
* `Module.createRequire` is added in v12.2.0. It supports URL as well.
|
||||
* We only support the case where the argument is a filepath, not a URL.
|
||||
*/
|
||||
const createRequire = Module.createRequire;

/**
 * Resolves a Node module relative to another module.
 * @param {string} moduleName The name of a Node module, or a path to a Node module.
 * @param {string} relativeToPath An absolute path to the file that `moduleName`
 * should be resolved relative to. This must be a file rather than a directory,
 * but the file need not actually exist.
 * @returns {string} The absolute path that `require.resolve(moduleName)` would
 * produce in a file located at `relativeToPath`.
 * @throws {Error} When the module cannot be resolved.
 */
function resolve(moduleName, relativeToPath) {
  try {
    return createRequire(relativeToPath).resolve(moduleName);
  } catch (error) {
    // Older Node.js (< 12.0.0) produced MODULE_NOT_FOUND errors without a
    // require stack; append one so callers get consistent diagnostics.
    // This block can be removed once those versions are out of support.
    const needsStack =
      typeof error === "object" &&
      error !== null &&
      error.code === "MODULE_NOT_FOUND" &&
      !error.requireStack &&
      error.message.includes(moduleName);

    if (needsStack) {
      error.message += `\nRequire stack:\n- ${relativeToPath}`;
    }

    throw error;
  }
}

export {
  resolve
};
|
||||
@@ -0,0 +1 @@
|
||||
module.exports={A:{A:{"2":"K D E F A B mC"},B:{"1":"CB DB EB FB GB HB IB JB KB LB MB NB OB I","2":"9 C L M G N O P Q H R S T U V W X Y Z a b c d e f g h i j k l m n o p q r s t u v w AB BB","4162":"0 x y z"},C:{"1":"0 9 w x y z AB BB CB DB EB FB GB HB IB JB KB LB MB NB OB I PC EC QC RC oC pC","2":"1 2 3 4 5 6 7 8 nC LC J PB K D E F A B C L M G N O P QB RB SB TB UB VB WB XB YB ZB aB bB cB dB eB fB gB hB iB jB kB lB mB nB oB pB qB rB sB tB uB vB MC wB NC xB yB zB 0B 1B 2B 3B 4B 5B 6B 7B 8B 9B AC BC qC rC","194":"CC DC Q H R OC S T U V W X Y Z a b","257":"c d e f g h i j k l m n o p q r s t","2049":"u v"},D:{"1":"0 9 U V W X Y Z a b c d e f g h i j k l m n o p q r s t u v w x y z AB BB CB DB EB FB GB HB IB JB KB LB MB NB OB I PC EC QC RC","2":"1 2 3 4 5 6 7 8 J PB K D E F A B C L M G N O P QB RB SB TB UB VB WB XB YB ZB aB bB cB dB eB fB gB hB iB jB kB lB mB nB oB pB qB rB sB tB uB vB MC wB NC xB yB zB 0B 1B 2B 3B 4B 5B 6B 7B 8B 9B AC BC CC DC Q H R S T"},E:{"1":"ZC aC 1C JC bC cC dC eC fC 2C KC gC hC iC jC 3C","2":"J PB K D E F A B C L M G sC SC tC uC vC wC TC FC GC xC yC zC UC VC HC 0C IC","1796":"WC XC YC"},F:{"1":"0 6B 7B 8B 9B AC BC CC DC Q H R OC S T U V W X Y Z a b c d e f g h i j k l m n o p q r s t u v w x y z","2":"1 2 3 4 5 6 7 8 F B C G N O P QB RB SB TB UB VB WB XB YB ZB aB bB cB dB eB fB gB hB iB jB kB lB mB nB oB pB qB rB sB tB uB vB wB xB yB zB 0B 1B 2B 3B 4B 5B 4C 5C 6C 7C FC kC 8C GC"},G:{"1":"ZC aC UD JC bC cC dC eC fC VD KC gC hC iC jC","2":"E SC 9C lC AD BD CD DD ED FD GD HD ID JD KD LD MD ND OD PD QD RD SD UC VC HC TD","1281":"IC WC XC YC"},H:{"2":"WD"},I:{"1":"I","2":"LC J XD YD ZD aD lC bD cD"},J:{"2":"D A"},K:{"1":"H","2":"A B C FC kC GC"},L:{"1":"I"},M:{"1":"EC"},N:{"2":"A B"},O:{"1":"HC"},P:{"1":"1 2 3 4 5 6 7 8 lD mD IC JC KC nD","2":"J dD eD fD gD hD TC iD jD kD"},Q:{"2":"oD"},R:{"1":"pD"},S:{"2":"qD rD"}},B:6,C:"AVIF image format",D:true};
|
||||
@@ -0,0 +1,48 @@
|
||||
// Maps browserslist browser names onto the names the targets object should
// use. A string value means "fold this mobile variant into its desktop
// counterpart"; a `null` value means the browser has no equivalent target
// and its entries are skipped entirely.
const BROWSER_MAPPING = {
  and_chr: 'chrome',
  and_ff: 'firefox',
  ie_mob: 'ie',
  op_mob: 'opera',
  and_qq: null,
  and_uc: null,
  baidu: null,
  bb: null,
  kaios: null,
  op_mini: null,
};

/**
 * Converts a browserslist result (e.g. ['chrome 100', 'and_chr 99']) into a
 * targets object mapping browser name to the minimum supported version,
 * encoded as (major << 16) | (minor << 8) | patch.
 *
 * Bug fix: the rename half of BROWSER_MAPPING is now applied. Previously only
 * the `null` (skip) entries were consulted and targets were keyed by the raw
 * browserslist name, leaving the 'chrome'/'firefox'/… values dead and
 * producing keys such as "and_chr" that downstream consumers don't recognize.
 */
function browserslistToTargets(browserslist) {
  let targets = {};
  for (let browser of browserslist) {
    let [rawName, v] = browser.split(' ');

    // Browsers explicitly mapped to null have no supported target.
    if (BROWSER_MAPPING[rawName] === null) {
      continue;
    }

    // Unparseable versions (e.g. "safari TP") are skipped.
    let version = parseVersion(v);
    if (version == null) {
      continue;
    }

    // Fold mobile variants into their desktop counterpart; keep the
    // original name for browsers without a mapping entry.
    let name = BROWSER_MAPPING[rawName] ?? rawName;

    // Keep the lowest (minimum) version seen for each target.
    if (targets[name] == null || version < targets[name]) {
      targets[name] = version;
    }
  }

  return targets;
}

/**
 * Parses a browserslist version string into the packed integer encoding.
 * Range versions ("15.2-15.3") use their lower bound; missing minor/patch
 * default to 0. Returns null for non-numeric versions.
 */
function parseVersion(version) {
  let [major, minor = 0, patch = 0] = version
    .split('-')[0]
    .split('.')
    .map(v => parseInt(v, 10));

  if (isNaN(major) || isNaN(minor) || isNaN(patch)) {
    return null;
  }

  return (major << 16) | (minor << 8) | patch;
}

module.exports = browserslistToTargets;
|
||||
@@ -0,0 +1 @@
|
||||
module.exports={A:{A:{"2":"K D E F A B mC"},B:{"1":"0 9 G N O P Q H R S T U V W X Y Z a b c d e f g h i j k l m n o p q r s t u v w x y z AB BB CB DB EB FB GB HB IB JB KB LB MB NB OB I","16":"C L M"},C:{"1":"0 6 7 8 9 RB SB TB UB VB WB XB YB ZB aB bB cB dB eB fB gB hB iB jB kB lB mB nB oB pB qB rB sB tB uB vB MC wB NC xB yB zB 0B 1B 2B 3B 4B 5B 6B 7B 8B 9B AC BC CC DC Q H R OC S T U V W X Y Z a b c d e f g h i j k l m n o p q r s t u v w x y z AB BB CB DB EB FB GB HB IB JB KB LB MB NB OB I PC EC QC RC oC pC","2":"1 2 3 4 5 nC LC J PB K D E F A B C L M G N O P QB qC rC"},D:{"1":"0 9 iB jB kB lB mB nB oB pB qB rB sB tB uB vB MC wB NC xB yB zB 0B 1B 2B 3B 4B 5B 6B 7B 8B 9B AC BC CC DC Q H R S T U V W X Y Z a b c d e f g h i j k l m n o p q r s t u v w x y z AB BB CB DB EB FB GB HB IB JB KB LB MB NB OB I PC EC QC RC","2":"1 2 3 4 5 6 7 8 J PB K D E F A B C L M G N O P QB RB SB TB UB VB WB XB YB ZB aB bB cB dB eB fB gB hB"},E:{"1":"E F A B C L M G vC wC TC FC GC xC yC zC UC VC HC 0C IC WC XC YC ZC aC 1C JC bC cC dC eC fC 2C KC gC hC iC jC 3C","2":"J PB K D sC SC tC uC"},F:{"1":"0 VB WB XB YB ZB aB bB cB dB eB fB gB hB iB jB kB lB mB nB oB pB qB rB sB tB uB vB wB xB yB zB 0B 1B 2B 3B 4B 5B 6B 7B 8B 9B AC BC CC DC Q H R OC S T U V W X Y Z a b c d e f g h i j k l m n o p q r s t u v w x y z","2":"1 2 3 4 5 6 7 8 F B C G N O P QB RB SB TB UB 4C 5C 6C 7C FC kC 8C GC"},G:{"1":"E DD ED FD GD HD ID JD KD LD MD ND OD PD QD RD SD UC VC HC TD IC WC XC YC ZC aC UD JC bC cC dC eC fC VD KC gC hC iC jC","2":"SC 9C lC AD BD CD"},H:{"2":"WD"},I:{"1":"I","2":"LC J XD YD ZD aD lC bD cD"},J:{"2":"D","16":"A"},K:{"1":"H","2":"A B C FC kC GC"},L:{"1":"I"},M:{"1":"EC"},N:{"2":"A B"},O:{"1":"HC"},P:{"1":"1 2 3 4 5 6 7 8 dD eD fD gD hD TC iD jD kD lD mD IC JC KC nD","2":"J"},Q:{"1":"oD"},R:{"1":"pD"},S:{"1":"qD rD"}},B:6,C:"Array.prototype.find",D:true};
|
||||
@@ -0,0 +1,282 @@
|
||||
# Acorn
|
||||
|
||||
A tiny, fast JavaScript parser written in JavaScript.
|
||||
|
||||
## Community
|
||||
|
||||
Acorn is open source software released under an
|
||||
[MIT license](https://github.com/acornjs/acorn/blob/master/acorn/LICENSE).
|
||||
|
||||
You are welcome to
|
||||
[report bugs](https://github.com/acornjs/acorn/issues) or create pull
|
||||
requests on [github](https://github.com/acornjs/acorn).
|
||||
|
||||
## Installation
|
||||
|
||||
The easiest way to install acorn is from [`npm`](https://www.npmjs.com/):
|
||||
|
||||
```sh
|
||||
npm install acorn
|
||||
```
|
||||
|
||||
Alternately, you can download the source and build acorn yourself:
|
||||
|
||||
```sh
|
||||
git clone https://github.com/acornjs/acorn.git
|
||||
cd acorn
|
||||
npm install
|
||||
```
|
||||
|
||||
## Interface
|
||||
|
||||
**parse**`(input, options)` is the main interface to the library. The
|
||||
`input` parameter is a string, `options` must be an object setting
|
||||
some of the options listed below. The return value will be an abstract
|
||||
syntax tree object as specified by the [ESTree
|
||||
spec](https://github.com/estree/estree).
|
||||
|
||||
```javascript
|
||||
let acorn = require("acorn");
|
||||
console.log(acorn.parse("1 + 1", {ecmaVersion: 2020}));
|
||||
```
|
||||
|
||||
When encountering a syntax error, the parser will raise a
|
||||
`SyntaxError` object with a meaningful message. The error object will
|
||||
have a `pos` property that indicates the string offset at which the
|
||||
error occurred, and a `loc` object that contains a `{line, column}`
|
||||
object referring to that same position.
|
||||
|
||||
Options are provided in a second argument, which should be an
object containing any of these fields (only `ecmaVersion` is
required):
||||
|
||||
- **ecmaVersion**: Indicates the ECMAScript version to parse. Can be a
|
||||
number, either in year (`2022`) or plain version number (`6`) form,
|
||||
or `"latest"` (the latest the library supports). This influences
|
||||
support for strict mode, the set of reserved words, and support for
|
||||
new syntax features.
|
||||
|
||||
**NOTE**: Only 'stage 4' (finalized) ECMAScript features are being
|
||||
implemented by Acorn. Other proposed new features must be
|
||||
implemented through plugins.
|
||||
|
||||
- **sourceType**: Indicate the mode the code should be parsed in. Can be
|
||||
either `"script"` or `"module"`. This influences global strict mode
|
||||
and parsing of `import` and `export` declarations.
|
||||
|
||||
**NOTE**: If set to `"module"`, then static `import` / `export` syntax
|
||||
will be valid, even if `ecmaVersion` is less than 6.
|
||||
|
||||
- **onInsertedSemicolon**: If given a callback, that callback will be
|
||||
called whenever a missing semicolon is inserted by the parser. The
|
||||
callback will be given the character offset of the point where the
|
||||
semicolon is inserted as argument, and if `locations` is on, also a
|
||||
`{line, column}` object representing this position.
|
||||
|
||||
- **onTrailingComma**: Like `onInsertedSemicolon`, but for trailing
|
||||
commas.
|
||||
|
||||
- **allowReserved**: If `false`, using a reserved word will generate
|
||||
an error. Defaults to `true` for `ecmaVersion` 3, `false` for higher
|
||||
versions. When given the value `"never"`, reserved words and
|
||||
keywords can also not be used as property names (as in Internet
|
||||
Explorer's old parser).
|
||||
|
||||
- **allowReturnOutsideFunction**: By default, a return statement at
|
||||
the top level raises an error. Set this to `true` to accept such
|
||||
code.
|
||||
|
||||
- **allowImportExportEverywhere**: By default, `import` and `export`
|
||||
declarations can only appear at a program's top level. Setting this
|
||||
option to `true` allows them anywhere where a statement is allowed,
|
||||
and also allows `import.meta` expressions to appear in scripts
|
||||
(when `sourceType` is not `"module"`).
|
||||
|
||||
- **allowAwaitOutsideFunction**: If `false`, `await` expressions can
|
||||
only appear inside `async` functions. Defaults to `true` in modules
|
||||
for `ecmaVersion` 2022 and later, `false` for lower versions.
|
||||
Setting this option to `true` allows to have top-level `await`
|
||||
expressions. They are still not allowed in non-`async` functions,
|
||||
though.
|
||||
|
||||
- **allowSuperOutsideMethod**: By default, `super` outside a method
|
||||
raises an error. Set this to `true` to accept such code.
|
||||
|
||||
- **allowHashBang**: When this is enabled, if the code starts with the
|
||||
characters `#!` (as in a shellscript), the first line will be
|
||||
treated as a comment. Defaults to true when `ecmaVersion` >= 2023.
|
||||
|
||||
- **checkPrivateFields**: By default, the parser will verify that
|
||||
private properties are only used in places where they are valid and
|
||||
have been declared. Set this to false to turn such checks off.
|
||||
|
||||
- **locations**: When `true`, each node has a `loc` object attached
|
||||
with `start` and `end` subobjects, each of which contains the
|
||||
one-based line and zero-based column numbers in `{line, column}`
|
||||
form. Default is `false`.
|
||||
|
||||
- **onToken**: If a function is passed for this option, each found
|
||||
token will be passed in same format as tokens returned from
|
||||
`tokenizer().getToken()`.
|
||||
|
||||
If array is passed, each found token is pushed to it.
|
||||
|
||||
Note that you are not allowed to call the parser from the
|
||||
callback—that will corrupt its internal state.
|
||||
|
||||
- **onComment**: If a function is passed for this option, whenever a
|
||||
comment is encountered the function will be called with the
|
||||
following parameters:
|
||||
|
||||
  - `block`: `true` if the comment is a block comment, `false` if it
    is a line comment.
|
||||
- `text`: The content of the comment.
|
||||
- `start`: Character offset of the start of the comment.
|
||||
- `end`: Character offset of the end of the comment.
|
||||
|
||||
When the `locations` options is on, the `{line, column}` locations
|
||||
of the comment’s start and end are passed as two additional
|
||||
parameters.
|
||||
|
||||
If array is passed for this option, each found comment is pushed
|
||||
to it as object in Esprima format:
|
||||
|
||||
```javascript
|
||||
{
|
||||
"type": "Line" | "Block",
|
||||
"value": "comment text",
|
||||
"start": Number,
|
||||
"end": Number,
|
||||
// If `locations` option is on:
|
||||
"loc": {
|
||||
"start": {line: Number, column: Number}
|
||||
"end": {line: Number, column: Number}
|
||||
},
|
||||
// If `ranges` option is on:
|
||||
"range": [Number, Number]
|
||||
}
|
||||
```
|
||||
|
||||
Note that you are not allowed to call the parser from the
|
||||
callback—that will corrupt its internal state.
|
||||
|
||||
- **ranges**: Nodes have their start and end character offsets
  recorded in `start` and `end` properties (directly on the node,
  rather than in the `loc` object, which holds line/column data). To
  also add a
|
||||
[semi-standardized](https://bugzilla.mozilla.org/show_bug.cgi?id=745678)
|
||||
`range` property holding a `[start, end]` array with the same
|
||||
numbers, set the `ranges` option to `true`.
|
||||
|
||||
- **program**: It is possible to parse multiple files into a single
|
||||
AST by passing the tree produced by parsing the first file as the
|
||||
`program` option in subsequent parses. This will add the toplevel
|
||||
forms of the parsed file to the "Program" (top) node of an existing
|
||||
parse tree.
|
||||
|
||||
- **sourceFile**: When the `locations` option is `true`, you can pass
|
||||
this option to add a `source` attribute in every node’s `loc`
|
||||
object. Note that the contents of this option are not examined or
|
||||
processed in any way; you are free to use whatever format you
|
||||
choose.
|
||||
|
||||
- **directSourceFile**: Like `sourceFile`, but a `sourceFile` property
|
||||
will be added (regardless of the `location` option) directly to the
|
||||
nodes, rather than the `loc` object.
|
||||
|
||||
- **preserveParens**: If this option is `true`, parenthesized expressions
|
||||
are represented by (non-standard) `ParenthesizedExpression` nodes
|
||||
that have a single `expression` property containing the expression
|
||||
inside parentheses.
|
||||
|
||||
**parseExpressionAt**`(input, offset, options)` will parse a single
|
||||
expression in a string, and return its AST. It will not complain if
|
||||
there is more of the string left after the expression.
|
||||
|
||||
**tokenizer**`(input, options)` returns an object with a `getToken`
|
||||
method that can be called repeatedly to get the next token, a `{start,
|
||||
end, type, value}` object (with added `loc` property when the
|
||||
`locations` option is enabled and `range` property when the `ranges`
|
||||
option is enabled). When the token's type is `tokTypes.eof`, you
|
||||
should stop calling the method, since it will keep returning that same
|
||||
token forever.
|
||||
|
||||
Note that tokenizing JavaScript without parsing it is, in modern
|
||||
versions of the language, not really possible due to the way syntax is
|
||||
overloaded in ways that can only be disambiguated by the parse
|
||||
context. This package applies a bunch of heuristics to try and do a
|
||||
reasonable job, but you are advised to use `parse` with the `onToken`
|
||||
option instead of this.
|
||||
|
||||
In ES6 environment, returned result can be used as any other
|
||||
protocol-compliant iterable:
|
||||
|
||||
```javascript
|
||||
for (let token of acorn.tokenizer(str)) {
|
||||
// iterate over the tokens
|
||||
}
|
||||
|
||||
// transform code to array of tokens:
|
||||
var tokens = [...acorn.tokenizer(str)];
|
||||
```
|
||||
|
||||
**tokTypes** holds an object mapping names to the token type objects
|
||||
that end up in the `type` properties of tokens.
|
||||
|
||||
**getLineInfo**`(input, offset)` can be used to get a `{line,
|
||||
column}` object for a given program string and offset.
|
||||
|
||||
### The `Parser` class
|
||||
|
||||
Instances of the **`Parser`** class contain all the state and logic
|
||||
that drives a parse. It has static methods `parse`,
|
||||
`parseExpressionAt`, and `tokenizer` that match the top-level
|
||||
functions by the same name.
|
||||
|
||||
When extending the parser with plugins, you need to call these methods
|
||||
on the extended version of the class. To extend a parser with plugins,
|
||||
you can use its static `extend` method.
|
||||
|
||||
```javascript
|
||||
var acorn = require("acorn");
|
||||
var jsx = require("acorn-jsx");
|
||||
var JSXParser = acorn.Parser.extend(jsx());
|
||||
JSXParser.parse("foo(<bar/>)", {ecmaVersion: 2020});
|
||||
```
|
||||
|
||||
The `extend` method takes any number of plugin values, and returns a
|
||||
new `Parser` class that includes the extra parser logic provided by
|
||||
the plugins.
|
||||
|
||||
## Command line interface
|
||||
|
||||
The `bin/acorn` utility can be used to parse a file from the command
|
||||
line. It accepts as arguments its input file and the following
|
||||
options:
|
||||
|
||||
- `--ecma3|--ecma5|--ecma6|--ecma7|--ecma8|--ecma9|--ecma10`: Sets the ECMAScript version
|
||||
to parse. Default is version 9.
|
||||
|
||||
- `--module`: Sets the parsing mode to `"module"`. Is set to `"script"` otherwise.
|
||||
|
||||
- `--locations`: Attaches a "loc" object to each node with "start" and
|
||||
"end" subobjects, each of which contains the one-based line and
|
||||
zero-based column numbers in `{line, column}` form.
|
||||
|
||||
- `--allow-hash-bang`: If the code starts with the characters #! (as
|
||||
in a shellscript), the first line will be treated as a comment.
|
||||
|
||||
- `--allow-await-outside-function`: Allows top-level `await` expressions.
|
||||
See the `allowAwaitOutsideFunction` option for more information.
|
||||
|
||||
- `--compact`: No whitespace is used in the AST output.
|
||||
|
||||
- `--silent`: Do not output the AST, just return the exit status.
|
||||
|
||||
- `--help`: Print the usage information and quit.
|
||||
|
||||
The utility spits out the syntax tree as JSON data.
|
||||
|
||||
## Existing plugins
|
||||
|
||||
- [`acorn-jsx`](https://github.com/RReverser/acorn-jsx): Parse [Facebook JSX syntax extensions](https://github.com/facebook/jsx)
|
||||
@@ -0,0 +1,12 @@
|
||||
"use strict";
|
||||
|
||||
Object.defineProperty(exports, "__esModule", {
|
||||
value: true
|
||||
});
|
||||
exports.default = _OverloadYield;
|
||||
function _OverloadYield(value, kind) {
|
||||
this.v = value;
|
||||
this.k = kind;
|
||||
}
|
||||
|
||||
//# sourceMappingURL=OverloadYield.js.map
|
||||
@@ -0,0 +1 @@
|
||||
{"version":3,"file":"routerContext.js","sources":["../../src/routerContext.tsx"],"sourcesContent":["import * as React from 'react'\nimport type { AnyRouter } from '@tanstack/router-core'\n\ndeclare global {\n interface Window {\n __TSR_ROUTER_CONTEXT__?: React.Context<AnyRouter>\n }\n}\n\nconst routerContext = React.createContext<AnyRouter>(null!)\n\nexport function getRouterContext() {\n if (typeof document === 'undefined') {\n return routerContext\n }\n\n if (window.__TSR_ROUTER_CONTEXT__) {\n return window.__TSR_ROUTER_CONTEXT__\n }\n\n window.__TSR_ROUTER_CONTEXT__ = routerContext as any\n\n return routerContext\n}\n"],"names":[],"mappings":";AASA,MAAM,gBAAgB,MAAM,cAAyB,IAAK;AAEnD,SAAS,mBAAmB;AAC7B,MAAA,OAAO,aAAa,aAAa;AAC5B,WAAA;AAAA,EAAA;AAGT,MAAI,OAAO,wBAAwB;AACjC,WAAO,OAAO;AAAA,EAAA;AAGhB,SAAO,yBAAyB;AAEzB,SAAA;AACT;"}
|
||||
@@ -0,0 +1 @@
|
||||
module.exports={A:{A:{"2":"K D E F A B mC"},B:{"1":"Q H R S T U V W X Y Z a b c d e f g h i j k l m n o p q r s t u v w","2":"C L M G N O P","130":"0 9 x y z AB BB CB DB EB FB GB HB IB JB KB LB MB NB OB I"},C:{"2":"0 1 2 3 4 5 6 7 8 9 nC LC J PB K D E F A B C L M G N O P QB RB SB TB UB VB WB XB YB ZB aB bB cB dB eB fB gB hB iB jB kB lB mB nB oB pB qB rB sB tB uB vB MC wB NC xB yB zB 0B 1B 2B 3B 4B 5B 6B 7B 8B 9B AC BC CC DC Q H R OC S T U V W X Y Z a b c d e f g h i j k l m n o p q r s t u v w x y z AB BB CB DB EB FB GB HB IB JB KB LB MB NB OB I PC EC QC RC oC pC qC rC"},D:{"1":"1 2 3 4 5 6 7 8 G N O P QB RB SB TB UB VB WB XB YB ZB aB bB cB dB eB fB gB hB iB jB kB lB mB nB oB pB qB rB sB tB uB vB MC wB NC xB yB zB 0B 1B 2B 3B 4B 5B 6B 7B 8B 9B AC BC CC DC Q H R S T U V W X Y Z a b c d e f g h i j k l m n o p q r s t u v w","16":"J PB K D E F A B C L M","130":"0 9 x y z AB BB CB DB EB FB GB HB IB JB KB LB MB NB OB I PC EC QC RC"},E:{"1":"J PB K D E F A B tC uC vC wC TC FC","16":"sC SC","130":"C L M G GC xC yC zC UC VC HC 0C IC WC XC YC ZC aC 1C JC bC cC dC eC fC 2C KC gC hC iC jC 3C"},F:{"1":"1 2 3 4 5 6 7 8 G N O P QB RB SB TB UB VB WB XB YB ZB aB bB cB dB eB fB gB hB iB jB kB lB mB nB oB pB qB rB sB tB uB vB wB xB yB zB 0B 1B 2B 3B 4B 5B 6B 7B 8B 9B AC BC CC DC Q H R OC S T U V W X Y Z a b c d e f g h i","2":"F B C 4C 5C 6C 7C FC kC 8C GC","130":"0 j k l m n o p q r s t u v w x y z"},G:{"1":"E 9C lC AD BD CD DD ED FD GD HD ID JD","16":"SC","130":"KD LD MD ND OD PD QD RD SD UC VC HC TD IC WC XC YC ZC aC UD JC bC cC dC eC fC VD KC gC hC iC jC"},H:{"2":"WD"},I:{"1":"LC J XD YD ZD aD lC bD cD","130":"I"},J:{"16":"D A"},K:{"1":"H","2":"A B C FC kC GC"},L:{"130":"I"},M:{"2":"EC"},N:{"2":"A B"},O:{"1":"HC"},P:{"1":"1 2 3 4 5 6 7 8 J dD eD fD gD hD TC iD jD kD lD mD IC JC KC nD"},Q:{"1":"oD"},R:{"1":"pD"},S:{"2":"qD rD"}},B:7,C:"CSS overflow: overlay",D:true};
|
||||
@@ -0,0 +1,20 @@
|
||||
The MIT License (MIT)
|
||||
|
||||
Copyright (c) Feross Aboukhadijeh
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy of
|
||||
this software and associated documentation files (the "Software"), to deal in
|
||||
the Software without restriction, including without limitation the rights to
|
||||
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
|
||||
the Software, and to permit persons to whom the Software is furnished to do so,
|
||||
subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
|
||||
FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
|
||||
COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
|
||||
IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
|
||||
CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
||||
@@ -0,0 +1,21 @@
|
||||
The MIT License (MIT)
|
||||
|
||||
Copyright (c) 2014-2017, Jon Schlinkert.
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in
|
||||
all copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||
THE SOFTWARE.
|
||||
@@ -0,0 +1,600 @@
|
||||
(function (global, factory) {
|
||||
typeof exports === 'object' && typeof module !== 'undefined' ? factory(exports, require('@jridgewell/sourcemap-codec'), require('@jridgewell/resolve-uri')) :
|
||||
typeof define === 'function' && define.amd ? define(['exports', '@jridgewell/sourcemap-codec', '@jridgewell/resolve-uri'], factory) :
|
||||
(global = typeof globalThis !== 'undefined' ? globalThis : global || self, factory(global.traceMapping = {}, global.sourcemapCodec, global.resolveURI));
|
||||
})(this, (function (exports, sourcemapCodec, resolveUri) { 'use strict';
|
||||
|
||||
/**
 * Resolves `input` against `base`, treating a non-empty `base` as a
 * directory: a trailing slash is appended when missing, matching the
 * behavior of mozilla/source-map and Chromium DevTools' SourceMap.js.
 */
function resolve(input, base) {
  let dir = base;
  if (dir && !dir.endsWith('/')) {
    dir += '/';
  }
  return resolveUri(input, dir);
}
|
||||
|
||||
/**
|
||||
* Removes everything after the last "/", but leaves the slash.
|
||||
*/
|
||||
/**
 * Removes everything after the last "/", keeping the trailing slash.
 * Returns '' for empty/falsy paths and for paths containing no slash.
 */
function stripFilename(path) {
  if (!path) {
    return '';
  }
  const lastSlash = path.lastIndexOf('/');
  return lastSlash === -1 ? '' : path.substring(0, lastSlash + 1);
}
|
||||
|
||||
// Indices into a decoded sourcemap segment tuple:
// [generatedColumn, sourcesIndex, sourceLine, sourceColumn, namesIndex].
const COLUMN = 0;
const SOURCES_INDEX = 1;
const SOURCE_LINE = 2;
const SOURCE_COLUMN = 3;
const NAMES_INDEX = 4;
// Indices into a reversed (source -> generated) mapping segment.
const REV_GENERATED_LINE = 1;
const REV_GENERATED_COLUMN = 2;
|
||||
|
||||
/**
 * Ensures every line's segments are sorted by generated column. Sorting is
 * lazy: already-sorted lines are left alone, and the outer array is only
 * copied when it is not owned by us.
 */
function maybeSort(mappings, owned) {
  const unsortedIndex = nextUnsortedSegmentLine(mappings, 0);
  // Fast path: every line is already sorted; return the input untouched.
  if (unsortedIndex === mappings.length)
    return mappings;
  // If we own the array (meaning we parsed it from JSON), then we're free to directly mutate it. If
  // not, we do not want to modify the consumer's input array.
  if (!owned)
    mappings = mappings.slice();
  // Only visit lines that are actually out of order; the step expression
  // skips ahead to the next unsorted line.
  for (let i = unsortedIndex; i < mappings.length; i = nextUnsortedSegmentLine(mappings, i + 1)) {
    mappings[i] = sortSegments(mappings[i], owned);
  }
  return mappings;
}
|
||||
/**
 * Returns the index of the first unsorted line at or after `start`,
 * or `mappings.length` when every remaining line is sorted.
 */
function nextUnsortedSegmentLine(mappings, start) {
    let i = start;
    while (i < mappings.length && isSorted(mappings[i]))
        i++;
    return i;
}
|
||||
/**
 * Checks whether a line's segments are in non-decreasing generated-column order.
 */
function isSorted(line) {
    for (let j = 1; j < line.length; j++) {
        const prev = line[j - 1][COLUMN];
        const curr = line[j][COLUMN];
        if (curr < prev)
            return false;
    }
    return true;
}
|
||||
/**
 * Sorts a line's segments by generated column. Copies first unless we own the line.
 */
function sortSegments(line, owned) {
    const target = owned ? line : line.slice();
    return target.sort(sortComparator);
}
|
||||
/**
 * Comparator ordering segments by their generated column.
 */
function sortComparator(a, b) {
    const diff = a[COLUMN] - b[COLUMN];
    return diff;
}
|
||||
|
||||
// Module-level flag set by binarySearch/memoizedBinarySearch: true when the last
// search found an exact column match. Read by callers immediately after searching.
let found = false;
/**
 * A binary search implementation that returns the index if a match is found.
 * If no match is found, then the left-index (the index associated with the item that comes just
 * before the desired index) is returned. To maintain proper sort order, a splice would happen at
 * the next index:
 *
 * ```js
 * const array = [1, 3];
 * const needle = 2;
 * const index = binarySearch(array, needle, (item, needle) => item - needle);
 *
 * assert.equal(index, 0);
 * array.splice(index + 1, 0, needle);
 * assert.deepEqual(array, [1, 2, 3]);
 * ```
 */
function binarySearch(haystack, needle, low, high) {
    while (low <= high) {
        // Overflow-safe midpoint.
        const mid = low + ((high - low) >> 1);
        const cmp = haystack[mid][COLUMN] - needle;
        if (cmp === 0) {
            found = true;
            return mid;
        }
        if (cmp < 0) {
            low = mid + 1;
        }
        else {
            high = mid - 1;
        }
    }
    // No exact match: low - 1 is the left-index (may be -1 when needle precedes all items).
    found = false;
    return low - 1;
}
|
||||
/**
 * Walks forward from `index` to the last segment whose column equals `needle`.
 */
function upperBound(haystack, needle, index) {
    let i = index;
    while (i + 1 < haystack.length && haystack[i + 1][COLUMN] === needle)
        i++;
    return i;
}
|
||||
/**
 * Walks backward from `index` to the first segment whose column equals `needle`.
 */
function lowerBound(haystack, needle, index) {
    let i = index;
    while (i - 1 >= 0 && haystack[i - 1][COLUMN] === needle)
        i--;
    return i;
}
|
||||
/**
 * Creates a fresh memo record for memoizedBinarySearch; -1 marks "no previous search".
 */
function memoizedState() {
    const state = {
        lastKey: -1,
        lastNeedle: -1,
        lastIndex: -1,
    };
    return state;
}
|
||||
/**
 * This overly complicated beast is just to record the last tested line/column and the resulting
 * index, allowing us to skip a few tests if mappings are monotonically increasing.
 */
function memoizedBinarySearch(haystack, needle, state, key) {
    const { lastKey, lastNeedle, lastIndex } = state;
    let low = 0;
    let high = haystack.length - 1;
    // Only reuse the memo when searching the same line (key) as last time.
    if (key === lastKey) {
        if (needle === lastNeedle) {
            // Exact repeat: recompute `found` from the cached index and return it.
            found = lastIndex !== -1 && haystack[lastIndex][COLUMN] === needle;
            return lastIndex;
        }
        if (needle >= lastNeedle) {
            // lastIndex may be -1 if the previous needle was not found.
            low = lastIndex === -1 ? 0 : lastIndex;
        }
        else {
            high = lastIndex;
        }
    }
    state.lastKey = key;
    state.lastNeedle = needle;
    return (state.lastIndex = binarySearch(haystack, needle, low, high));
}
|
||||
|
||||
// Rebuilds the original source files, with mappings that are ordered by source line/column instead
// of generated line/column.
function buildBySources(decoded, memos) {
    // One null-array per source; indexed by source line, holding [sourceColumn, genLine, genColumn] triples.
    const sources = memos.map(buildNullArray);
    for (let i = 0; i < decoded.length; i++) {
        const line = decoded[i];
        for (let j = 0; j < line.length; j++) {
            const seg = line[j];
            // Length-1 segments carry no source info; skip them.
            if (seg.length === 1)
                continue;
            const sourceIndex = seg[SOURCES_INDEX];
            const sourceLine = seg[SOURCE_LINE];
            const sourceColumn = seg[SOURCE_COLUMN];
            const originalSource = sources[sourceIndex];
            const originalLine = (originalSource[sourceLine] || (originalSource[sourceLine] = []));
            const memo = memos[sourceIndex];
            // The binary search either found a match, or it found the left-index just before where the
            // segment should go. Either way, we want to insert after that. And there may be multiple
            // generated segments associated with an original location, so there may need to move several
            // indexes before we find where we need to insert.
            let index = upperBound(originalLine, sourceColumn, memoizedBinarySearch(originalLine, sourceColumn, memo, sourceLine));
            memo.lastIndex = ++index;
            insert(originalLine, index, [sourceColumn, i, seg[COLUMN]]);
        }
    }
    return sources;
}
|
||||
/**
 * Inserts `value` at `index`, shifting later elements right (in-place splice without splice).
 */
function insert(array, index, value) {
    let i = array.length;
    while (i > index) {
        array[i] = array[i - 1];
        i--;
    }
    array[index] = value;
}
|
||||
// Null arrays allow us to use ordered index keys without actually allocating contiguous memory like
// a real array. We use a null-prototype object to avoid prototype pollution and deoptimizations.
// Numeric properties on objects are magically sorted in ascending order by the engine regardless of
// the insertion order. So, by setting any numeric keys, even out of order, we'll get ascending
// order when iterating with for-in.
function buildNullArray() {
    return Object.create(null);
}
|
||||
|
||||
/**
 * Accepts either a plain or a sectioned ("indexed") source map and returns a TraceMap.
 * Sectioned maps are flattened into a single presorted decoded map.
 */
const AnyMap = function (map, mapUrl) {
    const parsed = parse(map);
    // Plain (non-sectioned) maps go straight to TraceMap.
    if (!('sections' in parsed)) {
        return new TraceMap(parsed, mapUrl);
    }
    // Accumulators that recurse()/addSection() append into while flattening sections.
    const mappings = [];
    const sources = [];
    const sourcesContent = [];
    const names = [];
    const ignoreList = [];
    recurse(parsed, mapUrl, mappings, sources, sourcesContent, names, ignoreList, 0, 0, Infinity, Infinity);
    const joined = {
        version: 3,
        file: parsed.file,
        names,
        sources,
        sourcesContent,
        mappings,
        ignoreList,
    };
    // Sections were appended in generated order, so skip the sort pass.
    return presortedDecodedMap(joined);
};
|
||||
/**
 * Normalizes a map argument: JSON strings are parsed, objects pass through unchanged.
 */
function parse(map) {
    if (typeof map === 'string')
        return JSON.parse(map);
    return map;
}
|
||||
/**
 * Walks a sectioned map's `sections` array, computing for each section the
 * line/column at which the NEXT section takes over (its stop boundary), then
 * delegates to addSection to append that section's mappings.
 */
function recurse(input, mapUrl, mappings, sources, sourcesContent, names, ignoreList, lineOffset, columnOffset, stopLine, stopColumn) {
    const { sections } = input;
    for (let i = 0; i < sections.length; i++) {
        const { map, offset } = sections[i];
        let sl = stopLine;
        let sc = stopColumn;
        // The last section keeps the parent's stop boundary; earlier sections stop
        // where the following section's offset begins.
        if (i + 1 < sections.length) {
            const nextOffset = sections[i + 1].offset;
            sl = Math.min(stopLine, lineOffset + nextOffset.line);
            if (sl === stopLine) {
                sc = Math.min(stopColumn, columnOffset + nextOffset.column);
            }
            else if (sl < stopLine) {
                sc = columnOffset + nextOffset.column;
            }
        }
        addSection(map, mapUrl, mappings, sources, sourcesContent, names, ignoreList, lineOffset + offset.line, columnOffset + offset.column, sl, sc);
    }
}
|
||||
/**
 * Appends one section's map into the flattened accumulators, offsetting its
 * source/name indexes and generated positions, and truncating at the stop
 * line/column where the next section takes over. Nested sectioned maps recurse.
 */
function addSection(input, mapUrl, mappings, sources, sourcesContent, names, ignoreList, lineOffset, columnOffset, stopLine, stopColumn) {
    const parsed = parse(input);
    // A section may itself be a sectioned map; re-enter recurse with identical arguments.
    if ('sections' in parsed)
        return recurse(...arguments);
    const map = new TraceMap(parsed, mapUrl);
    // Indexes in this section are shifted by what the accumulators already hold.
    const sourcesOffset = sources.length;
    const namesOffset = names.length;
    const decoded = decodedMappings(map);
    const { resolvedSources, sourcesContent: contents, ignoreList: ignores } = map;
    append(sources, resolvedSources);
    append(names, map.names);
    if (contents)
        append(sourcesContent, contents);
    else
        // Keep sourcesContent aligned with sources even when this section has none.
        for (let i = 0; i < resolvedSources.length; i++)
            sourcesContent.push(null);
    if (ignores)
        for (let i = 0; i < ignores.length; i++)
            ignoreList.push(ignores[i] + sourcesOffset);
    for (let i = 0; i < decoded.length; i++) {
        const lineI = lineOffset + i;
        // We can only add so many lines before we step into the range that the next section's map
        // controls. When we get to the last line, then we'll start checking the segments to see if
        // they've crossed into the column range. But it may not have any columns that overstep, so we
        // still need to check that we don't overstep lines, too.
        if (lineI > stopLine)
            return;
        // The out line may already exist in mappings (if we're continuing the line started by a
        // previous section). Or, we may have jumped ahead several lines to start this section.
        const out = getLine(mappings, lineI);
        // On the 0th loop, the section's column offset shifts us forward. On all other lines (since the
        // map can be multiple lines), it doesn't.
        const cOffset = i === 0 ? columnOffset : 0;
        const line = decoded[i];
        for (let j = 0; j < line.length; j++) {
            const seg = line[j];
            const column = cOffset + seg[COLUMN];
            // If this segment steps into the column range that the next section's map controls, we need
            // to stop early.
            if (lineI === stopLine && column >= stopColumn)
                return;
            if (seg.length === 1) {
                out.push([column]);
                continue;
            }
            const sourcesIndex = sourcesOffset + seg[SOURCES_INDEX];
            const sourceLine = seg[SOURCE_LINE];
            const sourceColumn = seg[SOURCE_COLUMN];
            out.push(seg.length === 4
                ? [column, sourcesIndex, sourceLine, sourceColumn]
                : [column, sourcesIndex, sourceLine, sourceColumn, namesOffset + seg[NAMES_INDEX]]);
        }
    }
}
|
||||
/**
 * Pushes every element of `other` onto `arr` (in-place concat).
 */
function append(arr, other) {
    for (const item of other)
        arr.push(item);
}
|
||||
/**
 * Returns arr[index], creating empty lines as needed so the index exists.
 */
function getLine(arr, index) {
    while (arr.length <= index)
        arr.push([]);
    return arr[index];
}
|
||||
|
||||
// Validation messages for 1-based line / 0-based column arguments.
const LINE_GTR_ZERO = '`line` must be greater than 0 (lines start at line 1)';
const COL_GTR_EQ_ZERO = '`column` must be greater than or equal to 0 (columns start at column 0)';
// Bias values for position lookups (mirroring the `source-map` library's constants).
const LEAST_UPPER_BOUND = -1;
const GREATEST_LOWER_BOUND = 1;
|
||||
/**
 * Parses a source map (JSON string or object) and holds its fields plus lazily
 * built lookup state (_decoded/_encoded, _decodedMemo, _bySources, _bySourceMemos).
 */
class TraceMap {
    constructor(map, mapUrl) {
        const isString = typeof map === 'string';
        // An object that already has _decodedMemo is an existing TraceMap; reuse it as-is.
        if (!isString && map._decodedMemo)
            return map;
        const parsed = (isString ? JSON.parse(map) : map);
        const { version, file, names, sourceRoot, sources, sourcesContent } = parsed;
        this.version = version;
        this.file = file;
        this.names = names || [];
        this.sourceRoot = sourceRoot;
        this.sources = sources;
        this.sourcesContent = sourcesContent;
        // Prefer the spec field, falling back to the legacy x_google_ignoreList.
        this.ignoreList = parsed.ignoreList || parsed.x_google_ignoreList || undefined;
        // Resolve each source against sourceRoot joined with the map's own URL directory.
        const from = resolve(sourceRoot || '', stripFilename(mapUrl));
        this.resolvedSources = sources.map((s) => resolve(s || '', from));
        const { mappings } = parsed;
        if (typeof mappings === 'string') {
            // VLQ-encoded mappings: decode lazily in decodedMappings().
            this._encoded = mappings;
            this._decoded = undefined;
        }
        else {
            // Already-decoded mappings: sort if needed (mutating only when parsed from JSON).
            this._encoded = undefined;
            this._decoded = maybeSort(mappings, isString);
        }
        this._decodedMemo = memoizedState();
        // Built on demand by generatedPosition().
        this._bySources = undefined;
        this._bySourceMemos = undefined;
    }
}
|
||||
/**
 * Typescript doesn't allow friend access to private fields, so this just casts the map into a type
 * with public access modifiers.
 */
function cast(map) {
    // Identity at runtime; only exists for the (erased) type-level cast.
    return map;
}
|
||||
/**
 * Returns the encoded (VLQ string) form of the SourceMap's mappings field.
 * Encodes from _decoded on first call and caches the result on _encoded.
 */
function encodedMappings(map) {
    var _a;
    var _b;
    // TS-emitted nullish coalescing: return cached _encoded, else encode and cache.
    return ((_a = (_b = cast(map))._encoded) !== null && _a !== void 0 ? _a : (_b._encoded = sourcemapCodec.encode(cast(map)._decoded)));
}
|
||||
/**
 * Returns the decoded (array of lines of segments) form of the SourceMap's mappings field.
 * Decodes from _encoded on first call and caches the result on _decoded.
 */
function decodedMappings(map) {
    var _a;
    return ((_a = cast(map))._decoded || (_a._decoded = sourcemapCodec.decode(cast(map)._encoded)));
}
|
||||
/**
 * A low-level API to find the segment associated with a generated line/column (think, from a
 * stack trace). Line and column here are 0-based, unlike `originalPositionFor`.
 * Returns the raw segment array, or null when no mapping exists.
 */
function traceSegment(map, line, column) {
    const decoded = decodedMappings(map);
    // It's common for parent source maps to have pointers to lines that have no
    // mapping (like a "//# sourceMappingURL=") at the end of the child file.
    if (line >= decoded.length)
        return null;
    const segments = decoded[line];
    const index = traceSegmentInternal(segments, cast(map)._decodedMemo, line, column, GREATEST_LOWER_BOUND);
    return index === -1 ? null : segments[index];
}
|
||||
/**
 * A higher-level API to find the source/line/column associated with a generated line/column
 * (think, from a stack trace). Line is 1-based, but column is 0-based, due to legacy behavior in
 * `source-map` library.
 * Returns an OMapping; all-null fields mean no mapping was found.
 */
function originalPositionFor(map, needle) {
    let { line, column, bias } = needle;
    // Convert the 1-based input line to the 0-based decoded index.
    line--;
    if (line < 0)
        throw new Error(LINE_GTR_ZERO);
    if (column < 0)
        throw new Error(COL_GTR_EQ_ZERO);
    const decoded = decodedMappings(map);
    // It's common for parent source maps to have pointers to lines that have no
    // mapping (like a "//# sourceMappingURL=") at the end of the child file.
    if (line >= decoded.length)
        return OMapping(null, null, null, null);
    const segments = decoded[line];
    const index = traceSegmentInternal(segments, cast(map)._decodedMemo, line, column, bias || GREATEST_LOWER_BOUND);
    if (index === -1)
        return OMapping(null, null, null, null);
    const segment = segments[index];
    // Length-1 segments map a generated column to nothing in the source.
    if (segment.length === 1)
        return OMapping(null, null, null, null);
    const { names, resolvedSources } = map;
    // Source line is stored 0-based; report it 1-based like `source-map` does.
    return OMapping(resolvedSources[segment[SOURCES_INDEX]], segment[SOURCE_LINE] + 1, segment[SOURCE_COLUMN], segment.length === 5 ? names[segment[NAMES_INDEX]] : null);
}
|
||||
/**
 * Finds the generated line/column position of the provided source/line/column source position.
 */
function generatedPositionFor(map, needle) {
    const { source, line, column, bias } = needle;
    const effectiveBias = bias || GREATEST_LOWER_BOUND;
    return generatedPosition(map, source, line, column, effectiveBias, false);
}
|
||||
/**
 * Finds all generated line/column positions of the provided source/line/column source position.
 */
function allGeneratedPositionsFor(map, needle) {
    const { source, line, column, bias } = needle;
    // SourceMapConsumer uses LEAST_UPPER_BOUND for some reason, so we follow suit.
    const effectiveBias = bias || LEAST_UPPER_BOUND;
    return generatedPosition(map, source, line, column, effectiveBias, true);
}
|
||||
/**
 * Iterates each mapping in generated position order.
 * The callback receives 1-based generated/original lines and 0-based columns;
 * source/originalLine/originalColumn/name are null when the segment lacks them.
 */
function eachMapping(map, cb) {
    const decoded = decodedMappings(map);
    const { names, resolvedSources } = map;
    for (let i = 0; i < decoded.length; i++) {
        const line = decoded[i];
        for (let j = 0; j < line.length; j++) {
            const seg = line[j];
            const generatedLine = i + 1;
            const generatedColumn = seg[0];
            let source = null;
            let originalLine = null;
            let originalColumn = null;
            let name = null;
            // Segments of length >= 4 carry source info; length 5 adds a name index.
            if (seg.length !== 1) {
                source = resolvedSources[seg[1]];
                originalLine = seg[2] + 1;
                originalColumn = seg[3];
            }
            if (seg.length === 5)
                name = names[seg[4]];
            cb({
                generatedLine,
                generatedColumn,
                source,
                originalLine,
                originalColumn,
                name,
            });
        }
    }
}
|
||||
/**
 * Finds the index of `source` in the map's sources, checking the raw list first
 * and falling back to the resolved list. Returns -1 when absent from both.
 */
function sourceIndex(map, source) {
    const { sources, resolvedSources } = map;
    const direct = sources.indexOf(source);
    if (direct !== -1)
        return direct;
    return resolvedSources.indexOf(source);
}
|
||||
/**
 * Retrieves the source content for a particular source, if its found. Returns null if not.
 */
function sourceContentFor(map, source) {
    const { sourcesContent } = map;
    if (sourcesContent == null)
        return null;
    const i = sourceIndex(map, source);
    if (i === -1)
        return null;
    return sourcesContent[i];
}
|
||||
/**
 * Determines if the source is marked to ignore by the source map.
 */
function isIgnored(map, source) {
    const { ignoreList } = map;
    if (ignoreList == null)
        return false;
    const i = sourceIndex(map, source);
    if (i === -1)
        return false;
    return ignoreList.includes(i);
}
|
||||
/**
 * A helper that skips sorting of the input map's mappings array, which can be expensive for larger
 * maps.
 */
function presortedDecodedMap(map, mapUrl) {
    // Build the TraceMap with empty mappings, then splice the presorted decoded
    // mappings in directly so maybeSort never runs over them.
    const tracer = new TraceMap(clone(map, []), mapUrl);
    cast(tracer)._decoded = map.mappings;
    return tracer;
}
|
||||
/**
 * Returns a sourcemap object (with decoded mappings) suitable for passing to a library that expects
 * a sourcemap, or to JSON.stringify.
 */
function decodedMap(map) {
    const mappings = decodedMappings(map);
    return clone(map, mappings);
}
|
||||
/**
 * Returns a sourcemap object (with encoded mappings) suitable for passing to a library that expects
 * a sourcemap, or to JSON.stringify.
 */
function encodedMap(map) {
    const mappings = encodedMappings(map);
    return clone(map, mappings);
}
|
||||
/**
 * Builds a plain sourcemap object from `map`'s fields with the given mappings,
 * preferring `ignoreList` over the legacy `x_google_ignoreList`.
 */
function clone(map, mappings) {
    const { version, file, names, sourceRoot, sources, sourcesContent } = map;
    return {
        version,
        file,
        names,
        sourceRoot,
        sources,
        sourcesContent,
        mappings,
        ignoreList: map.ignoreList || map.x_google_ignoreList,
    };
}
|
||||
/**
 * Builds an original-position result object.
 */
function OMapping(source, line, column, name) {
    const mapping = { source, line, column, name };
    return mapping;
}
|
||||
/**
 * Builds a generated-position result object.
 */
function GMapping(line, column) {
    const mapping = { line, column };
    return mapping;
}
|
||||
/**
 * Locates the segment index for `column` within a line's segments, adjusted by
 * the requested bias. Relies on the module-level `found` flag set by
 * memoizedBinarySearch. Returns -1 when no segment satisfies the bias.
 */
function traceSegmentInternal(segments, memo, line, column, bias) {
    let index = memoizedBinarySearch(segments, column, memo, line);
    if (found) {
        // Exact column hit: move to the first/last segment with that column per bias.
        index = (bias === LEAST_UPPER_BOUND ? upperBound : lowerBound)(segments, column, index);
    }
    else if (bias === LEAST_UPPER_BOUND)
        // Missed: the left-index is below the needle, so step up to the next segment.
        index++;
    if (index === -1 || index === segments.length)
        return -1;
    return index;
}
|
||||
/**
 * Collects every generated position associated with a source column, returning
 * GMappings (1-based lines) for all source-order segments that share the
 * matched column. Reads the module-level `found` flag set by the search.
 */
function sliceGeneratedPositions(segments, memo, line, column, bias) {
    let min = traceSegmentInternal(segments, memo, line, column, GREATEST_LOWER_BOUND);
    // We ignored the bias when tracing the segment so that we're guarnateed to find the first (in
    // insertion order) segment that matched. Even if we did respect the bias when tracing, we would
    // still need to call `lowerBound()` to find the first segment, which is slower than just looking
    // for the GREATEST_LOWER_BOUND to begin with. The only difference that matters for us is when the
    // binary search didn't match, in which case GREATEST_LOWER_BOUND just needs to increment to
    // match LEAST_UPPER_BOUND.
    if (!found && bias === LEAST_UPPER_BOUND)
        min++;
    if (min === -1 || min === segments.length)
        return [];
    // We may have found the segment that started at an earlier column. If this is the case, then we
    // need to slice all generated segments that match _that_ column, because all such segments span
    // to our desired column.
    const matchedColumn = found ? column : segments[min][COLUMN];
    // The binary search is not guaranteed to find the lower bound when a match wasn't found.
    if (!found)
        min = lowerBound(segments, matchedColumn, min);
    const max = upperBound(segments, matchedColumn, min);
    const result = [];
    for (; min <= max; min++) {
        const segment = segments[min];
        result.push(GMapping(segment[REV_GENERATED_LINE] + 1, segment[REV_GENERATED_COLUMN]));
    }
    return result;
}
|
||||
/**
 * Shared implementation behind generatedPositionFor/allGeneratedPositionsFor.
 * Takes a 1-based line and 0-based column; returns either one GMapping or
 * (when `all`) an array of GMappings. Lazily builds the source-order index.
 */
function generatedPosition(map, source, line, column, bias, all) {
    var _a;
    // Convert 1-based input line to 0-based index.
    line--;
    if (line < 0)
        throw new Error(LINE_GTR_ZERO);
    if (column < 0)
        throw new Error(COL_GTR_EQ_ZERO);
    const { sources, resolvedSources } = map;
    let sourceIndex = sources.indexOf(source);
    if (sourceIndex === -1)
        sourceIndex = resolvedSources.indexOf(source);
    if (sourceIndex === -1)
        return all ? [] : GMapping(null, null);
    // Build (and cache) the by-source index plus its per-source search memos on first use.
    const generated = ((_a = cast(map))._bySources || (_a._bySources = buildBySources(decodedMappings(map), (cast(map)._bySourceMemos = sources.map(memoizedState)))));
    const segments = generated[sourceIndex][line];
    if (segments == null)
        return all ? [] : GMapping(null, null);
    const memo = cast(map)._bySourceMemos[sourceIndex];
    if (all)
        return sliceGeneratedPositions(segments, memo, line, column, bias);
    const index = traceSegmentInternal(segments, memo, line, column, bias);
    if (index === -1)
        return GMapping(null, null);
    const segment = segments[index];
    return GMapping(segment[REV_GENERATED_LINE] + 1, segment[REV_GENERATED_COLUMN]);
}
|
||||
|
||||
// Public API of the trace-mapping UMD bundle.
exports.AnyMap = AnyMap;
exports.GREATEST_LOWER_BOUND = GREATEST_LOWER_BOUND;
exports.LEAST_UPPER_BOUND = LEAST_UPPER_BOUND;
exports.TraceMap = TraceMap;
exports.allGeneratedPositionsFor = allGeneratedPositionsFor;
exports.decodedMap = decodedMap;
exports.decodedMappings = decodedMappings;
exports.eachMapping = eachMapping;
exports.encodedMap = encodedMap;
exports.encodedMappings = encodedMappings;
exports.generatedPositionFor = generatedPositionFor;
exports.isIgnored = isIgnored;
exports.originalPositionFor = originalPositionFor;
exports.presortedDecodedMap = presortedDecodedMap;
exports.sourceContentFor = sourceContentFor;
exports.traceSegment = traceSegment;
|
||||
|
||||
}));
|
||||
//# sourceMappingURL=trace-mapping.umd.js.map
|
||||
@@ -0,0 +1 @@
|
||||
module.exports={C:{"91":0.00181,"113":0.00362,"115":0.03986,"124":0.00181,"128":0.01087,"131":0.00362,"132":0.00181,"133":0.00544,"134":0.00544,"135":0.07792,"136":0.19932,_:"2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 114 116 117 118 119 120 121 122 123 125 126 127 129 130 137 138 139 140 3.5 3.6"},D:{"11":0.00181,"43":0.00181,"47":0.00181,"49":0.00181,"50":0.00181,"56":0.00181,"57":0.00181,"58":0.00181,"64":0.00181,"66":0.00181,"69":0.00181,"70":0.00181,"72":0.00181,"73":0.01268,"74":0.00544,"77":0.00181,"78":0.00906,"79":0.00362,"81":0.01087,"83":0.00181,"85":0.01087,"86":0.01268,"87":0.03262,"88":0.00362,"90":0.00181,"91":0.00362,"92":0.01993,"93":0.00181,"94":0.00362,"95":0.00725,"98":0.00362,"99":0.00181,"101":0.00362,"102":0.01087,"103":0.00362,"104":0.01268,"105":0.00181,"106":0.01268,"107":0.00181,"108":0.00362,"109":0.67769,"111":0.04349,"112":0.00181,"114":0.04349,"116":0.0145,"117":0.00725,"118":0.0145,"119":0.03986,"120":0.00362,"121":0.00906,"122":0.02174,"123":0.02356,"124":0.01812,"125":0.01993,"126":0.02718,"127":0.01268,"128":0.02174,"129":0.01268,"130":0.02356,"131":0.06886,"132":0.08516,"133":1.62899,"134":2.87202,"135":0.00906,_:"4 5 6 7 8 9 10 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 44 45 46 48 51 52 53 54 55 59 60 61 62 63 65 67 68 71 75 76 80 84 89 96 97 100 110 113 115 136 137 138"},F:{"46":0.00181,"79":0.02899,"87":0.00181,"95":0.04892,"108":0.00362,"114":0.00181,"116":0.01993,"117":0.3624,_:"9 11 12 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 47 48 49 50 51 52 53 54 55 56 57 58 60 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 80 81 82 83 84 85 86 
88 89 90 91 92 93 94 96 97 98 99 100 101 102 103 104 105 106 107 109 110 111 112 113 115 9.5-9.6 10.0-10.1 10.5 10.6 11.1 11.5 11.6 12.1"},B:{"12":0.00181,"14":0.00181,"18":0.01087,"84":0.00544,"89":0.00362,"90":0.00544,"92":0.03805,"96":0.00362,"100":0.00906,"102":0.00181,"109":0.0145,"114":0.00181,"121":0.00362,"122":0.00906,"123":0.00181,"124":0.00181,"126":0.00906,"127":0.00362,"128":0.00181,"129":0.00362,"130":0.00725,"131":0.04711,"132":0.0308,"133":0.53816,"134":0.99298,_:"13 15 16 17 79 80 81 83 85 86 87 88 91 93 94 95 97 98 99 101 103 104 105 106 107 108 110 111 112 113 115 116 117 118 119 120 125"},E:{"14":0.00362,_:"0 4 5 6 7 8 9 10 11 12 13 15 3.1 3.2 5.1 6.1 7.1 9.1 10.1 11.1 12.1 14.1 15.1 15.2-15.3 15.4 15.5 16.0 16.2 16.4 16.5 17.0 17.1 17.2 17.4 18.1 18.4","13.1":0.00725,"15.6":0.02356,"16.1":0.00181,"16.3":0.00181,"16.6":0.00906,"17.3":0.00181,"17.5":0.00181,"17.6":0.00725,"18.0":0.00544,"18.2":0.00362,"18.3":0.0308},G:{"8":0,"3.2":0,"4.0-4.1":0,"4.2-4.3":0.00088,"5.0-5.1":0,"6.0-6.1":0.00265,"7.0-7.1":0.00177,"8.1-8.4":0,"9.0-9.2":0.00132,"9.3":0.00618,"10.0-10.2":0.00044,"10.3":0.01015,"11.0-11.2":0.04678,"11.3-11.4":0.00309,"12.0-12.1":0.00177,"12.2-12.5":0.04369,"13.0-13.1":0.00088,"13.2":0.00132,"13.3":0.00177,"13.4-13.7":0.00618,"14.0-14.4":0.01545,"14.5-14.8":0.01854,"15.0-15.1":0.01015,"15.2-15.3":0.01015,"15.4":0.01236,"15.5":0.01412,"15.6-15.8":0.17389,"16.0":0.02471,"16.1":0.05075,"16.2":0.02648,"16.3":0.0459,"16.4":0.01015,"16.5":0.01898,"16.6-16.7":0.2061,"17.0":0.01236,"17.1":0.02207,"17.2":0.01677,"17.3":0.02339,"17.4":0.04678,"17.5":0.10415,"17.6-17.7":0.30231,"18.0":0.08474,"18.1":0.27716,"18.2":0.12401,"18.3":2.59195,"18.4":0.0384},P:{"4":0.06337,"21":0.01056,"22":0.02112,"23":0.02112,"24":0.11617,"25":0.02112,"26":0.04225,"27":0.28516,_:"20 5.0-5.4 6.2-6.4 8.2 10.1 11.1-11.2 12.0 13.0 15.0 
18.0","7.2-7.4":0.06337,"9.2":0.01056,"14.0":0.02112,"16.0":0.01056,"17.0":0.03168,"19.0":0.01056},I:{"0":0.00817,"3":0,"4":0,"2.1":0,"2.2":0,"2.3":0,"4.1":0,"4.2-4.3":0,"4.4":0,"4.4.3-4.4.4":0.00001},K:{"0":1.85606,_:"10 11 12 11.1 11.5 12.1"},A:{"11":0.00362,_:"6 7 8 9 10 5.5"},S:{"2.5":0.27839,_:"3.0-3.1"},J:{_:"7 10"},N:{_:"10 11"},R:{_:"0"},M:{"0":0.29477},Q:{_:"14.9"},O:{"0":0.0655},H:{"0":0.15},L:{"0":83.34809}};
|
||||
@@ -0,0 +1 @@
|
||||
module.exports={A:{A:{"1":"B","2":"K D E F mC","289":"A"},B:{"1":"0 9 C L M G N O P Q H R S T U V W X Y Z a b c d e f g h i j k l m n o p q r s t u v w x y z AB BB CB DB EB FB GB HB IB JB KB LB MB NB OB I"},C:{"1":"0 9 uB vB MC wB NC xB yB zB 0B 1B 2B 3B 4B 5B 6B 7B 8B 9B AC BC CC DC Q H R OC S T U V W X Y Z a b c d e f g h i j k l m n o p q r s t u v w x y z AB BB CB DB EB FB GB HB IB JB KB LB MB NB OB I PC EC QC RC oC pC","2":"1 2 3 4 5 6 7 8 nC LC J PB K D E F A B C L M G N O P QB RB qC rC","194":"SB TB UB VB WB XB YB ZB aB bB cB dB eB fB gB hB iB jB kB lB mB nB oB","1025":"pB qB rB sB tB"},D:{"1":"0 9 ZB aB bB cB dB eB fB gB hB iB jB kB lB mB nB oB pB qB rB sB tB uB vB MC wB NC xB yB zB 0B 1B 2B 3B 4B 5B 6B 7B 8B 9B AC BC CC DC Q H R S T U V W X Y Z a b c d e f g h i j k l m n o p q r s t u v w x y z AB BB CB DB EB FB GB HB IB JB KB LB MB NB OB I PC EC QC RC","2":"1 2 3 4 5 6 7 8 J PB K D E F A B C L M G N O P QB RB SB TB UB VB WB XB YB"},E:{"2050":"J PB K D E F A B C L M G sC SC tC uC vC wC TC FC GC xC yC zC UC VC HC 0C IC WC XC YC ZC aC 1C JC bC cC dC eC fC 2C KC gC hC iC jC 3C"},F:{"1":"0 4 5 6 7 8 RB SB TB UB VB WB XB YB ZB aB bB cB dB eB fB gB hB iB jB kB lB mB nB oB pB qB rB sB tB uB vB wB xB yB zB 0B 1B 2B 3B 4B 5B 6B 7B 8B 9B AC BC CC DC Q H R OC S T U V W X Y Z a b c d e f g h i j k l m n o p q r s t u v w x y z","2":"1 2 3 F B C G N O P QB 4C 5C 6C 7C FC kC 8C GC"},G:{"1":"MD ND OD PD QD RD SD UC VC HC TD IC WC XC YC ZC aC UD JC bC cC dC eC fC VD KC gC hC iC jC","2":"E SC 9C lC AD BD CD DD ED","516":"FD GD HD ID JD KD LD"},H:{"2":"WD"},I:{"1":"I","2":"LC J XD YD ZD aD lC bD cD"},J:{"2":"D A"},K:{"1":"H","2":"A B C FC kC GC"},L:{"1":"I"},M:{"1":"EC"},N:{"1":"B","289":"A"},O:{"1":"HC"},P:{"1":"1 2 3 4 5 6 7 8 J dD eD fD gD hD TC iD jD kD lD mD IC JC KC nD"},Q:{"1":"oD"},R:{"1":"pD"},S:{"1":"rD","194":"qD"}},B:2,C:"CSS touch-action property",D:true};
|
||||
@@ -0,0 +1 @@
|
||||
module.exports={A:{A:{"1":"A B","2":"K D E F mC"},B:{"1":"0 9 C L M G N O P Q H R S T U V W X Y Z a b c d e f g h i j k l m n o p q r s t u v w x y z AB BB CB DB EB FB GB HB IB JB KB LB MB NB OB I"},C:{"1":"0 4 5 6 7 8 9 RB SB TB UB VB WB XB YB ZB aB bB cB dB eB fB gB hB iB jB kB lB mB nB oB pB qB rB sB tB uB vB MC wB NC xB yB zB 0B 1B 2B 3B 4B 5B 6B 7B 8B 9B AC BC CC DC Q H R OC S T U V W X Y Z a b c d e f g h i j k l m n o p q r s t u v w x y z AB BB CB DB EB FB GB HB IB JB KB LB MB NB OB I PC EC QC RC oC pC","2":"nC LC qC rC","33":"1 2 3 B C L M G N O P QB","164":"J PB K D E F A"},D:{"1":"0 5 6 7 8 9 RB SB TB UB VB WB XB YB ZB aB bB cB dB eB fB gB hB iB jB kB lB mB nB oB pB qB rB sB tB uB vB MC wB NC xB yB zB 0B 1B 2B 3B 4B 5B 6B 7B 8B 9B AC BC CC DC Q H R S T U V W X Y Z a b c d e f g h i j k l m n o p q r s t u v w x y z AB BB CB DB EB FB GB HB IB JB KB LB MB NB OB I PC EC QC RC","2":"J PB K D E F","33":"3 4","164":"1 2 P QB","420":"A B C L M G N O"},E:{"1":"D E F A B C L M G uC vC wC TC FC GC xC yC zC UC VC HC 0C IC WC XC YC ZC aC 1C JC bC cC dC eC fC 2C KC gC hC iC jC 3C","2":"J PB sC SC tC","33":"K"},F:{"1":"0 1 2 3 4 5 6 7 8 G N O P QB RB SB TB UB VB WB XB YB ZB aB bB cB dB eB fB gB hB iB jB kB lB mB nB oB pB qB rB sB tB uB vB wB xB yB zB 0B 1B 2B 3B 4B 5B 6B 7B 8B 9B AC BC CC DC Q H R OC S T U V W X Y Z a b c d e f g h i j k l m n o p q r s t u v w x y z","2":"F B C 4C 5C 6C 7C FC kC 8C GC"},G:{"1":"E CD DD ED FD GD HD ID JD KD LD MD ND OD PD QD RD SD UC VC HC TD IC WC XC YC ZC aC UD JC bC cC dC eC fC VD KC gC hC iC jC","2":"SC 9C lC AD","33":"BD"},H:{"2":"WD"},I:{"1":"I bD cD","2":"LC J XD YD ZD aD lC"},J:{"1":"A","2":"D"},K:{"1":"H","2":"A B C FC kC GC"},L:{"1":"I"},M:{"1":"EC"},N:{"1":"A B"},O:{"1":"HC"},P:{"1":"1 2 3 4 5 6 7 8 J dD eD fD gD hD TC iD jD kD lD mD IC JC KC nD"},Q:{"1":"oD"},R:{"1":"pD"},S:{"1":"qD rD"}},B:1,C:"requestAnimationFrame",D:true};
|
||||
@@ -0,0 +1,7 @@
|
||||
'use strict';

// CJS entry point: selects the production or development build of the
// react-refresh runtime based on NODE_ENV at require time.
if (process.env.NODE_ENV === 'production') {
  module.exports = require('./cjs/react-refresh-runtime.production.min.js');
} else {
  module.exports = require('./cjs/react-refresh-runtime.development.js');
}
|
||||
Reference in New Issue
Block a user