update
File diff suppressed because one or more lines are too long
@@ -0,0 +1,38 @@
{
  "name": "semver",
  "version": "6.3.1",
  "description": "The semantic version parser used by npm.",
  "main": "semver.js",
  "scripts": {
    "test": "tap test/ --100 --timeout=30",
    "lint": "echo linting disabled",
    "postlint": "template-oss-check",
    "template-oss-apply": "template-oss-apply --force",
    "lintfix": "npm run lint -- --fix",
    "snap": "tap test/ --100 --timeout=30",
    "posttest": "npm run lint"
  },
  "devDependencies": {
    "@npmcli/template-oss": "4.17.0",
    "tap": "^12.7.0"
  },
  "license": "ISC",
  "repository": {
    "type": "git",
    "url": "https://github.com/npm/node-semver.git"
  },
  "bin": {
    "semver": "./bin/semver.js"
  },
  "files": [
    "bin",
    "range.bnf",
    "semver.js"
  ],
  "author": "GitHub Inc.",
  "templateOSS": {
    "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
    "content": "./scripts/template-oss",
    "version": "4.17.0"
  }
}
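For orientation, the `main` and `bin` fields above point at the library and CLI this package ships. A minimal sketch of typical programmatic use (the calls shown are assumed from the public semver API and are not part of this diff):

```ts
// Sketch only: assumes the standard semver API surface (valid / inc / satisfies).
import semver from "semver";

semver.valid("6.3.1");               // "6.3.1" - parses and normalizes a version
semver.inc("6.3.1", "minor");        // "6.4.0" - bump, as `npm version minor` would
semver.satisfies("6.3.1", "^6.0.0"); // true   - range check (range grammar in range.bnf)
```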
@@ -0,0 +1,27 @@
"use strict";

Object.defineProperty(exports, "__esModule", {
  value: true
});
exports.PLACEHOLDERS_FLIPPED_ALIAS = exports.PLACEHOLDERS_ALIAS = exports.PLACEHOLDERS = void 0;
var _utils = require("./utils.js");
const PLACEHOLDERS = exports.PLACEHOLDERS = ["Identifier", "StringLiteral", "Expression", "Statement", "Declaration", "BlockStatement", "ClassBody", "Pattern"];
const PLACEHOLDERS_ALIAS = exports.PLACEHOLDERS_ALIAS = {
  Declaration: ["Statement"],
  Pattern: ["PatternLike", "LVal"]
};
for (const type of PLACEHOLDERS) {
  const alias = _utils.ALIAS_KEYS[type];
  if (alias != null && alias.length) PLACEHOLDERS_ALIAS[type] = alias;
}
const PLACEHOLDERS_FLIPPED_ALIAS = exports.PLACEHOLDERS_FLIPPED_ALIAS = {};
Object.keys(PLACEHOLDERS_ALIAS).forEach(type => {
  PLACEHOLDERS_ALIAS[type].forEach(alias => {
    if (!hasOwnProperty.call(PLACEHOLDERS_FLIPPED_ALIAS, alias)) {
      PLACEHOLDERS_FLIPPED_ALIAS[alias] = [];
    }
    PLACEHOLDERS_FLIPPED_ALIAS[alias].push(type);
  });
});

//# sourceMappingURL=placeholders.js.map
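To make the inversion loop above concrete, here is a small sketch of its result for the two alias entries that are statically visible in this file (entries merged in from `_utils.ALIAS_KEYS` are defined elsewhere and omitted):

```ts
// Sketch only: reproduces the flipping logic for the statically visible entries.
const PLACEHOLDERS_ALIAS: Record<string, Array<string>> = {
  Declaration: ["Statement"],
  Pattern: ["PatternLike", "LVal"],
};

const PLACEHOLDERS_FLIPPED_ALIAS: Record<string, Array<string>> = {};
for (const [type, aliases] of Object.entries(PLACEHOLDERS_ALIAS)) {
  for (const alias of aliases) {
    (PLACEHOLDERS_FLIPPED_ALIAS[alias] ??= []).push(type);
  }
}
// Result: { Statement: ["Declaration"], PatternLike: ["Pattern"], LVal: ["Pattern"] }
```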
@@ -0,0 +1,423 @@
import { LazyRoute } from './fileRoute.cjs';
import { NotFoundError } from './not-found.cjs';
import { NavigateOptions, ParsePathParams } from './link.cjs';
import { ParsedLocation } from './location.cjs';
import { AnyRouteMatch, MakeRouteMatchFromRoute, MakeRouteMatchUnion, RouteMatch } from './Matches.cjs';
import { RootRouteId } from './root.cjs';
import { ParseRoute, RouteById, RoutePaths } from './routeInfo.cjs';
import { AnyRouter, RegisteredRouter } from './router.cjs';
import { BuildLocationFn, NavigateFn } from './RouterProvider.cjs';
import { Assign, Constrain, Expand, IntersectAssign, NoInfer } from './utils.cjs';
import { AnySchema, AnyStandardSchemaValidator, AnyValidator, AnyValidatorAdapter, AnyValidatorObj, DefaultValidator, ResolveSearchValidatorInput, ResolveValidatorOutput, StandardSchemaValidator, ValidatorAdapter, ValidatorFn, ValidatorObj } from './validators.cjs';
export type AnyPathParams = {};
export type SearchSchemaInput = {
  __TSearchSchemaInput__: 'TSearchSchemaInput';
};
export type AnyContext = {};
export interface RouteContext {
}
export type PreloadableObj = {
  preload?: () => Promise<void>;
};
export type RoutePathOptions<TCustomId, TPath> = {
  path: TPath;
} | {
  id: TCustomId;
};
export interface StaticDataRouteOption {
}
export type RoutePathOptionsIntersection<TCustomId, TPath> = {
  path: TPath;
  id: TCustomId;
};
export type SearchFilter<TInput, TResult = TInput> = (prev: TInput) => TResult;
export type SearchMiddlewareContext<TSearchSchema> = {
  search: TSearchSchema;
  next: (newSearch: TSearchSchema) => TSearchSchema;
};
export type SearchMiddleware<TSearchSchema> = (ctx: SearchMiddlewareContext<TSearchSchema>) => TSearchSchema;
export type ResolveId<TParentRoute, TCustomId extends string, TPath extends string> = TParentRoute extends {
  id: infer TParentId extends string;
} ? RoutePrefix<TParentId, string extends TCustomId ? TPath : TCustomId> : RootRouteId;
export type InferFullSearchSchema<TRoute> = TRoute extends {
  types: {
    fullSearchSchema: infer TFullSearchSchema;
  };
} ? TFullSearchSchema : {};
export type InferFullSearchSchemaInput<TRoute> = TRoute extends {
  types: {
    fullSearchSchemaInput: infer TFullSearchSchemaInput;
  };
} ? TFullSearchSchemaInput : {};
export type InferAllParams<TRoute> = TRoute extends {
  types: {
    allParams: infer TAllParams;
  };
} ? TAllParams : {};
export type InferAllContext<TRoute> = unknown extends TRoute ? TRoute : TRoute extends {
  types: {
    allContext: infer TAllContext;
  };
} ? TAllContext : {};
export type ResolveSearchSchemaFnInput<TSearchValidator> = TSearchValidator extends (input: infer TSearchSchemaInput) => any ? TSearchSchemaInput extends SearchSchemaInput ? Omit<TSearchSchemaInput, keyof SearchSchemaInput> : ResolveSearchSchemaFn<TSearchValidator> : AnySchema;
export type ResolveSearchSchemaInput<TSearchValidator> = TSearchValidator extends AnyStandardSchemaValidator ? NonNullable<TSearchValidator['~standard']['types']>['input'] : TSearchValidator extends AnyValidatorAdapter ? TSearchValidator['types']['input'] : TSearchValidator extends AnyValidatorObj ? ResolveSearchSchemaFnInput<TSearchValidator['parse']> : ResolveSearchSchemaFnInput<TSearchValidator>;
export type ResolveSearchSchemaFn<TSearchValidator> = TSearchValidator extends (...args: any) => infer TSearchSchema ? TSearchSchema : AnySchema;
export type ResolveSearchSchema<TSearchValidator> = unknown extends TSearchValidator ? TSearchValidator : TSearchValidator extends AnyStandardSchemaValidator ? NonNullable<TSearchValidator['~standard']['types']>['output'] : TSearchValidator extends AnyValidatorAdapter ? TSearchValidator['types']['output'] : TSearchValidator extends AnyValidatorObj ? ResolveSearchSchemaFn<TSearchValidator['parse']> : ResolveSearchSchemaFn<TSearchValidator>;
export type ParseSplatParams<TPath extends string> = TPath & `${string}$` extends never ? TPath & `${string}$/${string}` extends never ? never : '_splat' : '_splat';
export interface SplatParams {
  _splat?: string;
}
export type ResolveParams<TPath extends string> = ParseSplatParams<TPath> extends never ? Record<ParsePathParams<TPath>, string> : Record<ParsePathParams<TPath>, string> & SplatParams;
export type ParseParamsFn<in out TPath extends string, in out TParams> = (rawParams: ResolveParams<TPath>) => TParams extends Record<ParsePathParams<TPath>, any> ? TParams : Record<ParsePathParams<TPath>, any>;
export type StringifyParamsFn<in out TPath extends string, in out TParams> = (params: TParams) => ResolveParams<TPath>;
export type ParamsOptions<in out TPath extends string, in out TParams> = {
  params?: {
    parse?: ParseParamsFn<TPath, TParams>;
    stringify?: StringifyParamsFn<TPath, TParams>;
  };
  /**
  @deprecated Use params.parse instead
  */
  parseParams?: ParseParamsFn<TPath, TParams>;
  /**
  @deprecated Use params.stringify instead
  */
  stringifyParams?: StringifyParamsFn<TPath, TParams>;
};
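As a hedged, type-level illustration of the path-param plumbing above: `ParsePathParams` is imported from `./link.cjs` and is assumed here to extract `$`-prefixed segment names, so a route for `/posts/$postId` would see raw params shaped `{ postId: string }` and can convert them via `ParamsOptions`:

```ts
// Sketch only; the import specifier is hypothetical and ParsePathParams'
// behavior is assumed, since its definition is not part of this diff.
import type { ParamsOptions, ResolveParams } from "@tanstack/router-core";

type RawPostParams = ResolveParams<'/posts/$postId'>; // assumed: { postId: string }

const postParams: ParamsOptions<'/posts/$postId', { postId: number }> = {
  params: {
    parse: (raw) => ({ postId: Number(raw.postId) }),        // string -> number
    stringify: (typed) => ({ postId: String(typed.postId) }), // number -> string
  },
};
```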
interface RequiredStaticDataRouteOption {
  staticData: StaticDataRouteOption;
}
interface OptionalStaticDataRouteOption {
  staticData?: StaticDataRouteOption;
}
export type UpdatableStaticRouteOption = {} extends StaticDataRouteOption ? OptionalStaticDataRouteOption : RequiredStaticDataRouteOption;
export type MetaDescriptor = {
  charSet: 'utf-8';
} | {
  title: string;
} | {
  name: string;
  content: string;
} | {
  property: string;
  content: string;
} | {
  httpEquiv: string;
  content: string;
} | {
  'script:ld+json': LdJsonObject;
} | {
  tagName: 'meta' | 'link';
  [name: string]: string;
} | Record<string, unknown>;
type LdJsonObject = {
  [Key in string]: LdJsonValue;
} & {
  [Key in string]?: LdJsonValue | undefined;
};
type LdJsonArray = Array<LdJsonValue> | ReadonlyArray<LdJsonValue>;
type LdJsonPrimitive = string | number | boolean | null;
type LdJsonValue = LdJsonPrimitive | LdJsonObject | LdJsonArray;
export type RouteLinkEntry = {};
export type SearchValidator<TInput, TOutput> = ValidatorObj<TInput, TOutput> | ValidatorFn<TInput, TOutput> | ValidatorAdapter<TInput, TOutput> | StandardSchemaValidator<TInput, TOutput> | undefined;
export type AnySearchValidator = SearchValidator<any, any>;
export type DefaultSearchValidator = SearchValidator<Record<string, unknown>, AnySchema>;
export type RoutePrefix<TPrefix extends string, TPath extends string> = string extends TPath ? RootRouteId : TPath extends string ? TPrefix extends RootRouteId ? TPath extends '/' ? '/' : `/${TrimPath<TPath>}` : `${TPrefix}/${TPath}` extends '/' ? '/' : `/${TrimPathLeft<`${TrimPathRight<TPrefix>}/${TrimPath<TPath>}`>}` : never;
export type TrimPath<T extends string> = '' extends T ? '' : TrimPathRight<TrimPathLeft<T>>;
export type TrimPathLeft<T extends string> = T extends `${RootRouteId}/${infer U}` ? TrimPathLeft<U> : T extends `/${infer U}` ? TrimPathLeft<U> : T;
export type TrimPathRight<T extends string> = T extends '/' ? '/' : T extends `${infer U}/` ? TrimPathRight<U> : T;
export type LooseReturnType<T> = T extends (...args: Array<any>) => infer TReturn ? TReturn : never;
export type LooseAsyncReturnType<T> = T extends (...args: Array<any>) => infer TReturn ? TReturn extends Promise<infer TReturn> ? TReturn : TReturn : never;
export type ContextReturnType<TContextFn> = unknown extends TContextFn ? TContextFn : LooseReturnType<TContextFn> extends never ? AnyContext : LooseReturnType<TContextFn>;
export type ContextAsyncReturnType<TContextFn> = unknown extends TContextFn ? TContextFn : LooseAsyncReturnType<TContextFn> extends never ? AnyContext : LooseAsyncReturnType<TContextFn>;
export type ResolveRouteContext<TRouteContextFn, TBeforeLoadFn> = Assign<ContextReturnType<TRouteContextFn>, ContextAsyncReturnType<TBeforeLoadFn>>;
export type ResolveLoaderData<TLoaderFn> = unknown extends TLoaderFn ? TLoaderFn : LooseAsyncReturnType<TLoaderFn> extends never ? undefined : LooseAsyncReturnType<TLoaderFn>;
export type ResolveFullSearchSchema<TParentRoute extends AnyRoute, TSearchValidator> = unknown extends TParentRoute ? ResolveValidatorOutput<TSearchValidator> : IntersectAssign<InferFullSearchSchema<TParentRoute>, ResolveValidatorOutput<TSearchValidator>>;
export type ResolveFullSearchSchemaInput<TParentRoute extends AnyRoute, TSearchValidator> = IntersectAssign<InferFullSearchSchemaInput<TParentRoute>, ResolveSearchValidatorInput<TSearchValidator>>;
export type ResolveAllParamsFromParent<TParentRoute extends AnyRoute, TParams> = Assign<InferAllParams<TParentRoute>, TParams>;
export type RouteContextParameter<TParentRoute extends AnyRoute, TRouterContext> = unknown extends TParentRoute ? TRouterContext : Assign<TRouterContext, InferAllContext<TParentRoute>>;
export type BeforeLoadContextParameter<TParentRoute extends AnyRoute, TRouterContext, TRouteContextFn> = Assign<RouteContextParameter<TParentRoute, TRouterContext>, ContextReturnType<TRouteContextFn>>;
export type ResolveAllContext<TParentRoute extends AnyRoute, TRouterContext, TRouteContextFn, TBeforeLoadFn> = Assign<BeforeLoadContextParameter<TParentRoute, TRouterContext, TRouteContextFn>, ContextAsyncReturnType<TBeforeLoadFn>>;
export interface FullSearchSchemaOption<in out TParentRoute extends AnyRoute, in out TSearchValidator> {
  search: Expand<ResolveFullSearchSchema<TParentRoute, TSearchValidator>>;
}
export interface RemountDepsOptions<in out TRouteId, in out TFullSearchSchema, in out TAllParams, in out TLoaderDeps> {
  routeId: TRouteId;
  search: TFullSearchSchema;
  params: TAllParams;
  loaderDeps: TLoaderDeps;
}
export type MakeRemountDepsOptionsUnion<TRouteTree extends AnyRoute = RegisteredRouter['routeTree']> = ParseRoute<TRouteTree> extends infer TRoute extends AnyRoute ? TRoute extends any ? RemountDepsOptions<TRoute['id'], TRoute['types']['fullSearchSchema'], TRoute['types']['allParams'], TRoute['types']['loaderDeps']> : never : never;
export interface RouteTypes<in out TParentRoute extends AnyRoute, in out TPath extends string, in out TFullPath extends string, in out TCustomId extends string, in out TId extends string, in out TSearchValidator, in out TParams, in out TRouterContext, in out TRouteContextFn, in out TBeforeLoadFn, in out TLoaderDeps, in out TLoaderFn, in out TChildren, in out TFileRouteTypes> {
  parentRoute: TParentRoute;
  path: TPath;
  to: TrimPathRight<TFullPath>;
  fullPath: TFullPath;
  customId: TCustomId;
  id: TId;
  searchSchema: ResolveValidatorOutput<TSearchValidator>;
  searchSchemaInput: ResolveSearchValidatorInput<TSearchValidator>;
  searchValidator: TSearchValidator;
  fullSearchSchema: ResolveFullSearchSchema<TParentRoute, TSearchValidator>;
  fullSearchSchemaInput: ResolveFullSearchSchemaInput<TParentRoute, TSearchValidator>;
  params: TParams;
  allParams: ResolveAllParamsFromParent<TParentRoute, TParams>;
  routerContext: TRouterContext;
  routeContext: ResolveRouteContext<TRouteContextFn, TBeforeLoadFn>;
  routeContextFn: TRouteContextFn;
  beforeLoadFn: TBeforeLoadFn;
  allContext: ResolveAllContext<TParentRoute, TRouterContext, TRouteContextFn, TBeforeLoadFn>;
  children: TChildren;
  loaderData: ResolveLoaderData<TLoaderFn>;
  loaderDeps: TLoaderDeps;
  fileRouteTypes: TFileRouteTypes;
}
export type ResolveFullPath<TParentRoute extends AnyRoute, TPath extends string, TPrefixed = RoutePrefix<TParentRoute['fullPath'], TPath>> = TPrefixed extends RootRouteId ? '/' : TPrefixed;
export interface RouteExtensions<TId, TFullPath> {
}
export type RouteLazyFn<TRoute extends AnyRoute> = (lazyFn: () => Promise<LazyRoute>) => TRoute;
export type RouteAddChildrenFn<in out TParentRoute extends AnyRoute, in out TPath extends string, in out TFullPath extends string, in out TCustomId extends string, in out TId extends string, in out TSearchValidator, in out TParams, in out TRouterContext, in out TRouteContextFn, in out TBeforeLoadFn, in out TLoaderDeps extends Record<string, any>, in out TLoaderFn, in out TFileRouteTypes> = <const TNewChildren>(children: Constrain<TNewChildren, ReadonlyArray<AnyRoute> | Record<string, AnyRoute>>) => Route<TParentRoute, TPath, TFullPath, TCustomId, TId, TSearchValidator, TParams, TRouterContext, TRouteContextFn, TBeforeLoadFn, TLoaderDeps, TLoaderFn, TNewChildren, TFileRouteTypes>;
export type RouteAddFileChildrenFn<in out TParentRoute extends AnyRoute, in out TPath extends string, in out TFullPath extends string, in out TCustomId extends string, in out TId extends string, in out TSearchValidator, in out TParams, in out TRouterContext, in out TRouteContextFn, in out TBeforeLoadFn, in out TLoaderDeps extends Record<string, any>, in out TLoaderFn, in out TFileRouteTypes> = <const TNewChildren>(children: TNewChildren) => Route<TParentRoute, TPath, TFullPath, TCustomId, TId, TSearchValidator, TParams, TRouterContext, TRouteContextFn, TBeforeLoadFn, TLoaderDeps, TLoaderFn, TNewChildren, TFileRouteTypes>;
export type RouteAddFileTypesFn<TParentRoute extends AnyRoute, TPath extends string, TFullPath extends string, TCustomId extends string, TId extends string, TSearchValidator, TParams, TRouterContext, TRouteContextFn, TBeforeLoadFn, TLoaderDeps extends Record<string, any>, TLoaderFn, TChildren> = <TNewFileRouteTypes>() => Route<TParentRoute, TPath, TFullPath, TCustomId, TId, TSearchValidator, TParams, TRouterContext, TRouteContextFn, TBeforeLoadFn, TLoaderDeps, TLoaderFn, TChildren, TNewFileRouteTypes>;
export interface Route<in out TParentRoute extends AnyRoute, in out TPath extends string, in out TFullPath extends string, in out TCustomId extends string, in out TId extends string, in out TSearchValidator, in out TParams, in out TRouterContext, in out TRouteContextFn, in out TBeforeLoadFn, in out TLoaderDeps extends Record<string, any>, in out TLoaderFn, in out TChildren, in out TFileRouteTypes> extends RouteExtensions<TId, TFullPath> {
  fullPath: TFullPath;
  path: TPath;
  id: TId;
  parentRoute: TParentRoute;
  children?: TChildren;
  types: RouteTypes<TParentRoute, TPath, TFullPath, TCustomId, TId, TSearchValidator, TParams, TRouterContext, TRouteContextFn, TBeforeLoadFn, TLoaderDeps, TLoaderFn, TChildren, TFileRouteTypes>;
  options: RouteOptions<TParentRoute, TId, TCustomId, TFullPath, TPath, TSearchValidator, TParams, TLoaderDeps, TLoaderFn, TRouterContext, TRouteContextFn, TBeforeLoadFn>;
  isRoot: TParentRoute extends AnyRoute ? true : false;
  _componentsPromise?: Promise<Array<void>>;
  lazyFn?: () => Promise<LazyRoute>;
  _lazyPromise?: Promise<void>;
  rank: number;
  to: TrimPathRight<TFullPath>;
  init: (opts: {
    originalIndex: number;
    defaultSsr?: boolean;
  }) => void;
  update: (options: UpdatableRouteOptions<TParentRoute, TCustomId, TFullPath, TParams, TSearchValidator, TLoaderFn, TLoaderDeps, TRouterContext, TRouteContextFn, TBeforeLoadFn>) => this;
  lazy: RouteLazyFn<this>;
  addChildren: RouteAddChildrenFn<TParentRoute, TPath, TFullPath, TCustomId, TId, TSearchValidator, TParams, TRouterContext, TRouteContextFn, TBeforeLoadFn, TLoaderDeps, TLoaderFn, TFileRouteTypes>;
  _addFileChildren: RouteAddFileChildrenFn<TParentRoute, TPath, TFullPath, TCustomId, TId, TSearchValidator, TParams, TRouterContext, TRouteContextFn, TBeforeLoadFn, TLoaderDeps, TLoaderFn, TFileRouteTypes>;
  _addFileTypes: RouteAddFileTypesFn<TParentRoute, TPath, TFullPath, TCustomId, TId, TSearchValidator, TParams, TRouterContext, TRouteContextFn, TBeforeLoadFn, TLoaderDeps, TLoaderFn, TChildren>;
}
export type AnyRoute = Route<any, any, any, any, any, any, any, any, any, any, any, any, any, any>;
export type AnyRouteWithContext<TContext> = AnyRoute & {
  types: {
    allContext: TContext;
  };
};
export type RouteOptions<TParentRoute extends AnyRoute = AnyRoute, TId extends string = string, TCustomId extends string = string, TFullPath extends string = string, TPath extends string = string, TSearchValidator = undefined, TParams = AnyPathParams, TLoaderDeps extends Record<string, any> = {}, TLoaderFn = undefined, TRouterContext = {}, TRouteContextFn = AnyContext, TBeforeLoadFn = AnyContext> = BaseRouteOptions<TParentRoute, TId, TCustomId, TPath, TSearchValidator, TParams, TLoaderDeps, TLoaderFn, TRouterContext, TRouteContextFn, TBeforeLoadFn> & UpdatableRouteOptions<NoInfer<TParentRoute>, NoInfer<TCustomId>, NoInfer<TFullPath>, NoInfer<TParams>, NoInfer<TSearchValidator>, NoInfer<TLoaderFn>, NoInfer<TLoaderDeps>, NoInfer<TRouterContext>, NoInfer<TRouteContextFn>, NoInfer<TBeforeLoadFn>>;
export type RouteContextFn<in out TParentRoute extends AnyRoute, in out TSearchValidator, in out TParams, in out TRouterContext> = (ctx: RouteContextOptions<TParentRoute, TSearchValidator, TParams, TRouterContext>) => any;
export type BeforeLoadFn<in out TParentRoute extends AnyRoute, in out TSearchValidator, in out TParams, in out TRouterContext, in out TRouteContextFn> = (ctx: BeforeLoadContextOptions<TParentRoute, TSearchValidator, TParams, TRouterContext, TRouteContextFn>) => any;
export type FileBaseRouteOptions<TParentRoute extends AnyRoute = AnyRoute, TId extends string = string, TPath extends string = string, TSearchValidator = undefined, TParams = {}, TLoaderDeps extends Record<string, any> = {}, TLoaderFn = undefined, TRouterContext = {}, TRouteContextFn = AnyContext, TBeforeLoadFn = AnyContext, TRemountDepsFn = AnyContext> = ParamsOptions<TPath, TParams> & {
  validateSearch?: Constrain<TSearchValidator, AnyValidator, DefaultValidator>;
  shouldReload?: boolean | ((match: LoaderFnContext<TParentRoute, TId, TParams, TLoaderDeps, TRouterContext, TRouteContextFn, TBeforeLoadFn>) => any);
  context?: Constrain<TRouteContextFn, (ctx: RouteContextOptions<TParentRoute, TParams, TRouterContext, TLoaderDeps>) => any>;
  beforeLoad?: Constrain<TBeforeLoadFn, (ctx: BeforeLoadContextOptions<TParentRoute, TSearchValidator, TParams, TRouterContext, TRouteContextFn>) => any>;
  loaderDeps?: (opts: FullSearchSchemaOption<TParentRoute, TSearchValidator>) => TLoaderDeps;
  remountDeps?: Constrain<TRemountDepsFn, (opt: RemountDepsOptions<TId, FullSearchSchemaOption<TParentRoute, TSearchValidator>, Expand<ResolveAllParamsFromParent<TParentRoute, TParams>>, TLoaderDeps>) => any>;
  loader?: Constrain<TLoaderFn, (ctx: LoaderFnContext<TParentRoute, TId, TParams, TLoaderDeps, TRouterContext, TRouteContextFn, TBeforeLoadFn>) => any>;
};
export type BaseRouteOptions<TParentRoute extends AnyRoute = AnyRoute, TId extends string = string, TCustomId extends string = string, TPath extends string = string, TSearchValidator = undefined, TParams = {}, TLoaderDeps extends Record<string, any> = {}, TLoaderFn = undefined, TRouterContext = {}, TRouteContextFn = AnyContext, TBeforeLoadFn = AnyContext> = RoutePathOptions<TCustomId, TPath> & FileBaseRouteOptions<TParentRoute, TId, TPath, TSearchValidator, TParams, TLoaderDeps, TLoaderFn, TRouterContext, TRouteContextFn, TBeforeLoadFn> & {
  getParentRoute: () => TParentRoute;
};
export interface ContextOptions<in out TParentRoute extends AnyRoute, in out TParams> {
  abortController: AbortController;
  preload: boolean;
  params: Expand<ResolveAllParamsFromParent<TParentRoute, TParams>>;
  location: ParsedLocation;
  /**
  * @deprecated Use `throw redirect({ to: '/somewhere' })` instead
  **/
  navigate: NavigateFn;
  buildLocation: BuildLocationFn;
  cause: 'preload' | 'enter' | 'stay';
  matches: Array<MakeRouteMatchUnion>;
}
export interface RouteContextOptions<in out TParentRoute extends AnyRoute, in out TParams, in out TRouterContext, in out TLoaderDeps> extends ContextOptions<TParentRoute, TParams> {
  deps: TLoaderDeps;
  context: Expand<RouteContextParameter<TParentRoute, TRouterContext>>;
}
export interface BeforeLoadContextOptions<in out TParentRoute extends AnyRoute, in out TSearchValidator, in out TParams, in out TRouterContext, in out TRouteContextFn> extends ContextOptions<TParentRoute, TParams>, FullSearchSchemaOption<TParentRoute, TSearchValidator> {
  context: Expand<BeforeLoadContextParameter<TParentRoute, TRouterContext, TRouteContextFn>>;
}
type AssetFnContextOptions<in out TRouteId, in out TFullPath, in out TParentRoute extends AnyRoute, in out TParams, in out TSearchValidator, in out TLoaderFn, in out TRouterContext, in out TRouteContextFn, in out TBeforeLoadFn, in out TLoaderDeps> = {
  matches: Array<RouteMatch<TRouteId, TFullPath, ResolveAllParamsFromParent<TParentRoute, TParams>, ResolveFullSearchSchema<TParentRoute, TSearchValidator>, ResolveLoaderData<TLoaderFn>, ResolveAllContext<TParentRoute, TRouterContext, TRouteContextFn, TBeforeLoadFn>, TLoaderDeps>>;
  match: RouteMatch<TRouteId, TFullPath, ResolveAllParamsFromParent<TParentRoute, TParams>, ResolveFullSearchSchema<TParentRoute, TSearchValidator>, ResolveLoaderData<TLoaderFn>, ResolveAllContext<TParentRoute, TRouterContext, TRouteContextFn, TBeforeLoadFn>, TLoaderDeps>;
  params: ResolveAllParamsFromParent<TParentRoute, TParams>;
  loaderData: ResolveLoaderData<TLoaderFn>;
};
export interface DefaultUpdatableRouteOptionsExtensions {
  component?: unknown;
  errorComponent?: unknown;
  notFoundComponent?: unknown;
  pendingComponent?: unknown;
}
export interface UpdatableRouteOptionsExtensions extends DefaultUpdatableRouteOptionsExtensions {
}
export interface UpdatableRouteOptions<in out TParentRoute extends AnyRoute, in out TRouteId, in out TFullPath, in out TParams, in out TSearchValidator, in out TLoaderFn, in out TLoaderDeps, in out TRouterContext, in out TRouteContextFn, in out TBeforeLoadFn> extends UpdatableStaticRouteOption, UpdatableRouteOptionsExtensions {
  caseSensitive?: boolean;
  wrapInSuspense?: boolean;
  pendingMs?: number;
  pendingMinMs?: number;
  staleTime?: number;
  gcTime?: number;
  preload?: boolean;
  preloadStaleTime?: number;
  preloadGcTime?: number;
  search?: {
    middlewares?: Array<SearchMiddleware<ResolveFullSearchSchemaInput<TParentRoute, TSearchValidator>>>;
  };
  /**
  @deprecated Use search.middlewares instead
  */
  preSearchFilters?: Array<SearchFilter<ResolveFullSearchSchema<TParentRoute, TSearchValidator>>>;
  /**
  @deprecated Use search.middlewares instead
  */
  postSearchFilters?: Array<SearchFilter<ResolveFullSearchSchema<TParentRoute, TSearchValidator>>>;
  onCatch?: (error: Error) => void;
  onError?: (err: any) => void;
  onEnter?: (match: RouteMatch<TRouteId, TFullPath, ResolveAllParamsFromParent<TParentRoute, TParams>, ResolveFullSearchSchema<TParentRoute, TSearchValidator>, ResolveLoaderData<TLoaderFn>, ResolveAllContext<TParentRoute, TRouterContext, TRouteContextFn, TBeforeLoadFn>, TLoaderDeps>) => void;
  onStay?: (match: RouteMatch<TRouteId, TFullPath, ResolveAllParamsFromParent<TParentRoute, TParams>, ResolveFullSearchSchema<TParentRoute, TSearchValidator>, ResolveLoaderData<TLoaderFn>, ResolveAllContext<TParentRoute, TRouterContext, TRouteContextFn, TBeforeLoadFn>, TLoaderDeps>) => void;
  onLeave?: (match: RouteMatch<TRouteId, TFullPath, ResolveAllParamsFromParent<TParentRoute, TParams>, ResolveFullSearchSchema<TParentRoute, TSearchValidator>, ResolveLoaderData<TLoaderFn>, ResolveAllContext<TParentRoute, TRouterContext, TRouteContextFn, TBeforeLoadFn>, TLoaderDeps>) => void;
  headers?: (ctx: {
    loaderData: ResolveLoaderData<TLoaderFn>;
  }) => Record<string, string>;
  head?: (ctx: AssetFnContextOptions<TRouteId, TFullPath, TParentRoute, TParams, TSearchValidator, TLoaderFn, TRouterContext, TRouteContextFn, TBeforeLoadFn, TLoaderDeps>) => {
    links?: AnyRouteMatch['links'];
    scripts?: AnyRouteMatch['headScripts'];
    meta?: AnyRouteMatch['meta'];
  };
  scripts?: (ctx: AssetFnContextOptions<TRouteId, TFullPath, TParentRoute, TParams, TSearchValidator, TLoaderFn, TRouterContext, TRouteContextFn, TBeforeLoadFn, TLoaderDeps>) => AnyRouteMatch['scripts'];
  ssr?: boolean;
  codeSplitGroupings?: Array<Array<'loader' | 'component' | 'pendingComponent' | 'notFoundComponent' | 'errorComponent'>>;
}
export type RouteLoaderFn<in out TParentRoute extends AnyRoute = AnyRoute, in out TId extends string = string, in out TParams = {}, in out TLoaderDeps = {}, in out TRouterContext = {}, in out TRouteContextFn = AnyContext, in out TBeforeLoadFn = AnyContext> = (match: LoaderFnContext<TParentRoute, TId, TParams, TLoaderDeps, TRouterContext, TRouteContextFn, TBeforeLoadFn>) => any;
export interface LoaderFnContext<in out TParentRoute extends AnyRoute = AnyRoute, in out TId extends string = string, in out TParams = {}, in out TLoaderDeps = {}, in out TRouterContext = {}, in out TRouteContextFn = AnyContext, in out TBeforeLoadFn = AnyContext> {
  abortController: AbortController;
  preload: boolean;
  params: Expand<ResolveAllParamsFromParent<TParentRoute, TParams>>;
  deps: TLoaderDeps;
  context: Expand<ResolveAllContext<TParentRoute, TRouterContext, TRouteContextFn, TBeforeLoadFn>>;
  location: ParsedLocation;
  /**
  * @deprecated Use `throw redirect({ to: '/somewhere' })` instead
  **/
  navigate: (opts: NavigateOptions<AnyRouter>) => Promise<void> | void;
  parentMatchPromise: TId extends RootRouteId ? never : Promise<MakeRouteMatchFromRoute<TParentRoute>>;
  cause: 'preload' | 'enter' | 'stay';
  route: AnyRoute;
}
export type RootRouteOptions<TSearchValidator = undefined, TRouterContext = {}, TRouteContextFn = AnyContext, TBeforeLoadFn = AnyContext, TLoaderDeps extends Record<string, any> = {}, TLoaderFn = undefined> = Omit<RouteOptions<any, // TParentRoute
RootRouteId, // TId
RootRouteId, // TCustomId
'', // TFullPath
'', // TPath
TSearchValidator, {}, // TParams
TLoaderDeps, TLoaderFn, TRouterContext, TRouteContextFn, TBeforeLoadFn>, 'path' | 'id' | 'getParentRoute' | 'caseSensitive' | 'parseParams' | 'stringifyParams' | 'params'>;
export type RouteConstraints = {
  TParentRoute: AnyRoute;
  TPath: string;
  TFullPath: string;
  TCustomId: string;
  TId: string;
  TSearchSchema: AnySchema;
  TFullSearchSchema: AnySchema;
  TParams: Record<string, any>;
  TAllParams: Record<string, any>;
  TParentContext: AnyContext;
  TRouteContext: RouteContext;
  TAllContext: AnyContext;
  TRouterContext: AnyContext;
  TChildren: unknown;
  TRouteTree: AnyRoute;
};
export type RouteTypesById<TRouter extends AnyRouter, TId> = RouteById<TRouter['routeTree'], TId>['types'];
export type RouteMask<TRouteTree extends AnyRoute> = {
  routeTree: TRouteTree;
  from: RoutePaths<TRouteTree>;
  to?: any;
  params?: any;
  search?: any;
  hash?: any;
  state?: any;
  unmaskOnReload?: boolean;
};
/**
* @deprecated Use `ErrorComponentProps` instead.
*/
export type ErrorRouteProps = {
  error: unknown;
  info?: {
    componentStack: string;
  };
  reset: () => void;
};
export type ErrorComponentProps = {
  error: Error;
  info?: {
    componentStack: string;
  };
  reset: () => void;
};
export type NotFoundRouteProps = {
  data: unknown;
};
export declare class BaseRoute<in out TParentRoute extends AnyRoute = AnyRoute, in out TPath extends string = '/', in out TFullPath extends string = ResolveFullPath<TParentRoute, TPath>, in out TCustomId extends string = string, in out TId extends string = ResolveId<TParentRoute, TCustomId, TPath>, in out TSearchValidator = undefined, in out TParams = ResolveParams<TPath>, in out TRouterContext = AnyContext, in out TRouteContextFn = AnyContext, in out TBeforeLoadFn = AnyContext, in out TLoaderDeps extends Record<string, any> = {}, in out TLoaderFn = undefined, in out TChildren = unknown, in out TFileRouteTypes = unknown> implements Route<TParentRoute, TPath, TFullPath, TCustomId, TId, TSearchValidator, TParams, TRouterContext, TRouteContextFn, TBeforeLoadFn, TLoaderDeps, TLoaderFn, TChildren, TFileRouteTypes> {
  isRoot: TParentRoute extends AnyRoute ? true : false;
  options: RouteOptions<TParentRoute, TId, TCustomId, TFullPath, TPath, TSearchValidator, TParams, TLoaderDeps, TLoaderFn, TRouterContext, TRouteContextFn, TBeforeLoadFn>;
  parentRoute: TParentRoute;
  private _id;
  private _path;
  private _fullPath;
  private _to;
  private _ssr;
  get to(): TrimPathRight<TFullPath>;
  get id(): TId;
  get path(): TPath;
  get fullPath(): TFullPath;
  get ssr(): boolean;
  children?: TChildren;
  originalIndex?: number;
  rank: number;
  lazyFn?: () => Promise<LazyRoute>;
  _lazyPromise?: Promise<void>;
  _componentsPromise?: Promise<Array<void>>;
  constructor(options?: RouteOptions<TParentRoute, TId, TCustomId, TFullPath, TPath, TSearchValidator, TParams, TLoaderDeps, TLoaderFn, TRouterContext, TRouteContextFn, TBeforeLoadFn>);
  types: RouteTypes<TParentRoute, TPath, TFullPath, TCustomId, TId, TSearchValidator, TParams, TRouterContext, TRouteContextFn, TBeforeLoadFn, TLoaderDeps, TLoaderFn, TChildren, TFileRouteTypes>;
  init: (opts: {
    originalIndex: number;
    defaultSsr?: boolean;
  }) => void;
  addChildren: RouteAddChildrenFn<TParentRoute, TPath, TFullPath, TCustomId, TId, TSearchValidator, TParams, TRouterContext, TRouteContextFn, TBeforeLoadFn, TLoaderDeps, TLoaderFn, TFileRouteTypes>;
  _addFileChildren: RouteAddFileChildrenFn<TParentRoute, TPath, TFullPath, TCustomId, TId, TSearchValidator, TParams, TRouterContext, TRouteContextFn, TBeforeLoadFn, TLoaderDeps, TLoaderFn, TFileRouteTypes>;
  _addFileTypes: RouteAddFileTypesFn<TParentRoute, TPath, TFullPath, TCustomId, TId, TSearchValidator, TParams, TRouterContext, TRouteContextFn, TBeforeLoadFn, TLoaderDeps, TLoaderFn, TChildren>;
  updateLoader: <TNewLoaderFn>(options: {
    loader: Constrain<TNewLoaderFn, RouteLoaderFn<TParentRoute, TCustomId, TParams, TLoaderDeps, TRouterContext, TRouteContextFn, TBeforeLoadFn>>;
  }) => BaseRoute<TParentRoute, TPath, TFullPath, TCustomId, TId, TSearchValidator, TParams, TRouterContext, TRouteContextFn, TBeforeLoadFn, TLoaderDeps, TNewLoaderFn, TChildren, TFileRouteTypes>;
  update: (options: UpdatableRouteOptions<TParentRoute, TCustomId, TFullPath, TParams, TSearchValidator, TLoaderFn, TLoaderDeps, TRouterContext, TRouteContextFn, TBeforeLoadFn>) => this;
  lazy: RouteLazyFn<this>;
}
export declare class BaseRouteApi<TId, TRouter extends AnyRouter = RegisteredRouter> {
  id: TId;
  constructor({ id }: {
    id: TId;
  });
  notFound: (opts?: NotFoundError) => NotFoundError;
}
export declare class BaseRootRoute<in out TSearchValidator = undefined, in out TRouterContext = {}, in out TRouteContextFn = AnyContext, in out TBeforeLoadFn = AnyContext, in out TLoaderDeps extends Record<string, any> = {}, in out TLoaderFn = undefined, in out TChildren = unknown, in out TFileRouteTypes = unknown> extends BaseRoute<any, // TParentRoute
'/', // TPath
'/', // TFullPath
string, // TCustomId
RootRouteId, // TId
TSearchValidator, // TSearchValidator
{}, // TParams
TRouterContext, TRouteContextFn, TBeforeLoadFn, TLoaderDeps, TLoaderFn, TChildren, // TChildren
TFileRouteTypes> {
  constructor(options?: RootRouteOptions<TSearchValidator, TRouterContext, TRouteContextFn, TBeforeLoadFn, TLoaderDeps, TLoaderFn>);
}
export {};
@@ -0,0 +1,21 @@
MIT License

Copyright (c) 2021 Tanner Linsley

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
@@ -0,0 +1,54 @@
{
  "name": "p-locate",
  "version": "5.0.0",
  "description": "Get the first fulfilled promise that satisfies the provided testing function",
  "license": "MIT",
  "repository": "sindresorhus/p-locate",
  "funding": "https://github.com/sponsors/sindresorhus",
  "author": {
    "name": "Sindre Sorhus",
    "email": "sindresorhus@gmail.com",
    "url": "https://sindresorhus.com"
  },
  "engines": {
    "node": ">=10"
  },
  "scripts": {
    "test": "xo && ava && tsd"
  },
  "files": [
    "index.js",
    "index.d.ts"
  ],
  "keywords": [
    "promise",
    "locate",
    "find",
    "finder",
    "search",
    "searcher",
    "test",
    "array",
    "collection",
    "iterable",
    "iterator",
    "race",
    "fulfilled",
    "fastest",
    "async",
    "await",
    "promises",
    "bluebird"
  ],
  "dependencies": {
    "p-limit": "^3.0.2"
  },
  "devDependencies": {
    "ava": "^2.4.0",
    "delay": "^4.1.0",
    "in-range": "^2.0.0",
    "time-span": "^4.0.0",
    "tsd": "^0.13.1",
    "xo": "^0.32.1"
  }
}
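A minimal usage sketch matching the description above (the tester-callback signature and the preference for lower-index entries are assumptions about p-locate's documented behavior; the file names are made up):

```ts
// Sketch only: find the first path in the list that exists on disk.
import pLocate from "p-locate";
import { promises as fs } from "fs";

const candidates = ["unicorn.png", "rainbow.png", "pony.png"];

const firstExisting = await pLocate(candidates, async (file) => {
  try {
    await fs.access(file);
    return true;
  } catch {
    return false;
  }
});
// Resolves with the first candidate whose tester fulfilled truthy, else undefined.
```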
@@ -0,0 +1,5 @@
import { useContext } from 'react';
import DocumentContext from '../../DocumentContext.js';
export default function useDocumentContext() {
  return useContext(DocumentContext);
}
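For context, a hedged sketch of how such a hook is consumed; the shape of the context value is an assumption, since `DocumentContext.js` is not part of this diff:

```ts
// Illustrative only: a consumer of the hook above, avoiding JSX for brevity.
import { createElement } from "react";
import useDocumentContext from "./useDocumentContext.js";

export function PageCount() {
  const documentContext = useDocumentContext();
  // `pdf.numPages` is a hypothetical field name used purely for illustration.
  const pages = documentContext?.pdf?.numPages ?? 0;
  return createElement("span", null, `${pages} page(s)`);
}
```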
@@ -0,0 +1,13 @@
"use strict";

Object.defineProperty(exports, "__esModule", {
  value: true
});
exports.default = isValidES3Identifier;
var _isValidIdentifier = require("./isValidIdentifier.js");
const RESERVED_WORDS_ES3_ONLY = new Set(["abstract", "boolean", "byte", "char", "double", "enum", "final", "float", "goto", "implements", "int", "interface", "long", "native", "package", "private", "protected", "public", "short", "static", "synchronized", "throws", "transient", "volatile"]);
function isValidES3Identifier(name) {
  return (0, _isValidIdentifier.default)(name) && !RESERVED_WORDS_ES3_ONLY.has(name);
}

//# sourceMappingURL=isValidES3Identifier.js.map
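A quick illustration of the check above; `@babel/types` re-exports this helper, and the expected results follow directly from the reserved-word set in this file:

```ts
import { isValidES3Identifier } from "@babel/types";

isValidES3Identifier("foo");     // true  - valid identifier, not ES3-reserved
isValidES3Identifier("package"); // false - in RESERVED_WORDS_ES3_ONLY
isValidES3Identifier("42abc");   // false - not a valid identifier at all
```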
@@ -0,0 +1,334 @@
[](https://www.npmjs.com/package/eslint)
[](https://www.npmjs.com/package/eslint)
[](https://github.com/eslint/eslint/actions)
<br>
[](https://opencollective.com/eslint)
[](https://opencollective.com/eslint)

# ESLint

[Website](https://eslint.org) |
[Configure ESLint](https://eslint.org/docs/latest/use/configure) |
[Rules](https://eslint.org/docs/rules/) |
[Contribute to ESLint](https://eslint.org/docs/latest/contribute) |
[Report Bugs](https://eslint.org/docs/latest/contribute/report-bugs) |
[Code of Conduct](https://eslint.org/conduct) |
[Twitter](https://twitter.com/geteslint) |
[Discord](https://eslint.org/chat) |
[Mastodon](https://fosstodon.org/@eslint) |
[Bluesky](https://bsky.app/profile/eslint.org)

ESLint is a tool for identifying and reporting on patterns found in ECMAScript/JavaScript code. In many ways, it is similar to JSLint and JSHint with a few exceptions:

- ESLint uses [Espree](https://github.com/eslint/js/tree/main/packages/espree) for JavaScript parsing.
- ESLint uses an AST to evaluate patterns in code.
- ESLint is completely pluggable, every single rule is a plugin and you can add more at runtime.

## Table of Contents

1. [Installation and Usage](#installation-and-usage)
1. [Configuration](#configuration)
1. [Version Support](#version-support)
1. [Code of Conduct](#code-of-conduct)
1. [Filing Issues](#filing-issues)
1. [Frequently Asked Questions](#frequently-asked-questions)
1. [Releases](#releases)
1. [Security Policy](#security-policy)
1. [Semantic Versioning Policy](#semantic-versioning-policy)
1. [License](#license)
1. [Team](#team)
1. [Sponsors](#sponsors)
1. [Technology Sponsors](#technology-sponsors) <!-- markdownlint-disable-line MD051 -->

## Installation and Usage

Prerequisites: [Node.js](https://nodejs.org/) (`^18.18.0`, `^20.9.0`, or `>=21.1.0`) built with SSL support. (If you are using an official Node.js distribution, SSL is always built in.)

You can install and configure ESLint using this command:

```shell
npm init @eslint/config@latest
```

After that, you can run ESLint on any file or directory like this:

```shell
npx eslint yourfile.js
```

### pnpm Installation

To use ESLint with pnpm, we recommend setting up a `.npmrc` file with at least the following settings:

```text
auto-install-peers=true
node-linker=hoisted
```

This ensures that pnpm installs dependencies in a way that is more compatible with npm and is less likely to produce errors.

## Configuration

You can configure rules in your `eslint.config.js` files as in this example:

```js
import { defineConfig } from "eslint/config";

export default defineConfig([
    {
        files: ["**/*.js", "**/*.cjs", "**/*.mjs"],
        rules: {
            "prefer-const": "warn",
            "no-constant-binary-expression": "error",
        },
    },
]);
```

The names `"prefer-const"` and `"no-constant-binary-expression"` are the names of [rules](https://eslint.org/docs/rules) in ESLint. The first value is the error level of the rule and can be one of these values:

- `"off"` or `0` - turn the rule off
- `"warn"` or `1` - turn the rule on as a warning (doesn't affect exit code)
- `"error"` or `2` - turn the rule on as an error (exit code will be 1)

The three error levels allow you fine-grained control over how ESLint applies rules (for more configuration options and details, see the [configuration docs](https://eslint.org/docs/latest/use/configure)).
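For example, the same two rules could be configured with the numeric severities from the list above (equivalent behavior; shown purely as an illustration):

```js
import { defineConfig } from "eslint/config";

export default defineConfig([
    {
        files: ["**/*.js", "**/*.cjs", "**/*.mjs"],
        rules: {
            "prefer-const": 1, // same as "warn"
            "no-constant-binary-expression": 2, // same as "error"
        },
    },
]);
```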

## Version Support

The ESLint team provides ongoing support for the current version and six months of limited support for the previous version. Limited support includes critical bug fixes, security issues, and compatibility issues only.

ESLint offers commercial support for both current and previous versions through our partners, [Tidelift][tidelift] and [HeroDevs][herodevs].

See [Version Support](https://eslint.org/version-support) for more details.

## Code of Conduct

ESLint adheres to the [OpenJS Foundation Code of Conduct](https://eslint.org/conduct).

## Filing Issues

Before filing an issue, please be sure to read the guidelines for what you're reporting:

- [Bug Report](https://eslint.org/docs/latest/contribute/report-bugs)
- [Propose a New Rule](https://eslint.org/docs/latest/contribute/propose-new-rule)
- [Proposing a Rule Change](https://eslint.org/docs/latest/contribute/propose-rule-change)
- [Request a Change](https://eslint.org/docs/latest/contribute/request-change)

## Frequently Asked Questions

### Does ESLint support JSX?

Yes, ESLint natively supports parsing JSX syntax (this must be enabled in [configuration](https://eslint.org/docs/latest/use/configure)). Please note that supporting JSX syntax _is not_ the same as supporting React. React applies specific semantics to JSX syntax that ESLint doesn't recognize. We recommend using [eslint-plugin-react](https://www.npmjs.com/package/eslint-plugin-react) if you are using React and want React semantics.

### Does Prettier replace ESLint?

No, ESLint and Prettier have different jobs: ESLint is a linter (looking for problematic patterns) and Prettier is a code formatter. Using both tools is common, refer to [Prettier's documentation](https://prettier.io/docs/en/install#eslint-and-other-linters) to learn how to configure them to work well with each other.

### What ECMAScript versions does ESLint support?

ESLint has full support for ECMAScript 3, 5, and every year from 2015 up until the most recent stage 4 specification (the default). You can set your desired ECMAScript syntax and other settings (like global variables) through [configuration](https://eslint.org/docs/latest/use/configure).

### What about experimental features?

ESLint's parser only officially supports the latest final ECMAScript standard. We will make changes to core rules in order to avoid crashes on stage 3 ECMAScript syntax proposals (as long as they are implemented using the correct experimental ESTree syntax). We may make changes to core rules to better work with language extensions (such as JSX, Flow, and TypeScript) on a case-by-case basis.

In other cases (including if rules need to warn on more or fewer cases due to new syntax, rather than just not crashing), we recommend you use other parsers and/or rule plugins. If you are using Babel, you can use [@babel/eslint-parser](https://www.npmjs.com/package/@babel/eslint-parser) and [@babel/eslint-plugin](https://www.npmjs.com/package/@babel/eslint-plugin) to use any option available in Babel.

Once a language feature has been adopted into the ECMAScript standard (stage 4 according to the [TC39 process](https://tc39.github.io/process-document/)), we will accept issues and pull requests related to the new feature, subject to our [contributing guidelines](https://eslint.org/docs/latest/contribute). Until then, please use the appropriate parser and plugin(s) for your experimental feature.

### Which Node.js versions does ESLint support?

ESLint updates the supported Node.js versions with each major release of ESLint. At that time, ESLint's supported Node.js versions are updated to be:

1. The most recent maintenance release of Node.js
1. The lowest minor version of the Node.js LTS release that includes the features the ESLint team wants to use.
1. The Node.js Current release

ESLint is also expected to work with Node.js versions released after the Node.js Current release.

Refer to the [Quick Start Guide](https://eslint.org/docs/latest/use/getting-started#prerequisites) for the officially supported Node.js versions for a given ESLint release.

### Where to ask for help?

Open a [discussion](https://github.com/eslint/eslint/discussions) or stop by our [Discord server](https://eslint.org/chat).

### Why doesn't ESLint lock dependency versions?

Lock files like `package-lock.json` are helpful for deployed applications. They ensure that dependencies are consistent between environments and across deployments.

Packages like `eslint` that get published to the npm registry do not include lock files. `npm install eslint` as a user will respect version constraints in ESLint's `package.json`. ESLint and its dependencies will be included in the user's lock file if one exists, but ESLint's own lock file would not be used.

We intentionally don't lock dependency versions so that we have the latest compatible dependency versions in development and CI that our users get when installing ESLint in a project.

The Twilio blog has a [deeper dive](https://www.twilio.com/blog/lockfiles-nodejs) to learn more.

## Releases

We have scheduled releases every two weeks on Friday or Saturday. You can follow a [release issue](https://github.com/eslint/eslint/issues?q=is%3Aopen+is%3Aissue+label%3Arelease) for updates about the scheduling of any particular release.

## Security Policy

ESLint takes security seriously. We work hard to ensure that ESLint is safe for everyone and that security issues are addressed quickly and responsibly. Read the full [security policy](https://github.com/eslint/.github/blob/master/SECURITY.md).

## Semantic Versioning Policy

ESLint follows [semantic versioning](https://semver.org). However, due to the nature of ESLint as a code quality tool, it's not always clear when a minor or major version bump occurs. To help clarify this for everyone, we've defined the following semantic versioning policy for ESLint:

- Patch release (intended to not break your lint build)
  - A bug fix in a rule that results in ESLint reporting fewer linting errors.
  - A bug fix to the CLI or core (including formatters).
  - Improvements to documentation.
  - Non-user-facing changes such as refactoring code, adding, deleting, or modifying tests, and increasing test coverage.
  - Re-releasing after a failed release (i.e., publishing a release that doesn't work for anyone).
- Minor release (might break your lint build)
  - A bug fix in a rule that results in ESLint reporting more linting errors.
  - A new rule is created.
  - A new option to an existing rule that does not result in ESLint reporting more linting errors by default.
  - A new addition to an existing rule to support a newly-added language feature (within the last 12 months) that will result in ESLint reporting more linting errors by default.
  - An existing rule is deprecated.
  - A new CLI capability is created.
  - New capabilities to the public API are added (new classes, new methods, new arguments to existing methods, etc.).
  - A new formatter is created.
  - `eslint:recommended` is updated and will result in strictly fewer linting errors (e.g., rule removals).
- Major release (likely to break your lint build)
  - `eslint:recommended` is updated and may result in new linting errors (e.g., rule additions, most rule option updates).
  - A new option to an existing rule that results in ESLint reporting more linting errors by default.
  - An existing formatter is removed.
  - Part of the public API is removed or changed in an incompatible way. The public API includes:
    - Rule schemas
    - Configuration schema
    - Command-line options
    - Node.js API
    - Rule, formatter, parser, plugin APIs

According to our policy, any minor update may report more linting errors than the previous release (ex: from a bug fix). As such, we recommend using the tilde (`~`) in `package.json` e.g. `"eslint": "~3.1.0"` to guarantee the results of your builds.

## License

MIT License

Copyright OpenJS Foundation and other contributors, <www.openjsf.org>

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.

## Team

These folks keep the project moving and are resources for help.

<!-- NOTE: This section is autogenerated. Do not manually edit.-->

<!--teamstart-->

### Technical Steering Committee (TSC)

The people who manage releases, review feature requests, and meet regularly to ensure ESLint is properly maintained.

<table><tbody><tr><td align="center" valign="top" width="11%">
<a href="https://github.com/nzakas">
<img src="https://github.com/nzakas.png?s=75" width="75" height="75" alt="Nicholas C. Zakas's Avatar"><br />
Nicholas C. Zakas
</a>
</td><td align="center" valign="top" width="11%">
<a href="https://github.com/fasttime">
<img src="https://github.com/fasttime.png?s=75" width="75" height="75" alt="Francesco Trotta's Avatar"><br />
Francesco Trotta
</a>
</td><td align="center" valign="top" width="11%">
<a href="https://github.com/mdjermanovic">
<img src="https://github.com/mdjermanovic.png?s=75" width="75" height="75" alt="Milos Djermanovic's Avatar"><br />
Milos Djermanovic
</a>
</td></tr></tbody></table>

### Reviewers

The people who review and implement new features.

<table><tbody><tr><td align="center" valign="top" width="11%">
<a href="https://github.com/aladdin-add">
<img src="https://github.com/aladdin-add.png?s=75" width="75" height="75" alt="唯然's Avatar"><br />
唯然
</a>
</td><td align="center" valign="top" width="11%">
<a href="https://github.com/snitin315">
<img src="https://github.com/snitin315.png?s=75" width="75" height="75" alt="Nitin Kumar's Avatar"><br />
Nitin Kumar
</a>
</td></tr></tbody></table>

### Committers

The people who review and fix bugs and help triage issues.

<table><tbody><tr><td align="center" valign="top" width="11%">
<a href="https://github.com/JoshuaKGoldberg">
<img src="https://github.com/JoshuaKGoldberg.png?s=75" width="75" height="75" alt="Josh Goldberg ✨'s Avatar"><br />
Josh Goldberg ✨
</a>
</td><td align="center" valign="top" width="11%">
<a href="https://github.com/Tanujkanti4441">
<img src="https://github.com/Tanujkanti4441.png?s=75" width="75" height="75" alt="Tanuj Kanti's Avatar"><br />
Tanuj Kanti
</a>
</td></tr></tbody></table>

### Website Team

Team members who focus specifically on eslint.org

<table><tbody><tr><td align="center" valign="top" width="11%">
<a href="https://github.com/amareshsm">
<img src="https://github.com/amareshsm.png?s=75" width="75" height="75" alt="Amaresh S M's Avatar"><br />
Amaresh S M
</a>
</td><td align="center" valign="top" width="11%">
<a href="https://github.com/harish-sethuraman">
<img src="https://github.com/harish-sethuraman.png?s=75" width="75" height="75" alt="Strek's Avatar"><br />
Strek
</a>
</td><td align="center" valign="top" width="11%">
<a href="https://github.com/kecrily">
<img src="https://github.com/kecrily.png?s=75" width="75" height="75" alt="Percy Ma's Avatar"><br />
Percy Ma
</a>
</td></tr></tbody></table>

<!--teamend-->

<!-- NOTE: This section is autogenerated. Do not manually edit.-->
<!--sponsorsstart-->

## Sponsors

The following companies, organizations, and individuals support ESLint's ongoing maintenance and development. [Become a Sponsor](https://eslint.org/donate)
to get your logo on our READMEs and [website](https://eslint.org/sponsors).

<h3>Platinum Sponsors</h3>
<p><a href="https://automattic.com"><img src="https://images.opencollective.com/automattic/d0ef3e1/logo.png" alt="Automattic" height="128"></a> <a href="https://www.airbnb.com/"><img src="https://images.opencollective.com/airbnb/d327d66/logo.png" alt="Airbnb" height="128"></a></p><h3>Gold Sponsors</h3>
<p><a href="https://qlty.sh/"><img src="https://images.opencollective.com/qltysh/33d157d/logo.png" alt="Qlty Software" height="96"></a> <a href="https://trunk.io/"><img src="https://images.opencollective.com/trunkio/fb92d60/avatar.png" alt="trunk.io" height="96"></a> <a href="https://shopify.engineering/"><img src="https://avatars.githubusercontent.com/u/8085" alt="Shopify" height="96"></a></p><h3>Silver Sponsors</h3>
<p><a href="https://vite.dev/"><img src="https://images.opencollective.com/vite/e6d15e1/logo.png" alt="Vite" height="64"></a> <a href="https://liftoff.io/"><img src="https://images.opencollective.com/liftoff/5c4fa84/logo.png" alt="Liftoff" height="64"></a> <a href="https://stackblitz.com"><img src="https://avatars.githubusercontent.com/u/28635252" alt="StackBlitz" height="64"></a></p><h3>Bronze Sponsors</h3>
<p><a href="https://cybozu.co.jp/"><img src="https://images.opencollective.com/cybozu/933e46d/logo.png" alt="Cybozu" height="32"></a> <a href="https://www.crosswordsolver.org/anagram-solver/"><img src="https://images.opencollective.com/anagram-solver/2666271/logo.png" alt="Anagram Solver" height="32"></a> <a href="https://icons8.com/"><img src="https://images.opencollective.com/icons8/7fa1641/logo.png" alt="Icons8" height="32"></a> <a href="https://discord.com"><img src="https://images.opencollective.com/discordapp/f9645d9/logo.png" alt="Discord" height="32"></a> <a href="https://www.gitbook.com"><img src="https://avatars.githubusercontent.com/u/7111340" alt="GitBook" height="32"></a> <a href="https://nolebase.ayaka.io"><img src="https://avatars.githubusercontent.com/u/11081491" alt="Neko" height="32"></a> <a href="https://nx.dev"><img src="https://avatars.githubusercontent.com/u/23692104" alt="Nx" height="32"></a> <a href="https://opensource.mercedes-benz.com/"><img src="https://avatars.githubusercontent.com/u/34240465" alt="Mercedes-Benz Group" height="32"></a> <a href="https://herocoders.com"><img src="https://avatars.githubusercontent.com/u/37549774" alt="HeroCoders" height="32"></a> <a href="https://www.lambdatest.com"><img src="https://avatars.githubusercontent.com/u/171592363" alt="LambdaTest" height="32"></a></p>
<h3>Technology Sponsors</h3>
Technology sponsors allow us to use their products and services for free as part of a contribution to the open source ecosystem and our work.
<p><a href="https://netlify.com"><img src="https://raw.githubusercontent.com/eslint/eslint.org/main/src/assets/images/techsponsors/netlify-icon.svg" alt="Netlify" height="32"></a> <a href="https://algolia.com"><img src="https://raw.githubusercontent.com/eslint/eslint.org/main/src/assets/images/techsponsors/algolia-icon.svg" alt="Algolia" height="32"></a> <a href="https://1password.com"><img src="https://raw.githubusercontent.com/eslint/eslint.org/main/src/assets/images/techsponsors/1password-icon.svg" alt="1Password" height="32"></a></p>

<!--sponsorsend-->

[tidelift]: https://tidelift.com/funding/github/npm/eslint
[herodevs]: https://www.herodevs.com/support/eslint-nes?utm_source=ESLintWebsite&utm_medium=ESLintWebsite&utm_campaign=ESLintNES&utm_id=ESLintNES
@@ -0,0 +1 @@
{"version":3,"names":["_remapping","data","require","mergeSourceMap","inputMap","map","sourceFileName","source","replace","found","result","remapping","rootless","s","ctx","sourceRoot","Object","assign"],"sources":["../../../src/transformation/file/merge-map.ts"],"sourcesContent":["type SourceMap = any;\nimport remapping from \"@ampproject/remapping\";\n\nexport default function mergeSourceMap(\n inputMap: SourceMap,\n map: SourceMap,\n sourceFileName: string,\n): SourceMap {\n // On win32 machines, the sourceFileName uses backslash paths like\n // `C:\\foo\\bar.js`. But sourcemaps are always posix paths, so we need to\n // normalize to regular slashes before we can merge (else we won't find the\n // source associated with our input map).\n // This mirrors code done while generating the output map at\n // https://github.com/babel/babel/blob/5c2fcadc9ae34fd20dd72b1111d5cf50476d700d/packages/babel-generator/src/source-map.ts#L102\n const source = sourceFileName.replace(/\\\\/g, \"/\");\n\n // Prevent an infinite recursion if one of the input map's sources has the\n // same resolved path as the input map. In the case, it would keep find the\n // input map, then get it's sources which will include a path like the input\n // map, on and on.\n let found = false;\n const result = remapping(rootless(map), (s, ctx) => {\n if (s === source && !found) {\n found = true;\n // We empty the source location, which will prevent the sourcemap from\n // becoming relative to the input's location. Eg, if we're transforming a\n // file 'foo/bar.js', and it is a transformation of a `baz.js` file in the\n // same directory, the expected output is just `baz.js`. Without this step,\n // it would become `foo/baz.js`.\n ctx.source = \"\";\n\n return rootless(inputMap);\n }\n\n return null;\n });\n\n if (typeof inputMap.sourceRoot === \"string\") {\n result.sourceRoot = inputMap.sourceRoot;\n }\n\n // remapping returns a SourceMap class type, but this breaks code downstream in\n // @babel/traverse and @babel/types that relies on data being plain objects.\n // When it encounters the sourcemap type it outputs a \"don't know how to turn\n // this value into a node\" error. As a result, we are converting the merged\n // sourcemap to a plain js object.\n return { ...result };\n}\n\nfunction rootless(map: SourceMap): SourceMap {\n return {\n ...map,\n\n // This is a bit hack. Remapping will create absolute sources in our\n // sourcemap, but we want to maintain sources relative to the sourceRoot.\n // We'll re-add the sourceRoot after remapping.\n sourceRoot: null,\n };\n}\n"],"mappings":";;;;;;AACA,SAAAA,WAAA;EAAA,MAAAC,IAAA,GAAAC,OAAA;EAAAF,UAAA,YAAAA,CAAA;IAAA,OAAAC,IAAA;EAAA;EAAA,OAAAA,IAAA;AAAA;AAEe,SAASE,cAAcA,CACpCC,QAAmB,EACnBC,GAAc,EACdC,cAAsB,EACX;EAOX,MAAMC,MAAM,GAAGD,cAAc,CAACE,OAAO,CAAC,KAAK,EAAE,GAAG,CAAC;EAMjD,IAAIC,KAAK,GAAG,KAAK;EACjB,MAAMC,MAAM,GAAGC,WAAQA,CAAC,CAACC,QAAQ,CAACP,GAAG,CAAC,EAAE,CAACQ,CAAC,EAAEC,GAAG,KAAK;IAClD,IAAID,CAAC,KAAKN,MAAM,IAAI,CAACE,KAAK,EAAE;MAC1BA,KAAK,GAAG,IAAI;MAMZK,GAAG,CAACP,MAAM,GAAG,EAAE;MAEf,OAAOK,QAAQ,CAACR,QAAQ,CAAC;IAC3B;IAEA,OAAO,IAAI;EACb,CAAC,CAAC;EAEF,IAAI,OAAOA,QAAQ,CAACW,UAAU,KAAK,QAAQ,EAAE;IAC3CL,MAAM,CAACK,UAAU,GAAGX,QAAQ,CAACW,UAAU;EACzC;EAOA,OAAAC,MAAA,CAAAC,MAAA,KAAYP,MAAM;AACpB;AAEA,SAASE,QAAQA,CAACP,GAAc,EAAa;EAC3C,OAAAW,MAAA,CAAAC,MAAA,KACKZ,GAAG;IAKNU,UAAU,EAAE;EAAI;AAEpB;AAAC","ignoreList":[]}
|
||||
@@ -0,0 +1,43 @@
|
||||
{
|
||||
"name": "use-sync-external-store",
|
||||
"description": "Backwards compatible shim for React's useSyncExternalStore. Works with any React that supports hooks.",
|
||||
"version": "1.5.0",
|
||||
"exports": {
|
||||
".": "./index.js",
|
||||
"./with-selector": "./with-selector.js",
|
||||
"./with-selector.js": "./with-selector.js",
|
||||
"./shim": {
|
||||
"react-native": "./shim/index.native.js",
|
||||
"default": "./shim/index.js"
|
||||
},
|
||||
"./shim/index.js": "./shim/index.js",
|
||||
"./shim/index.native": "./shim/index.native.js",
|
||||
"./shim/index.native.js": "./shim/index.native.js",
|
||||
"./shim/with-selector": "./shim/with-selector.js",
|
||||
"./shim/with-selector.js": "./shim/with-selector.js",
|
||||
"./package.json": "./package.json"
|
||||
},
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "https://github.com/facebook/react.git",
|
||||
"directory": "packages/use-sync-external-store"
|
||||
},
|
||||
"files": [
|
||||
"LICENSE",
|
||||
"README.md",
|
||||
"index.js",
|
||||
"index.native.js",
|
||||
"with-selector.js",
|
||||
"with-selector.native.js",
|
||||
"shim/",
|
||||
"cjs/"
|
||||
],
|
||||
"license": "MIT",
|
||||
"peerDependencies": {
|
||||
"react": "^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0"
|
||||
},
|
||||
"devDependencies": {
|
||||
"react-17": "npm:react@^17",
|
||||
"react-dom-17": "npm:react-dom@^17"
|
||||
}
|
||||
}
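The conditional `exports` map above is what routes subpath requires to concrete files (with a React Native override for the shim). A minimal consumer-side sketch, assuming React >= 16.8 and this package are installed:

```js
// Sketch: consuming the "exports" map above.
// "use-sync-external-store/shim" resolves to ./shim/index.js here,
// and to ./shim/index.native.js under the react-native condition.
const { useSyncExternalStore } = require('use-sync-external-store/shim');

// A minimal external store shaped the way the hook expects.
let state = 0;
const listeners = new Set();
const store = {
  getSnapshot: () => state,
  subscribe: (listener) => {
    listeners.add(listener);
    return () => listeners.delete(listener);
  },
  increment: () => {
    state += 1;
    listeners.forEach((l) => l());
  },
};

// Inside a React component:
//   const value = useSyncExternalStore(store.subscribe, store.getSnapshot);
```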
|
||||
File diff suppressed because one or more lines are too long
@@ -0,0 +1,396 @@
|
||||
'use strict'
|
||||
|
||||
const { Buffer } = require('buffer')
|
||||
const symbol = Symbol.for('BufferList')
|
||||
|
||||
function BufferList (buf) {
|
||||
if (!(this instanceof BufferList)) {
|
||||
return new BufferList(buf)
|
||||
}
|
||||
|
||||
BufferList._init.call(this, buf)
|
||||
}
|
||||
|
||||
BufferList._init = function _init (buf) {
|
||||
Object.defineProperty(this, symbol, { value: true })
|
||||
|
||||
this._bufs = []
|
||||
this.length = 0
|
||||
|
||||
if (buf) {
|
||||
this.append(buf)
|
||||
}
|
||||
}
|
||||
|
||||
BufferList.prototype._new = function _new (buf) {
|
||||
return new BufferList(buf)
|
||||
}
|
||||
|
||||
BufferList.prototype._offset = function _offset (offset) {
|
||||
if (offset === 0) {
|
||||
return [0, 0]
|
||||
}
|
||||
|
||||
let tot = 0
|
||||
|
||||
for (let i = 0; i < this._bufs.length; i++) {
|
||||
const _t = tot + this._bufs[i].length
|
||||
if (offset < _t || i === this._bufs.length - 1) {
|
||||
return [i, offset - tot]
|
||||
}
|
||||
tot = _t
|
||||
}
|
||||
}
|
||||
|
||||
BufferList.prototype._reverseOffset = function (blOffset) {
|
||||
const bufferId = blOffset[0]
|
||||
let offset = blOffset[1]
|
||||
|
||||
for (let i = 0; i < bufferId; i++) {
|
||||
offset += this._bufs[i].length
|
||||
}
|
||||
|
||||
return offset
|
||||
}
|
||||
|
||||
BufferList.prototype.get = function get (index) {
|
||||
if (index > this.length || index < 0) {
|
||||
return undefined
|
||||
}
|
||||
|
||||
const offset = this._offset(index)
|
||||
|
||||
return this._bufs[offset[0]][offset[1]]
|
||||
}
|
||||
|
||||
BufferList.prototype.slice = function slice (start, end) {
|
||||
if (typeof start === 'number' && start < 0) {
|
||||
start += this.length
|
||||
}
|
||||
|
||||
if (typeof end === 'number' && end < 0) {
|
||||
end += this.length
|
||||
}
|
||||
|
||||
return this.copy(null, 0, start, end)
|
||||
}
|
||||
|
||||
BufferList.prototype.copy = function copy (dst, dstStart, srcStart, srcEnd) {
|
||||
if (typeof srcStart !== 'number' || srcStart < 0) {
|
||||
srcStart = 0
|
||||
}
|
||||
|
||||
if (typeof srcEnd !== 'number' || srcEnd > this.length) {
|
||||
srcEnd = this.length
|
||||
}
|
||||
|
||||
if (srcStart >= this.length) {
|
||||
return dst || Buffer.alloc(0)
|
||||
}
|
||||
|
||||
if (srcEnd <= 0) {
|
||||
return dst || Buffer.alloc(0)
|
||||
}
|
||||
|
||||
const copy = !!dst
|
||||
const off = this._offset(srcStart)
|
||||
const len = srcEnd - srcStart
|
||||
let bytes = len
|
||||
let bufoff = (copy && dstStart) || 0
|
||||
let start = off[1]
|
||||
|
||||
// copy/slice everything
|
||||
if (srcStart === 0 && srcEnd === this.length) {
|
||||
if (!copy) {
|
||||
// slice, but full concat if multiple buffers
|
||||
return this._bufs.length === 1
|
||||
? this._bufs[0]
|
||||
: Buffer.concat(this._bufs, this.length)
|
||||
}
|
||||
|
||||
// copy, need to copy individual buffers
|
||||
for (let i = 0; i < this._bufs.length; i++) {
|
||||
this._bufs[i].copy(dst, bufoff)
|
||||
bufoff += this._bufs[i].length
|
||||
}
|
||||
|
||||
return dst
|
||||
}
|
||||
|
||||
// easy, cheap case where it's a subset of one of the buffers
|
||||
if (bytes <= this._bufs[off[0]].length - start) {
|
||||
return copy
|
||||
? this._bufs[off[0]].copy(dst, dstStart, start, start + bytes)
|
||||
: this._bufs[off[0]].slice(start, start + bytes)
|
||||
}
|
||||
|
||||
if (!copy) {
|
||||
// a slice, we need something to copy in to
|
||||
dst = Buffer.allocUnsafe(len)
|
||||
}
|
||||
|
||||
for (let i = off[0]; i < this._bufs.length; i++) {
|
||||
const l = this._bufs[i].length - start
|
||||
|
||||
if (bytes > l) {
|
||||
this._bufs[i].copy(dst, bufoff, start)
|
||||
bufoff += l
|
||||
} else {
|
||||
this._bufs[i].copy(dst, bufoff, start, start + bytes)
|
||||
bufoff += l
|
||||
break
|
||||
}
|
||||
|
||||
bytes -= l
|
||||
|
||||
if (start) {
|
||||
start = 0
|
||||
}
|
||||
}
|
||||
|
||||
// safeguard so that we don't return uninitialized memory
|
||||
if (dst.length > bufoff) return dst.slice(0, bufoff)
|
||||
|
||||
return dst
|
||||
}
|
||||
|
||||
BufferList.prototype.shallowSlice = function shallowSlice (start, end) {
|
||||
start = start || 0
|
||||
end = typeof end !== 'number' ? this.length : end
|
||||
|
||||
if (start < 0) {
|
||||
start += this.length
|
||||
}
|
||||
|
||||
if (end < 0) {
|
||||
end += this.length
|
||||
}
|
||||
|
||||
if (start === end) {
|
||||
return this._new()
|
||||
}
|
||||
|
||||
const startOffset = this._offset(start)
|
||||
const endOffset = this._offset(end)
|
||||
const buffers = this._bufs.slice(startOffset[0], endOffset[0] + 1)
|
||||
|
||||
if (endOffset[1] === 0) {
|
||||
buffers.pop()
|
||||
} else {
|
||||
buffers[buffers.length - 1] = buffers[buffers.length - 1].slice(0, endOffset[1])
|
||||
}
|
||||
|
||||
if (startOffset[1] !== 0) {
|
||||
buffers[0] = buffers[0].slice(startOffset[1])
|
||||
}
|
||||
|
||||
return this._new(buffers)
|
||||
}
|
||||
|
||||
BufferList.prototype.toString = function toString (encoding, start, end) {
|
||||
return this.slice(start, end).toString(encoding)
|
||||
}
|
||||
|
||||
BufferList.prototype.consume = function consume (bytes) {
|
||||
// first, normalize the argument, in accordance with how Buffer does it
|
||||
bytes = Math.trunc(bytes)
|
||||
// do nothing if not a positive number
|
||||
if (Number.isNaN(bytes) || bytes <= 0) return this
|
||||
|
||||
while (this._bufs.length) {
|
||||
if (bytes >= this._bufs[0].length) {
|
||||
bytes -= this._bufs[0].length
|
||||
this.length -= this._bufs[0].length
|
||||
this._bufs.shift()
|
||||
} else {
|
||||
this._bufs[0] = this._bufs[0].slice(bytes)
|
||||
this.length -= bytes
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
return this
|
||||
}
|
||||
|
||||
BufferList.prototype.duplicate = function duplicate () {
|
||||
const copy = this._new()
|
||||
|
||||
for (let i = 0; i < this._bufs.length; i++) {
|
||||
copy.append(this._bufs[i])
|
||||
}
|
||||
|
||||
return copy
|
||||
}
|
||||
|
||||
BufferList.prototype.append = function append (buf) {
|
||||
if (buf == null) {
|
||||
return this
|
||||
}
|
||||
|
||||
if (buf.buffer) {
|
||||
// append a view of the underlying ArrayBuffer
|
||||
this._appendBuffer(Buffer.from(buf.buffer, buf.byteOffset, buf.byteLength))
|
||||
} else if (Array.isArray(buf)) {
|
||||
for (let i = 0; i < buf.length; i++) {
|
||||
this.append(buf[i])
|
||||
}
|
||||
} else if (this._isBufferList(buf)) {
|
||||
// unwrap argument into individual BufferLists
|
||||
for (let i = 0; i < buf._bufs.length; i++) {
|
||||
this.append(buf._bufs[i])
|
||||
}
|
||||
} else {
|
||||
// coerce number arguments to strings, since Buffer(number) does
|
||||
// uninitialized memory allocation
|
||||
if (typeof buf === 'number') {
|
||||
buf = buf.toString()
|
||||
}
|
||||
|
||||
this._appendBuffer(Buffer.from(buf))
|
||||
}
|
||||
|
||||
return this
|
||||
}
|
||||
|
||||
BufferList.prototype._appendBuffer = function appendBuffer (buf) {
|
||||
this._bufs.push(buf)
|
||||
this.length += buf.length
|
||||
}
|
||||
|
||||
BufferList.prototype.indexOf = function (search, offset, encoding) {
|
||||
if (encoding === undefined && typeof offset === 'string') {
|
||||
encoding = offset
|
||||
offset = undefined
|
||||
}
|
||||
|
||||
if (typeof search === 'function' || Array.isArray(search)) {
|
||||
throw new TypeError('The "value" argument must be one of type string, Buffer, BufferList, or Uint8Array.')
|
||||
} else if (typeof search === 'number') {
|
||||
search = Buffer.from([search])
|
||||
} else if (typeof search === 'string') {
|
||||
search = Buffer.from(search, encoding)
|
||||
} else if (this._isBufferList(search)) {
|
||||
search = search.slice()
|
||||
} else if (Array.isArray(search.buffer)) {
|
||||
search = Buffer.from(search.buffer, search.byteOffset, search.byteLength)
|
||||
} else if (!Buffer.isBuffer(search)) {
|
||||
search = Buffer.from(search)
|
||||
}
|
||||
|
||||
offset = Number(offset || 0)
|
||||
|
||||
if (isNaN(offset)) {
|
||||
offset = 0
|
||||
}
|
||||
|
||||
if (offset < 0) {
|
||||
offset = this.length + offset
|
||||
}
|
||||
|
||||
if (offset < 0) {
|
||||
offset = 0
|
||||
}
|
||||
|
||||
if (search.length === 0) {
|
||||
return offset > this.length ? this.length : offset
|
||||
}
|
||||
|
||||
const blOffset = this._offset(offset)
|
||||
let blIndex = blOffset[0] // index of which internal buffer we're working on
|
||||
let buffOffset = blOffset[1] // offset of the internal buffer we're working on
|
||||
|
||||
// scan over each buffer
|
||||
for (; blIndex < this._bufs.length; blIndex++) {
|
||||
const buff = this._bufs[blIndex]
|
||||
|
||||
while (buffOffset < buff.length) {
|
||||
const availableWindow = buff.length - buffOffset
|
||||
|
||||
if (availableWindow >= search.length) {
|
||||
const nativeSearchResult = buff.indexOf(search, buffOffset)
|
||||
|
||||
if (nativeSearchResult !== -1) {
|
||||
return this._reverseOffset([blIndex, nativeSearchResult])
|
||||
}
|
||||
|
||||
buffOffset = buff.length - search.length + 1 // end of native search window
|
||||
} else {
|
||||
const revOffset = this._reverseOffset([blIndex, buffOffset])
|
||||
|
||||
if (this._match(revOffset, search)) {
|
||||
return revOffset
|
||||
}
|
||||
|
||||
buffOffset++
|
||||
}
|
||||
}
|
||||
|
||||
buffOffset = 0
|
||||
}
|
||||
|
||||
return -1
|
||||
}
|
||||
|
||||
BufferList.prototype._match = function (offset, search) {
|
||||
if (this.length - offset < search.length) {
|
||||
return false
|
||||
}
|
||||
|
||||
for (let searchOffset = 0; searchOffset < search.length; searchOffset++) {
|
||||
if (this.get(offset + searchOffset) !== search[searchOffset]) {
|
||||
return false
|
||||
}
|
||||
}
|
||||
return true
|
||||
}
|
||||
|
||||
;(function () {
|
||||
const methods = {
|
||||
readDoubleBE: 8,
|
||||
readDoubleLE: 8,
|
||||
readFloatBE: 4,
|
||||
readFloatLE: 4,
|
||||
readInt32BE: 4,
|
||||
readInt32LE: 4,
|
||||
readUInt32BE: 4,
|
||||
readUInt32LE: 4,
|
||||
readInt16BE: 2,
|
||||
readInt16LE: 2,
|
||||
readUInt16BE: 2,
|
||||
readUInt16LE: 2,
|
||||
readInt8: 1,
|
||||
readUInt8: 1,
|
||||
readIntBE: null,
|
||||
readIntLE: null,
|
||||
readUIntBE: null,
|
||||
readUIntLE: null
|
||||
}
|
||||
|
||||
for (const m in methods) {
|
||||
(function (m) {
|
||||
if (methods[m] === null) {
|
||||
BufferList.prototype[m] = function (offset, byteLength) {
|
||||
return this.slice(offset, offset + byteLength)[m](0, byteLength)
|
||||
}
|
||||
} else {
|
||||
BufferList.prototype[m] = function (offset = 0) {
|
||||
return this.slice(offset, offset + methods[m])[m](0)
|
||||
}
|
||||
}
|
||||
}(m))
|
||||
}
|
||||
}())
|
||||
|
||||
// Used internally by the class and also as an indicator of this object being
|
||||
// a `BufferList`. It's not possible to use `instanceof BufferList` in a browser
|
||||
// environment because there could be multiple different copies of the
|
||||
// BufferList class, so some `BufferList`s might not be instances of this one.
|
||||
BufferList.prototype._isBufferList = function _isBufferList (b) {
|
||||
return b instanceof BufferList || BufferList.isBufferList(b)
|
||||
}
|
||||
|
||||
BufferList.isBufferList = function isBufferList (b) {
|
||||
return b != null && b[symbol]
|
||||
}
|
||||
|
||||
module.exports = BufferList
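A short driving sketch of the list implemented above. The relative require path is an assumption; in the published `bl` package this class is also exposed as `require('bl').BufferList`.

```js
// Sketch: exercising BufferList directly (require path assumed).
const BufferList = require('./BufferList');

const bl = new BufferList();
bl.append(Buffer.from('abcd'));
bl.append(Buffer.from('efg'));   // stored internally as a second buffer

console.log(bl.length);                 // 7
console.log(bl.slice(2, 5).toString()); // 'cde' (spans both internal buffers)
console.log(bl.indexOf('ef'));          // 4
console.log(bl.get(6));                 // 103, the byte for 'g'

bl.consume(3);                          // drop the first three bytes
console.log(bl.toString());             // 'defg'
```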
|
||||
@@ -0,0 +1,13 @@
|
||||
"use strict";
|
||||
|
||||
Object.defineProperty(exports, "__esModule", {
|
||||
value: true
|
||||
});
|
||||
exports.default = buildMatchMemberExpression;
|
||||
var _matchesPattern = require("./matchesPattern.js");
|
||||
function buildMatchMemberExpression(match, allowPartial) {
|
||||
const parts = match.split(".");
|
||||
return member => (0, _matchesPattern.default)(member, parts, allowPartial);
|
||||
}
|
||||
|
||||
//# sourceMappingURL=buildMatchMemberExpression.js.map
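This compiled helper backs the `buildMatchMemberExpression` export of `@babel/types`. A hedged usage sketch against that public entry point:

```js
// Sketch: using the helper via the @babel/types public API.
const t = require('@babel/types');

// Build a reusable predicate for `React.Component` member expressions.
const isReactComponent = t.buildMatchMemberExpression('React.Component');

const node = t.memberExpression(t.identifier('React'), t.identifier('Component'));
console.log(isReactComponent(node)); // true

// The second argument (allowPartial) relaxes matching to longer member chains.
const matchesPrefix = t.buildMatchMemberExpression('React.Component', true);
```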
|
||||
@@ -0,0 +1,64 @@
|
||||
# yocto-queue [](https://bundlephobia.com/result?p=yocto-queue)
|
||||
|
||||
> Tiny queue data structure
|
||||
|
||||
You should use this package instead of an array if you do a lot of `Array#push()` and `Array#shift()` on large arrays, since `Array#shift()` has [linear time complexity](https://medium.com/@ariel.salem1989/an-easy-to-use-guide-to-big-o-time-complexity-5dcf4be8a444#:~:text=O(N)%E2%80%94Linear%20Time) *O(n)* while `Queue#dequeue()` has [constant time complexity](https://medium.com/@ariel.salem1989/an-easy-to-use-guide-to-big-o-time-complexity-5dcf4be8a444#:~:text=O(1)%20%E2%80%94%20Constant%20Time) *O(1)*. That makes a huge difference for large arrays.
|
||||
|
||||
> A [queue](https://en.wikipedia.org/wiki/Queue_(abstract_data_type)) is an ordered list of elements where an element is inserted at the end of the queue and is removed from the front of the queue. A queue works based on the first-in, first-out ([FIFO](https://en.wikipedia.org/wiki/FIFO_(computing_and_electronics))) principle.
|
||||
|
||||
## Install
|
||||
|
||||
```
|
||||
$ npm install yocto-queue
|
||||
```
|
||||
|
||||
## Usage
|
||||
|
||||
```js
|
||||
const Queue = require('yocto-queue');
|
||||
|
||||
const queue = new Queue();
|
||||
|
||||
queue.enqueue('🦄');
|
||||
queue.enqueue('🌈');
|
||||
|
||||
console.log(queue.size);
|
||||
//=> 2
|
||||
|
||||
console.log(...queue);
|
||||
//=> '🦄 🌈'
|
||||
|
||||
console.log(queue.dequeue());
|
||||
//=> '🦄'
|
||||
|
||||
console.log(queue.dequeue());
|
||||
//=> '🌈'
|
||||
```
|
||||
|
||||
## API
|
||||
|
||||
### `queue = new Queue()`
|
||||
|
||||
The instance is an [`Iterable`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Iteration_protocols), which means you can iterate over the queue front to back with a “for…of” loop, or use spreading to convert the queue to an array. Don't do this unless you really need to though, since it's slow.
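For example (iterating does not dequeue anything):

```js
const Queue = require('yocto-queue');

const queue = new Queue();
queue.enqueue('a');
queue.enqueue('b');

for (const value of queue) {
	console.log(value);
}
//=> 'a'
//=> 'b'

console.log([...queue]);
//=> ['a', 'b']
```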
|
||||
|
||||
#### `.enqueue(value)`
|
||||
|
||||
Add a value to the queue.
|
||||
|
||||
#### `.dequeue()`
|
||||
|
||||
Remove the next value in the queue.
|
||||
|
||||
Returns the removed value or `undefined` if the queue is empty.
|
||||
|
||||
#### `.clear()`
|
||||
|
||||
Clear the queue.
|
||||
|
||||
#### `.size`
|
||||
|
||||
The size of the queue.
|
||||
|
||||
## Related
|
||||
|
||||
- [quick-lru](https://github.com/sindresorhus/quick-lru) - Simple “Least Recently Used” (LRU) cache
|
||||
@@ -0,0 +1,47 @@
|
||||
'use strict';
|
||||
const path = require('path');
|
||||
const Module = require('module');
|
||||
const fs = require('fs');
|
||||
|
||||
const resolveFrom = (fromDir, moduleId, silent) => {
|
||||
if (typeof fromDir !== 'string') {
|
||||
throw new TypeError(`Expected \`fromDir\` to be of type \`string\`, got \`${typeof fromDir}\``);
|
||||
}
|
||||
|
||||
if (typeof moduleId !== 'string') {
|
||||
throw new TypeError(`Expected \`moduleId\` to be of type \`string\`, got \`${typeof moduleId}\``);
|
||||
}
|
||||
|
||||
try {
|
||||
fromDir = fs.realpathSync(fromDir);
|
||||
} catch (err) {
|
||||
if (err.code === 'ENOENT') {
|
||||
fromDir = path.resolve(fromDir);
|
||||
} else if (silent) {
|
||||
return null;
|
||||
} else {
|
||||
throw err;
|
||||
}
|
||||
}
|
||||
|
||||
const fromFile = path.join(fromDir, 'noop.js');
|
||||
|
||||
const resolveFileName = () => Module._resolveFilename(moduleId, {
|
||||
id: fromFile,
|
||||
filename: fromFile,
|
||||
paths: Module._nodeModulePaths(fromDir)
|
||||
});
|
||||
|
||||
if (silent) {
|
||||
try {
|
||||
return resolveFileName();
|
||||
} catch (err) {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
return resolveFileName();
|
||||
};
|
||||
|
||||
module.exports = (fromDir, moduleId) => resolveFrom(fromDir, moduleId);
|
||||
module.exports.silent = (fromDir, moduleId) => resolveFrom(fromDir, moduleId, true);
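A usage sketch of the two exports above; the require path, directory, and module ids are placeholders:

```js
// Sketch: resolving module ids as if require() were issued from another directory.
const resolveFrom = require('./index.js');

// Throws if 'eslint' cannot be resolved from ./fixtures.
const eslintPath = resolveFrom('./fixtures', 'eslint');
console.log(eslintPath); // absolute path to the resolved entry file

// The silent variant returns null instead of throwing.
console.log(resolveFrom.silent('./fixtures', 'some-missing-module')); // null
```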
|
||||
@@ -0,0 +1,63 @@
|
||||
import { __flush } from './scheduler'
|
||||
import type { AnyUpdater, Listener } from './types'
|
||||
|
||||
export interface StoreOptions<
|
||||
TState,
|
||||
TUpdater extends AnyUpdater = (cb: TState) => TState,
|
||||
> {
|
||||
/**
|
||||
* Replace the default update function with a custom one.
|
||||
*/
|
||||
updateFn?: (previous: TState) => (updater: TUpdater) => TState
|
||||
/**
|
||||
* Called when a listener subscribes to the store.
|
||||
*
|
||||
* @return a function to unsubscribe the listener
|
||||
*/
|
||||
onSubscribe?: (
|
||||
listener: Listener<TState>,
|
||||
store: Store<TState, TUpdater>,
|
||||
) => () => void
|
||||
/**
|
||||
* Called after the state has been updated, used to derive other state.
|
||||
*/
|
||||
onUpdate?: () => void
|
||||
}
|
||||
|
||||
export class Store<
|
||||
TState,
|
||||
TUpdater extends AnyUpdater = (cb: TState) => TState,
|
||||
> {
|
||||
listeners = new Set<Listener<TState>>()
|
||||
state: TState
|
||||
prevState: TState
|
||||
options?: StoreOptions<TState, TUpdater>
|
||||
|
||||
constructor(initialState: TState, options?: StoreOptions<TState, TUpdater>) {
|
||||
this.prevState = initialState
|
||||
this.state = initialState
|
||||
this.options = options
|
||||
}
|
||||
|
||||
subscribe = (listener: Listener<TState>) => {
|
||||
this.listeners.add(listener)
|
||||
const unsub = this.options?.onSubscribe?.(listener, this)
|
||||
return () => {
|
||||
this.listeners.delete(listener)
|
||||
unsub?.()
|
||||
}
|
||||
}
|
||||
|
||||
setState = (updater: TUpdater) => {
|
||||
this.prevState = this.state
|
||||
this.state = this.options?.updateFn
|
||||
? this.options.updateFn(this.prevState)(updater)
|
||||
: (updater as any)(this.prevState)
|
||||
|
||||
// Always run onUpdate, regardless of batching
|
||||
this.options?.onUpdate?.()
|
||||
|
||||
// Attempt to flush
|
||||
__flush(this as never)
|
||||
}
|
||||
}
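A plain JavaScript usage sketch of the `Store` class above, assuming the published `@tanstack/store` entry point:

```js
// Sketch: driving Store from plain JavaScript (package entry path assumed).
const { Store } = require('@tanstack/store');

const counter = new Store(
  { count: 0 },
  {
    // Runs after every state update, before listeners are flushed.
    onUpdate: () => console.log('count is now', counter.state.count),
  },
);

const unsubscribe = counter.subscribe(() => {
  // Read the latest values straight off the store.
  console.log('prev', counter.prevState.count, 'next', counter.state.count);
});

// The default updater form is a callback from previous state to next state.
counter.setState((prev) => ({ count: prev.count + 1 }));

unsubscribe();
```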
|
||||
@@ -0,0 +1,866 @@
|
||||
export interface Node {
|
||||
start: number
|
||||
end: number
|
||||
type: string
|
||||
range?: [number, number]
|
||||
loc?: SourceLocation | null
|
||||
}
|
||||
|
||||
export interface SourceLocation {
|
||||
source?: string | null
|
||||
start: Position
|
||||
end: Position
|
||||
}
|
||||
|
||||
export interface Position {
|
||||
/** 1-based */
|
||||
line: number
|
||||
/** 0-based */
|
||||
column: number
|
||||
}
|
||||
|
||||
export interface Identifier extends Node {
|
||||
type: "Identifier"
|
||||
name: string
|
||||
}
|
||||
|
||||
export interface Literal extends Node {
|
||||
type: "Literal"
|
||||
value?: string | boolean | null | number | RegExp | bigint
|
||||
raw?: string
|
||||
regex?: {
|
||||
pattern: string
|
||||
flags: string
|
||||
}
|
||||
bigint?: string
|
||||
}
|
||||
|
||||
export interface Program extends Node {
|
||||
type: "Program"
|
||||
body: Array<Statement | ModuleDeclaration>
|
||||
sourceType: "script" | "module"
|
||||
}
|
||||
|
||||
export interface Function extends Node {
|
||||
id?: Identifier | null
|
||||
params: Array<Pattern>
|
||||
body: BlockStatement | Expression
|
||||
generator: boolean
|
||||
expression: boolean
|
||||
async: boolean
|
||||
}
|
||||
|
||||
export interface ExpressionStatement extends Node {
|
||||
type: "ExpressionStatement"
|
||||
expression: Expression | Literal
|
||||
directive?: string
|
||||
}
|
||||
|
||||
export interface BlockStatement extends Node {
|
||||
type: "BlockStatement"
|
||||
body: Array<Statement>
|
||||
}
|
||||
|
||||
export interface EmptyStatement extends Node {
|
||||
type: "EmptyStatement"
|
||||
}
|
||||
|
||||
export interface DebuggerStatement extends Node {
|
||||
type: "DebuggerStatement"
|
||||
}
|
||||
|
||||
export interface WithStatement extends Node {
|
||||
type: "WithStatement"
|
||||
object: Expression
|
||||
body: Statement
|
||||
}
|
||||
|
||||
export interface ReturnStatement extends Node {
|
||||
type: "ReturnStatement"
|
||||
argument?: Expression | null
|
||||
}
|
||||
|
||||
export interface LabeledStatement extends Node {
|
||||
type: "LabeledStatement"
|
||||
label: Identifier
|
||||
body: Statement
|
||||
}
|
||||
|
||||
export interface BreakStatement extends Node {
|
||||
type: "BreakStatement"
|
||||
label?: Identifier | null
|
||||
}
|
||||
|
||||
export interface ContinueStatement extends Node {
|
||||
type: "ContinueStatement"
|
||||
label?: Identifier | null
|
||||
}
|
||||
|
||||
export interface IfStatement extends Node {
|
||||
type: "IfStatement"
|
||||
test: Expression
|
||||
consequent: Statement
|
||||
alternate?: Statement | null
|
||||
}
|
||||
|
||||
export interface SwitchStatement extends Node {
|
||||
type: "SwitchStatement"
|
||||
discriminant: Expression
|
||||
cases: Array<SwitchCase>
|
||||
}
|
||||
|
||||
export interface SwitchCase extends Node {
|
||||
type: "SwitchCase"
|
||||
test?: Expression | null
|
||||
consequent: Array<Statement>
|
||||
}
|
||||
|
||||
export interface ThrowStatement extends Node {
|
||||
type: "ThrowStatement"
|
||||
argument: Expression
|
||||
}
|
||||
|
||||
export interface TryStatement extends Node {
|
||||
type: "TryStatement"
|
||||
block: BlockStatement
|
||||
handler?: CatchClause | null
|
||||
finalizer?: BlockStatement | null
|
||||
}
|
||||
|
||||
export interface CatchClause extends Node {
|
||||
type: "CatchClause"
|
||||
param?: Pattern | null
|
||||
body: BlockStatement
|
||||
}
|
||||
|
||||
export interface WhileStatement extends Node {
|
||||
type: "WhileStatement"
|
||||
test: Expression
|
||||
body: Statement
|
||||
}
|
||||
|
||||
export interface DoWhileStatement extends Node {
|
||||
type: "DoWhileStatement"
|
||||
body: Statement
|
||||
test: Expression
|
||||
}
|
||||
|
||||
export interface ForStatement extends Node {
|
||||
type: "ForStatement"
|
||||
init?: VariableDeclaration | Expression | null
|
||||
test?: Expression | null
|
||||
update?: Expression | null
|
||||
body: Statement
|
||||
}
|
||||
|
||||
export interface ForInStatement extends Node {
|
||||
type: "ForInStatement"
|
||||
left: VariableDeclaration | Pattern
|
||||
right: Expression
|
||||
body: Statement
|
||||
}
|
||||
|
||||
export interface FunctionDeclaration extends Function {
|
||||
type: "FunctionDeclaration"
|
||||
id: Identifier
|
||||
body: BlockStatement
|
||||
}
|
||||
|
||||
export interface VariableDeclaration extends Node {
|
||||
type: "VariableDeclaration"
|
||||
declarations: Array<VariableDeclarator>
|
||||
kind: "var" | "let" | "const"
|
||||
}
|
||||
|
||||
export interface VariableDeclarator extends Node {
|
||||
type: "VariableDeclarator"
|
||||
id: Pattern
|
||||
init?: Expression | null
|
||||
}
|
||||
|
||||
export interface ThisExpression extends Node {
|
||||
type: "ThisExpression"
|
||||
}
|
||||
|
||||
export interface ArrayExpression extends Node {
|
||||
type: "ArrayExpression"
|
||||
elements: Array<Expression | SpreadElement | null>
|
||||
}
|
||||
|
||||
export interface ObjectExpression extends Node {
|
||||
type: "ObjectExpression"
|
||||
properties: Array<Property | SpreadElement>
|
||||
}
|
||||
|
||||
export interface Property extends Node {
|
||||
type: "Property"
|
||||
key: Expression
|
||||
value: Expression
|
||||
kind: "init" | "get" | "set"
|
||||
method: boolean
|
||||
shorthand: boolean
|
||||
computed: boolean
|
||||
}
|
||||
|
||||
export interface FunctionExpression extends Function {
|
||||
type: "FunctionExpression"
|
||||
body: BlockStatement
|
||||
}
|
||||
|
||||
export interface UnaryExpression extends Node {
|
||||
type: "UnaryExpression"
|
||||
operator: UnaryOperator
|
||||
prefix: boolean
|
||||
argument: Expression
|
||||
}
|
||||
|
||||
export type UnaryOperator = "-" | "+" | "!" | "~" | "typeof" | "void" | "delete"
|
||||
|
||||
export interface UpdateExpression extends Node {
|
||||
type: "UpdateExpression"
|
||||
operator: UpdateOperator
|
||||
argument: Expression
|
||||
prefix: boolean
|
||||
}
|
||||
|
||||
export type UpdateOperator = "++" | "--"
|
||||
|
||||
export interface BinaryExpression extends Node {
|
||||
type: "BinaryExpression"
|
||||
operator: BinaryOperator
|
||||
left: Expression | PrivateIdentifier
|
||||
right: Expression
|
||||
}
|
||||
|
||||
export type BinaryOperator = "==" | "!=" | "===" | "!==" | "<" | "<=" | ">" | ">=" | "<<" | ">>" | ">>>" | "+" | "-" | "*" | "/" | "%" | "|" | "^" | "&" | "in" | "instanceof" | "**"
|
||||
|
||||
export interface AssignmentExpression extends Node {
|
||||
type: "AssignmentExpression"
|
||||
operator: AssignmentOperator
|
||||
left: Pattern
|
||||
right: Expression
|
||||
}
|
||||
|
||||
export type AssignmentOperator = "=" | "+=" | "-=" | "*=" | "/=" | "%=" | "<<=" | ">>=" | ">>>=" | "|=" | "^=" | "&=" | "**=" | "||=" | "&&=" | "??="
|
||||
|
||||
export interface LogicalExpression extends Node {
|
||||
type: "LogicalExpression"
|
||||
operator: LogicalOperator
|
||||
left: Expression
|
||||
right: Expression
|
||||
}
|
||||
|
||||
export type LogicalOperator = "||" | "&&" | "??"
|
||||
|
||||
export interface MemberExpression extends Node {
|
||||
type: "MemberExpression"
|
||||
object: Expression | Super
|
||||
property: Expression | PrivateIdentifier
|
||||
computed: boolean
|
||||
optional: boolean
|
||||
}
|
||||
|
||||
export interface ConditionalExpression extends Node {
|
||||
type: "ConditionalExpression"
|
||||
test: Expression
|
||||
alternate: Expression
|
||||
consequent: Expression
|
||||
}
|
||||
|
||||
export interface CallExpression extends Node {
|
||||
type: "CallExpression"
|
||||
callee: Expression | Super
|
||||
arguments: Array<Expression | SpreadElement>
|
||||
optional: boolean
|
||||
}
|
||||
|
||||
export interface NewExpression extends Node {
|
||||
type: "NewExpression"
|
||||
callee: Expression
|
||||
arguments: Array<Expression | SpreadElement>
|
||||
}
|
||||
|
||||
export interface SequenceExpression extends Node {
|
||||
type: "SequenceExpression"
|
||||
expressions: Array<Expression>
|
||||
}
|
||||
|
||||
export interface ForOfStatement extends Node {
|
||||
type: "ForOfStatement"
|
||||
left: VariableDeclaration | Pattern
|
||||
right: Expression
|
||||
body: Statement
|
||||
await: boolean
|
||||
}
|
||||
|
||||
export interface Super extends Node {
|
||||
type: "Super"
|
||||
}
|
||||
|
||||
export interface SpreadElement extends Node {
|
||||
type: "SpreadElement"
|
||||
argument: Expression
|
||||
}
|
||||
|
||||
export interface ArrowFunctionExpression extends Function {
|
||||
type: "ArrowFunctionExpression"
|
||||
}
|
||||
|
||||
export interface YieldExpression extends Node {
|
||||
type: "YieldExpression"
|
||||
argument?: Expression | null
|
||||
delegate: boolean
|
||||
}
|
||||
|
||||
export interface TemplateLiteral extends Node {
|
||||
type: "TemplateLiteral"
|
||||
quasis: Array<TemplateElement>
|
||||
expressions: Array<Expression>
|
||||
}
|
||||
|
||||
export interface TaggedTemplateExpression extends Node {
|
||||
type: "TaggedTemplateExpression"
|
||||
tag: Expression
|
||||
quasi: TemplateLiteral
|
||||
}
|
||||
|
||||
export interface TemplateElement extends Node {
|
||||
type: "TemplateElement"
|
||||
tail: boolean
|
||||
value: {
|
||||
cooked?: string | null
|
||||
raw: string
|
||||
}
|
||||
}
|
||||
|
||||
export interface AssignmentProperty extends Node {
|
||||
type: "Property"
|
||||
key: Expression
|
||||
value: Pattern
|
||||
kind: "init"
|
||||
method: false
|
||||
shorthand: boolean
|
||||
computed: boolean
|
||||
}
|
||||
|
||||
export interface ObjectPattern extends Node {
|
||||
type: "ObjectPattern"
|
||||
properties: Array<AssignmentProperty | RestElement>
|
||||
}
|
||||
|
||||
export interface ArrayPattern extends Node {
|
||||
type: "ArrayPattern"
|
||||
elements: Array<Pattern | null>
|
||||
}
|
||||
|
||||
export interface RestElement extends Node {
|
||||
type: "RestElement"
|
||||
argument: Pattern
|
||||
}
|
||||
|
||||
export interface AssignmentPattern extends Node {
|
||||
type: "AssignmentPattern"
|
||||
left: Pattern
|
||||
right: Expression
|
||||
}
|
||||
|
||||
export interface Class extends Node {
|
||||
id?: Identifier | null
|
||||
superClass?: Expression | null
|
||||
body: ClassBody
|
||||
}
|
||||
|
||||
export interface ClassBody extends Node {
|
||||
type: "ClassBody"
|
||||
body: Array<MethodDefinition | PropertyDefinition | StaticBlock>
|
||||
}
|
||||
|
||||
export interface MethodDefinition extends Node {
|
||||
type: "MethodDefinition"
|
||||
key: Expression | PrivateIdentifier
|
||||
value: FunctionExpression
|
||||
kind: "constructor" | "method" | "get" | "set"
|
||||
computed: boolean
|
||||
static: boolean
|
||||
}
|
||||
|
||||
export interface ClassDeclaration extends Class {
|
||||
type: "ClassDeclaration"
|
||||
id: Identifier
|
||||
}
|
||||
|
||||
export interface ClassExpression extends Class {
|
||||
type: "ClassExpression"
|
||||
}
|
||||
|
||||
export interface MetaProperty extends Node {
|
||||
type: "MetaProperty"
|
||||
meta: Identifier
|
||||
property: Identifier
|
||||
}
|
||||
|
||||
export interface ImportDeclaration extends Node {
|
||||
type: "ImportDeclaration"
|
||||
specifiers: Array<ImportSpecifier | ImportDefaultSpecifier | ImportNamespaceSpecifier>
|
||||
source: Literal
|
||||
attributes: Array<ImportAttribute>
|
||||
}
|
||||
|
||||
export interface ImportSpecifier extends Node {
|
||||
type: "ImportSpecifier"
|
||||
imported: Identifier | Literal
|
||||
local: Identifier
|
||||
}
|
||||
|
||||
export interface ImportDefaultSpecifier extends Node {
|
||||
type: "ImportDefaultSpecifier"
|
||||
local: Identifier
|
||||
}
|
||||
|
||||
export interface ImportNamespaceSpecifier extends Node {
|
||||
type: "ImportNamespaceSpecifier"
|
||||
local: Identifier
|
||||
}
|
||||
|
||||
export interface ImportAttribute extends Node {
|
||||
type: "ImportAttribute"
|
||||
key: Identifier | Literal
|
||||
value: Literal
|
||||
}
|
||||
|
||||
export interface ExportNamedDeclaration extends Node {
|
||||
type: "ExportNamedDeclaration"
|
||||
declaration?: Declaration | null
|
||||
specifiers: Array<ExportSpecifier>
|
||||
source?: Literal | null
|
||||
attributes: Array<ImportAttribute>
|
||||
}
|
||||
|
||||
export interface ExportSpecifier extends Node {
|
||||
type: "ExportSpecifier"
|
||||
exported: Identifier | Literal
|
||||
local: Identifier | Literal
|
||||
}
|
||||
|
||||
export interface AnonymousFunctionDeclaration extends Function {
|
||||
type: "FunctionDeclaration"
|
||||
id: null
|
||||
body: BlockStatement
|
||||
}
|
||||
|
||||
export interface AnonymousClassDeclaration extends Class {
|
||||
type: "ClassDeclaration"
|
||||
id: null
|
||||
}
|
||||
|
||||
export interface ExportDefaultDeclaration extends Node {
|
||||
type: "ExportDefaultDeclaration"
|
||||
declaration: AnonymousFunctionDeclaration | FunctionDeclaration | AnonymousClassDeclaration | ClassDeclaration | Expression
|
||||
}
|
||||
|
||||
export interface ExportAllDeclaration extends Node {
|
||||
type: "ExportAllDeclaration"
|
||||
source: Literal
|
||||
exported?: Identifier | Literal | null
|
||||
attributes: Array<ImportAttribute>
|
||||
}
|
||||
|
||||
export interface AwaitExpression extends Node {
|
||||
type: "AwaitExpression"
|
||||
argument: Expression
|
||||
}
|
||||
|
||||
export interface ChainExpression extends Node {
|
||||
type: "ChainExpression"
|
||||
expression: MemberExpression | CallExpression
|
||||
}
|
||||
|
||||
export interface ImportExpression extends Node {
|
||||
type: "ImportExpression"
|
||||
source: Expression
|
||||
options: Expression | null
|
||||
}
|
||||
|
||||
export interface ParenthesizedExpression extends Node {
|
||||
type: "ParenthesizedExpression"
|
||||
expression: Expression
|
||||
}
|
||||
|
||||
export interface PropertyDefinition extends Node {
|
||||
type: "PropertyDefinition"
|
||||
key: Expression | PrivateIdentifier
|
||||
value?: Expression | null
|
||||
computed: boolean
|
||||
static: boolean
|
||||
}
|
||||
|
||||
export interface PrivateIdentifier extends Node {
|
||||
type: "PrivateIdentifier"
|
||||
name: string
|
||||
}
|
||||
|
||||
export interface StaticBlock extends Node {
|
||||
type: "StaticBlock"
|
||||
body: Array<Statement>
|
||||
}
|
||||
|
||||
export type Statement =
|
||||
| ExpressionStatement
|
||||
| BlockStatement
|
||||
| EmptyStatement
|
||||
| DebuggerStatement
|
||||
| WithStatement
|
||||
| ReturnStatement
|
||||
| LabeledStatement
|
||||
| BreakStatement
|
||||
| ContinueStatement
|
||||
| IfStatement
|
||||
| SwitchStatement
|
||||
| ThrowStatement
|
||||
| TryStatement
|
||||
| WhileStatement
|
||||
| DoWhileStatement
|
||||
| ForStatement
|
||||
| ForInStatement
|
||||
| ForOfStatement
|
||||
| Declaration
|
||||
|
||||
export type Declaration =
|
||||
| FunctionDeclaration
|
||||
| VariableDeclaration
|
||||
| ClassDeclaration
|
||||
|
||||
export type Expression =
|
||||
| Identifier
|
||||
| Literal
|
||||
| ThisExpression
|
||||
| ArrayExpression
|
||||
| ObjectExpression
|
||||
| FunctionExpression
|
||||
| UnaryExpression
|
||||
| UpdateExpression
|
||||
| BinaryExpression
|
||||
| AssignmentExpression
|
||||
| LogicalExpression
|
||||
| MemberExpression
|
||||
| ConditionalExpression
|
||||
| CallExpression
|
||||
| NewExpression
|
||||
| SequenceExpression
|
||||
| ArrowFunctionExpression
|
||||
| YieldExpression
|
||||
| TemplateLiteral
|
||||
| TaggedTemplateExpression
|
||||
| ClassExpression
|
||||
| MetaProperty
|
||||
| AwaitExpression
|
||||
| ChainExpression
|
||||
| ImportExpression
|
||||
| ParenthesizedExpression
|
||||
|
||||
export type Pattern =
|
||||
| Identifier
|
||||
| MemberExpression
|
||||
| ObjectPattern
|
||||
| ArrayPattern
|
||||
| RestElement
|
||||
| AssignmentPattern
|
||||
|
||||
export type ModuleDeclaration =
|
||||
| ImportDeclaration
|
||||
| ExportNamedDeclaration
|
||||
| ExportDefaultDeclaration
|
||||
| ExportAllDeclaration
|
||||
|
||||
export type AnyNode = Statement | Expression | Declaration | ModuleDeclaration | Literal | Program | SwitchCase | CatchClause | Property | Super | SpreadElement | TemplateElement | AssignmentProperty | ObjectPattern | ArrayPattern | RestElement | AssignmentPattern | ClassBody | MethodDefinition | MetaProperty | ImportAttribute | ImportSpecifier | ImportDefaultSpecifier | ImportNamespaceSpecifier | ExportSpecifier | AnonymousFunctionDeclaration | AnonymousClassDeclaration | PropertyDefinition | PrivateIdentifier | StaticBlock | VariableDeclarator
|
||||
|
||||
export function parse(input: string, options: Options): Program
|
||||
|
||||
export function parseExpressionAt(input: string, pos: number, options: Options): Expression
|
||||
|
||||
export function tokenizer(input: string, options: Options): {
|
||||
getToken(): Token
|
||||
[Symbol.iterator](): Iterator<Token>
|
||||
}
|
||||
|
||||
export type ecmaVersion = 3 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 2015 | 2016 | 2017 | 2018 | 2019 | 2020 | 2021 | 2022 | 2023 | 2024 | 2025 | "latest"
|
||||
|
||||
export interface Options {
|
||||
/**
|
||||
* `ecmaVersion` indicates the ECMAScript version to parse. Can be a
|
||||
* number, either in year (`2022`) or plain version number (`6`) form,
|
||||
* or `"latest"` (the latest the library supports). This influences
|
||||
* support for strict mode, the set of reserved words, and support for
|
||||
* new syntax features.
|
||||
*/
|
||||
ecmaVersion: ecmaVersion
|
||||
|
||||
/**
|
||||
* `sourceType` indicates the mode the code should be parsed in.
|
||||
* Can be either `"script"` or `"module"`. This influences global
|
||||
* strict mode and parsing of `import` and `export` declarations.
|
||||
*/
|
||||
sourceType?: "script" | "module"
|
||||
|
||||
/**
|
||||
* a callback that will be called when a semicolon is automatically inserted.
|
||||
   * @param lastTokEnd the position of the semicolon as an offset
|
||||
* @param lastTokEndLoc location if {@link locations} is enabled
|
||||
*/
|
||||
onInsertedSemicolon?: (lastTokEnd: number, lastTokEndLoc?: Position) => void
|
||||
|
||||
/**
|
||||
* similar to `onInsertedSemicolon`, but for trailing commas
|
||||
* @param lastTokEnd the position of the comma as an offset
|
||||
* @param lastTokEndLoc location if `locations` is enabled
|
||||
*/
|
||||
onTrailingComma?: (lastTokEnd: number, lastTokEndLoc?: Position) => void
|
||||
|
||||
/**
|
||||
* By default, reserved words are only enforced if ecmaVersion >= 5.
|
||||
   * Set `allowReserved` to a boolean value to explicitly turn this on
   * and off. When this option has the value "never", reserved words
|
||||
* and keywords can also not be used as property names.
|
||||
*/
|
||||
allowReserved?: boolean | "never"
|
||||
|
||||
/**
|
||||
* When enabled, a return at the top level is not considered an error.
|
||||
*/
|
||||
allowReturnOutsideFunction?: boolean
|
||||
|
||||
/**
|
||||
* When enabled, import/export statements are not constrained to
|
||||
* appearing at the top of the program, and an import.meta expression
|
||||
* in a script isn't considered an error.
|
||||
*/
|
||||
allowImportExportEverywhere?: boolean
|
||||
|
||||
/**
|
||||
* By default, `await` identifiers are allowed to appear at the top-level scope only if {@link ecmaVersion} >= 2022.
|
||||
* When enabled, await identifiers are allowed to appear at the top-level scope,
|
||||
* but they are still not allowed in non-async functions.
|
||||
*/
|
||||
allowAwaitOutsideFunction?: boolean
|
||||
|
||||
/**
|
||||
* When enabled, super identifiers are not constrained to
|
||||
* appearing in methods and do not raise an error when they appear elsewhere.
|
||||
*/
|
||||
allowSuperOutsideMethod?: boolean
|
||||
|
||||
/**
|
||||
   * When enabled, a hashbang directive at the beginning of the file is
|
||||
* allowed and treated as a line comment. Enabled by default when
|
||||
* {@link ecmaVersion} >= 2023.
|
||||
*/
|
||||
allowHashBang?: boolean
|
||||
|
||||
/**
|
||||
* By default, the parser will verify that private properties are
|
||||
* only used in places where they are valid and have been declared.
|
||||
* Set this to false to turn such checks off.
|
||||
*/
|
||||
checkPrivateFields?: boolean
|
||||
|
||||
/**
|
||||
* When `locations` is on, `loc` properties holding objects with
|
||||
* `start` and `end` properties as {@link Position} objects will be attached to the
|
||||
* nodes.
|
||||
*/
|
||||
locations?: boolean
|
||||
|
||||
/**
|
||||
   * a callback that will cause Acorn to call that function with an object in the same
|
||||
* format as tokens returned from `tokenizer().getToken()`. Note
|
||||
* that you are not allowed to call the parser from the
|
||||
* callback—that will corrupt its internal state.
|
||||
*/
|
||||
onToken?: ((token: Token) => void) | Token[]
|
||||
|
||||
|
||||
/**
|
||||
   * This takes a function or an array.
   *
   * When a function is passed, Acorn will call that function with `(block, text, start,
   * end)` parameters whenever a comment is skipped. `block` is a
   * boolean indicating whether this is a block (`/* *\/`) comment,
   * `text` is the content of the comment, and `start` and `end` are
   * character offsets that denote the start and end of the comment.
   * When the {@link locations} option is on, two more parameters are
   * passed: the full locations of {@link Position} type of the start and
   * end of the comments.
   *
   * When an array is passed, each found comment of {@link Comment} type is pushed to the array.
|
||||
*
|
||||
* Note that you are not allowed to call the
|
||||
* parser from the callback—that will corrupt its internal state.
|
||||
*/
|
||||
onComment?: ((
|
||||
isBlock: boolean, text: string, start: number, end: number, startLoc?: Position,
|
||||
endLoc?: Position
|
||||
) => void) | Comment[]
|
||||
|
||||
/**
|
||||
   * Nodes have their start and end character offsets recorded in
   * `start` and `end` properties (directly on the node, rather than
   * the `loc` object, which holds line/column data). To also add a
|
||||
* [semi-standardized][range] `range` property holding a `[start,
|
||||
* end]` array with the same numbers, set the `ranges` option to
|
||||
* `true`.
|
||||
*/
|
||||
ranges?: boolean
|
||||
|
||||
/**
|
||||
* It is possible to parse multiple files into a single AST by
|
||||
* passing the tree produced by parsing the first file as
|
||||
* `program` option in subsequent parses. This will add the
|
||||
* toplevel forms of the parsed file to the `Program` (top) node
|
||||
* of an existing parse tree.
|
||||
*/
|
||||
program?: Node
|
||||
|
||||
/**
|
||||
* When {@link locations} is on, you can pass this to record the source
|
||||
* file in every node's `loc` object.
|
||||
*/
|
||||
sourceFile?: string
|
||||
|
||||
/**
|
||||
* This value, if given, is stored in every node, whether {@link locations} is on or off.
|
||||
*/
|
||||
directSourceFile?: string
|
||||
|
||||
/**
|
||||
* When enabled, parenthesized expressions are represented by
|
||||
* (non-standard) ParenthesizedExpression nodes
|
||||
*/
|
||||
preserveParens?: boolean
|
||||
}
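Illustrative only (not part of the declaration file): a minimal `acorn.parse` call exercising a few of the options documented above; the source string is made up.

```js
// Sketch: parsing with acorn using some of the options described above.
const acorn = require('acorn');

const comments = [];
const ast = acorn.parse('let answer = 42; // the answer', {
  ecmaVersion: 'latest',
  sourceType: 'script',
  locations: true,     // attach `loc` objects with Position start/end
  ranges: true,        // also attach [start, end] `range` arrays
  onComment: comments, // collect skipped comments instead of using a callback
});

console.log(ast.body[0].type);  // 'VariableDeclaration'
console.log(comments[0].value); // ' the answer'
```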
|
||||
|
||||
export class Parser {
|
||||
options: Options
|
||||
input: string
|
||||
|
||||
protected constructor(options: Options, input: string, startPos?: number)
|
||||
parse(): Program
|
||||
|
||||
static parse(input: string, options: Options): Program
|
||||
static parseExpressionAt(input: string, pos: number, options: Options): Expression
|
||||
static tokenizer(input: string, options: Options): {
|
||||
getToken(): Token
|
||||
[Symbol.iterator](): Iterator<Token>
|
||||
}
|
||||
static extend(...plugins: ((BaseParser: typeof Parser) => typeof Parser)[]): typeof Parser
|
||||
}
|
||||
|
||||
export const defaultOptions: Options
|
||||
|
||||
export function getLineInfo(input: string, offset: number): Position
|
||||
|
||||
export class TokenType {
|
||||
label: string
|
||||
keyword: string | undefined
|
||||
}
|
||||
|
||||
export const tokTypes: {
|
||||
num: TokenType
|
||||
regexp: TokenType
|
||||
string: TokenType
|
||||
name: TokenType
|
||||
privateId: TokenType
|
||||
eof: TokenType
|
||||
|
||||
bracketL: TokenType
|
||||
bracketR: TokenType
|
||||
braceL: TokenType
|
||||
braceR: TokenType
|
||||
parenL: TokenType
|
||||
parenR: TokenType
|
||||
comma: TokenType
|
||||
semi: TokenType
|
||||
colon: TokenType
|
||||
dot: TokenType
|
||||
question: TokenType
|
||||
questionDot: TokenType
|
||||
arrow: TokenType
|
||||
template: TokenType
|
||||
invalidTemplate: TokenType
|
||||
ellipsis: TokenType
|
||||
backQuote: TokenType
|
||||
dollarBraceL: TokenType
|
||||
|
||||
eq: TokenType
|
||||
assign: TokenType
|
||||
incDec: TokenType
|
||||
prefix: TokenType
|
||||
logicalOR: TokenType
|
||||
logicalAND: TokenType
|
||||
bitwiseOR: TokenType
|
||||
bitwiseXOR: TokenType
|
||||
bitwiseAND: TokenType
|
||||
equality: TokenType
|
||||
relational: TokenType
|
||||
bitShift: TokenType
|
||||
plusMin: TokenType
|
||||
modulo: TokenType
|
||||
star: TokenType
|
||||
slash: TokenType
|
||||
starstar: TokenType
|
||||
coalesce: TokenType
|
||||
|
||||
_break: TokenType
|
||||
_case: TokenType
|
||||
_catch: TokenType
|
||||
_continue: TokenType
|
||||
_debugger: TokenType
|
||||
_default: TokenType
|
||||
_do: TokenType
|
||||
_else: TokenType
|
||||
_finally: TokenType
|
||||
_for: TokenType
|
||||
_function: TokenType
|
||||
_if: TokenType
|
||||
_return: TokenType
|
||||
_switch: TokenType
|
||||
_throw: TokenType
|
||||
_try: TokenType
|
||||
_var: TokenType
|
||||
_const: TokenType
|
||||
_while: TokenType
|
||||
_with: TokenType
|
||||
_new: TokenType
|
||||
_this: TokenType
|
||||
_super: TokenType
|
||||
_class: TokenType
|
||||
_extends: TokenType
|
||||
_export: TokenType
|
||||
_import: TokenType
|
||||
_null: TokenType
|
||||
_true: TokenType
|
||||
_false: TokenType
|
||||
_in: TokenType
|
||||
_instanceof: TokenType
|
||||
_typeof: TokenType
|
||||
_void: TokenType
|
||||
_delete: TokenType
|
||||
}
|
||||
|
||||
export interface Comment {
|
||||
type: "Line" | "Block"
|
||||
value: string
|
||||
start: number
|
||||
end: number
|
||||
loc?: SourceLocation
|
||||
range?: [number, number]
|
||||
}
|
||||
|
||||
export class Token {
|
||||
type: TokenType
|
||||
start: number
|
||||
end: number
|
||||
loc?: SourceLocation
|
||||
range?: [number, number]
|
||||
}
|
||||
|
||||
export const version: string
|
||||
@@ -0,0 +1,35 @@
|
||||
'use strict';
|
||||
|
||||
var Type = require('../type');
|
||||
|
||||
function resolveYamlNull(data) {
|
||||
if (data === null) return true;
|
||||
|
||||
var max = data.length;
|
||||
|
||||
return (max === 1 && data === '~') ||
|
||||
(max === 4 && (data === 'null' || data === 'Null' || data === 'NULL'));
|
||||
}
|
||||
|
||||
function constructYamlNull() {
|
||||
return null;
|
||||
}
|
||||
|
||||
function isNull(object) {
|
||||
return object === null;
|
||||
}
|
||||
|
||||
module.exports = new Type('tag:yaml.org,2002:null', {
|
||||
kind: 'scalar',
|
||||
resolve: resolveYamlNull,
|
||||
construct: constructYamlNull,
|
||||
predicate: isNull,
|
||||
represent: {
|
||||
canonical: function () { return '~'; },
|
||||
lowercase: function () { return 'null'; },
|
||||
uppercase: function () { return 'NULL'; },
|
||||
camelcase: function () { return 'Null'; },
|
||||
empty: function () { return ''; }
|
||||
},
|
||||
defaultStyle: 'lowercase'
|
||||
});
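A hedged sketch of how this type surfaces through the public js-yaml API (assuming the surrounding `js-yaml` package):

```js
// Sketch: the null type above in action through the public js-yaml API.
const yaml = require('js-yaml');

// resolve/construct: all of these scalars load as JavaScript null.
console.log(yaml.load('~'));    // null
console.log(yaml.load('null')); // null
console.log(yaml.load('NULL')); // null

// represent/defaultStyle: dumping null uses the lowercase style by default,
// and the styles option can pick any representation defined above.
console.log(yaml.dump(null));                                        // 'null\n'
console.log(yaml.dump(null, { styles: { '!!null': 'canonical' } })); // '~\n'
```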
|
||||
File diff suppressed because it is too large
@@ -0,0 +1 @@
|
||||
{"version":3,"names":["_index","require","_index2","toComputedKey","node","key","property","computed","isIdentifier","stringLiteral","name"],"sources":["../../src/converters/toComputedKey.ts"],"sourcesContent":["import { isIdentifier } from \"../validators/generated/index.ts\";\nimport { stringLiteral } from \"../builders/generated/index.ts\";\nimport type * as t from \"../index.ts\";\n\nexport default function toComputedKey(\n node:\n | t.ObjectMember\n | t.ObjectProperty\n | t.ClassMethod\n | t.ClassProperty\n | t.ClassAccessorProperty\n | t.MemberExpression\n | t.OptionalMemberExpression,\n // @ts-expect-error todo(flow->ts): maybe check the type of node before accessing .key and .property\n key: t.Expression | t.PrivateName = node.key || node.property,\n) {\n if (!node.computed && isIdentifier(key)) key = stringLiteral(key.name);\n\n return key;\n}\n"],"mappings":";;;;;;AAAA,IAAAA,MAAA,GAAAC,OAAA;AACA,IAAAC,OAAA,GAAAD,OAAA;AAGe,SAASE,aAAaA,CACnCC,IAO8B,EAE9BC,GAAiC,GAAGD,IAAI,CAACC,GAAG,IAAID,IAAI,CAACE,QAAQ,EAC7D;EACA,IAAI,CAACF,IAAI,CAACG,QAAQ,IAAI,IAAAC,mBAAY,EAACH,GAAG,CAAC,EAAEA,GAAG,GAAG,IAAAI,qBAAa,EAACJ,GAAG,CAACK,IAAI,CAAC;EAEtE,OAAOL,GAAG;AACZ","ignoreList":[]}