update
@@ -0,0 +1,30 @@
import { useMatch } from './useMatch'
import type {
  AnyRouter,
  RegisteredRouter,
  UseRouteContextBaseOptions,
  UseRouteContextOptions,
  UseRouteContextResult,
} from '@tanstack/router-core'

export type UseRouteContextRoute<out TFrom> = <
  TRouter extends AnyRouter = RegisteredRouter,
  TSelected = unknown,
>(
  opts?: UseRouteContextBaseOptions<TRouter, TFrom, true, TSelected>,
) => UseRouteContextResult<TRouter, TFrom, true, TSelected>

export function useRouteContext<
  TRouter extends AnyRouter = RegisteredRouter,
  const TFrom extends string | undefined = undefined,
  TStrict extends boolean = true,
  TSelected = unknown,
>(
  opts: UseRouteContextOptions<TRouter, TFrom, TStrict, TSelected>,
): UseRouteContextResult<TRouter, TFrom, TStrict, TSelected> {
  return useMatch({
    ...(opts as any),
    select: (match) =>
      opts.select ? opts.select(match.context) : match.context,
  }) as UseRouteContextResult<TRouter, TFrom, TStrict, TSelected>
}
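A brief usage sketch of the hook above, assuming it is re-exported from `@tanstack/react-router` and that a route at `/posts/$postId` puts a `queryClient` into its context (both the route id and the context field are illustrative):

```tsx
import { useRouteContext } from '@tanstack/react-router'

function PostActions() {
  // Strict lookup from a specific route; `select` narrows the subscription
  // so the component only re-renders when `queryClient` changes.
  const queryClient = useRouteContext({
    from: '/posts/$postId',           // assumed route id
    select: (ctx) => ctx.queryClient, // assumed context field
  })
  // ...use queryClient here
  return null
}
```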
@@ -0,0 +1,45 @@
import * as React from "./";
export { Fragment } from "./";

export namespace JSX {
    type ElementType = React.JSX.ElementType;
    interface Element extends React.JSX.Element {}
    interface ElementClass extends React.JSX.ElementClass {}
    interface ElementAttributesProperty extends React.JSX.ElementAttributesProperty {}
    interface ElementChildrenAttribute extends React.JSX.ElementChildrenAttribute {}
    type LibraryManagedAttributes<C, P> = React.JSX.LibraryManagedAttributes<C, P>;
    interface IntrinsicAttributes extends React.JSX.IntrinsicAttributes {}
    interface IntrinsicClassAttributes<T> extends React.JSX.IntrinsicClassAttributes<T> {}
    interface IntrinsicElements extends React.JSX.IntrinsicElements {}
}

export interface JSXSource {
    /**
     * The source file where the element originates from.
     */
    fileName?: string | undefined;

    /**
     * The line number where the element was created.
     */
    lineNumber?: number | undefined;

    /**
     * The column number where the element was created.
     */
    columnNumber?: number | undefined;
}

/**
 * Create a React element.
 *
 * You should not use this function directly. Use JSX and a transpiler instead.
 */
export function jsxDEV(
    type: React.ElementType,
    props: unknown,
    key: React.Key | undefined,
    isStatic: boolean,
    source?: JSXSource,
    self?: unknown,
): React.ReactElement;
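For orientation, a rough sketch of what a compiler emits for `<div className="box">hi</div>` under the automatic JSX transform in development; the exact output shape varies by toolchain, so treat this as illustrative:

```tsx
import { jsxDEV } from 'react/jsx-dev-runtime'

// type, props (children folded in), key, isStatic, source, self
const el = jsxDEV(
  'div',
  { className: 'box', children: 'hi' },
  undefined, // key
  false,     // isStatic: children here are not a static array
  { fileName: 'App.tsx', lineNumber: 1, columnNumber: 1 }, // JSXSource
  undefined, // self
)
```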
@@ -0,0 +1 @@
module.exports={A:{A:{"1":"B","2":"K D E F A mC"},B:{"1":"0 9 C L M G N O P Q H R S T U V W X Y Z a b c d e f g h i j k l m n o p q r s t u v w x y z AB BB CB DB EB FB GB HB IB JB KB LB MB NB OB I"},C:{"1":"0 1 2 3 4 5 6 7 8 9 nC LC J PB K D E F A B C L M G N O P QB RB SB TB UB VB WB XB YB ZB aB bB cB dB eB fB gB hB iB jB kB lB mB nB oB pB qB rB sB tB uB vB MC wB NC xB yB zB 0B 1B 2B 3B 4B 5B 6B 7B 8B 9B AC BC CC DC Q H R OC S T U V W X Y Z a b c d e f g h i j k l m n o p q r s t u v w x y z AB BB CB DB EB FB GB HB IB JB KB LB MB NB OB I PC EC QC RC oC pC qC rC"},D:{"1":"0 1 2 3 4 5 6 7 8 9 J PB K D E F A B C L M G N O P QB RB SB TB UB VB WB XB YB ZB aB bB cB dB eB fB gB hB iB jB kB lB mB nB oB pB qB rB sB tB uB vB MC wB NC xB yB zB 0B 1B 2B 3B 4B 5B 6B 7B 8B 9B AC BC CC DC Q H R S T U V W X Y Z a b c d e f g h i j k l m n o p q r s t u v w x y z AB BB CB DB EB FB GB HB IB JB KB LB MB NB OB I PC EC QC RC"},E:{"1":"PB K D E F A B C L M G tC uC vC wC TC FC GC xC yC zC UC VC HC 0C IC WC XC YC ZC aC 1C JC bC cC dC eC fC 2C KC gC hC iC jC 3C","2":"J sC SC"},F:{"1":"0 1 2 3 4 5 6 7 8 G N O P QB RB SB TB UB VB WB XB YB ZB aB bB cB dB eB fB gB hB iB jB kB lB mB nB oB pB qB rB sB tB uB vB wB xB yB zB 0B 1B 2B 3B 4B 5B 6B 7B 8B 9B AC BC CC DC Q H R OC S T U V W X Y Z a b c d e f g h i j k l m n o p q r s t u v w x y z","2":"F B C 4C 5C 6C 7C FC kC 8C GC"},G:{"1":"E AD BD CD DD ED FD GD HD ID JD KD LD MD ND OD PD QD RD SD UC VC HC TD IC WC XC YC ZC aC UD JC bC cC dC eC fC VD KC gC hC iC jC","16":"SC 9C lC"},H:{"2":"WD"},I:{"1":"LC J I ZD aD lC bD cD","16":"XD YD"},J:{"1":"D A"},K:{"1":"H","2":"A B C FC kC GC"},L:{"1":"I"},M:{"1":"EC"},N:{"1":"B","2":"A"},O:{"1":"HC"},P:{"1":"1 2 3 4 5 6 7 8 J dD eD fD gD hD TC iD jD kD lD mD IC JC KC nD"},Q:{"1":"oD"},R:{"1":"pD"},S:{"1":"qD rD"}},B:1,C:"PageTransitionEvent",D:true};
File diff suppressed because one or more lines are too long
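The `module.exports` line above is caniuse-lite's packed support table for `PageTransitionEvent`. A decoding sketch using the library's documented unpacker (the `'page-transition-events'` feature key and the printed fields are assumptions about this particular entry):

```js
const { feature, features } = require('caniuse-lite')

// `features` maps feature ids to packed entries like the one above;
// `feature()` unpacks the letter-coded table into readable stats.
const unpacked = feature(features['page-transition-events'])
console.log(unpacked.title)          // e.g. "PageTransitionEvent"
console.log(unpacked.stats.firefox)  // per-version support flags, e.g. { '115': 'y', ... }
```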
@@ -0,0 +1,197 @@
import { decodedMappings, traceSegment, TraceMap } from '@jridgewell/trace-mapping';
import { GenMapping, maybeAddSegment, setSourceContent, setIgnore, toDecodedMap, toEncodedMap } from '@jridgewell/gen-mapping';

const SOURCELESS_MAPPING = /* #__PURE__ */ SegmentObject('', -1, -1, '', null, false);
const EMPTY_SOURCES = [];
function SegmentObject(source, line, column, name, content, ignore) {
    return { source, line, column, name, content, ignore };
}
function Source(map, sources, source, content, ignore) {
    return {
        map,
        sources,
        source,
        content,
        ignore,
    };
}
/**
 * MapSource represents a single sourcemap, with the ability to trace mappings into its child nodes
 * (which may themselves be SourceMapTrees).
 */
function MapSource(map, sources) {
    return Source(map, sources, '', null, false);
}
/**
 * A "leaf" node in the sourcemap tree, representing an original, unmodified source file. Recursive
 * segment tracing ends at the `OriginalSource`.
 */
function OriginalSource(source, content, ignore) {
    return Source(null, EMPTY_SOURCES, source, content, ignore);
}
/**
 * traceMappings is only called on the root level SourceMapTree, and begins the process of
 * resolving each mapping in terms of the original source files.
 */
function traceMappings(tree) {
    // TODO: Eventually support sourceRoot, which has to be removed because the sources are already
    // fully resolved. We'll need to make sources relative to the sourceRoot before adding them.
    const gen = new GenMapping({ file: tree.map.file });
    const { sources: rootSources, map } = tree;
    const rootNames = map.names;
    const rootMappings = decodedMappings(map);
    for (let i = 0; i < rootMappings.length; i++) {
        const segments = rootMappings[i];
        for (let j = 0; j < segments.length; j++) {
            const segment = segments[j];
            const genCol = segment[0];
            let traced = SOURCELESS_MAPPING;
            // 1-length segments only move the current generated column, there's no source information
            // to gather from it.
            if (segment.length !== 1) {
                const source = rootSources[segment[1]];
                traced = originalPositionFor(source, segment[2], segment[3], segment.length === 5 ? rootNames[segment[4]] : '');
                // If the trace is invalid, then the trace ran into a sourcemap that doesn't contain a
                // respective segment into an original source.
                if (traced == null)
                    continue;
            }
            const { column, line, name, content, source, ignore } = traced;
            maybeAddSegment(gen, i, genCol, source, line, column, name);
            if (source && content != null)
                setSourceContent(gen, source, content);
            if (ignore)
                setIgnore(gen, source, true);
        }
    }
    return gen;
}
/**
 * originalPositionFor is only called on children SourceMapTrees. It recurses down into its own
 * child SourceMapTrees, until we find the original source map.
 */
function originalPositionFor(source, line, column, name) {
    if (!source.map) {
        return SegmentObject(source.source, line, column, name, source.content, source.ignore);
    }
    const segment = traceSegment(source.map, line, column);
    // If we couldn't find a segment, then this doesn't exist in the sourcemap.
    if (segment == null)
        return null;
    // 1-length segments only move the current generated column, there's no source information
    // to gather from it.
    if (segment.length === 1)
        return SOURCELESS_MAPPING;
    return originalPositionFor(source.sources[segment[1]], segment[2], segment[3], segment.length === 5 ? source.map.names[segment[4]] : name);
}

function asArray(value) {
    if (Array.isArray(value))
        return value;
    return [value];
}
/**
 * Recursively builds a tree structure out of sourcemap files, with each node
 * being either an `OriginalSource` "leaf" or a `SourceMapTree` composed of
 * `OriginalSource`s and `SourceMapTree`s.
 *
 * Every sourcemap is composed of a collection of source files and mappings
 * into locations of those source files. When we generate a `SourceMapTree` for
 * the sourcemap, we attempt to load each source file's own sourcemap. If it
 * does not have an associated sourcemap, it is considered an original,
 * unmodified source file.
 */
function buildSourceMapTree(input, loader) {
    const maps = asArray(input).map((m) => new TraceMap(m, ''));
    const map = maps.pop();
    for (let i = 0; i < maps.length; i++) {
        if (maps[i].sources.length > 1) {
            throw new Error(`Transformation map ${i} must have exactly one source file.\n` +
                'Did you specify these with the most recent transformation maps first?');
        }
    }
    let tree = build(map, loader, '', 0);
    for (let i = maps.length - 1; i >= 0; i--) {
        tree = MapSource(maps[i], [tree]);
    }
    return tree;
}
function build(map, loader, importer, importerDepth) {
    const { resolvedSources, sourcesContent, ignoreList } = map;
    const depth = importerDepth + 1;
    const children = resolvedSources.map((sourceFile, i) => {
        // The loading context gives the loader more information about why this file is being loaded
        // (eg, from which importer). It also allows the loader to override the location of the loaded
        // sourcemap/original source, or to override the content in the sourcesContent field if it's
        // an unmodified source file.
        const ctx = {
            importer,
            depth,
            source: sourceFile || '',
            content: undefined,
            ignore: undefined,
        };
        // Use the provided loader callback to retrieve the file's sourcemap.
        // TODO: We should eventually support async loading of sourcemap files.
        const sourceMap = loader(ctx.source, ctx);
        const { source, content, ignore } = ctx;
        // If there is a sourcemap, then we need to recurse into it to load its source files.
        if (sourceMap)
            return build(new TraceMap(sourceMap, source), loader, source, depth);
        // Else, it's an unmodified source file.
        // The contents of this unmodified source file can be overridden via the loader context,
        // allowing it to be explicitly null or a string. If it remains undefined, we fall back to
        // the importing sourcemap's `sourcesContent` field.
        const sourceContent = content !== undefined ? content : sourcesContent ? sourcesContent[i] : null;
        const ignored = ignore !== undefined ? ignore : ignoreList ? ignoreList.includes(i) : false;
        return OriginalSource(source, sourceContent, ignored);
    });
    return MapSource(map, children);
}

/**
 * A SourceMap v3 compatible sourcemap, which only includes fields that were
 * provided to it.
 */
class SourceMap {
    constructor(map, options) {
        const out = options.decodedMappings ? toDecodedMap(map) : toEncodedMap(map);
        this.version = out.version; // SourceMap spec says this should be first.
        this.file = out.file;
        this.mappings = out.mappings;
        this.names = out.names;
        this.ignoreList = out.ignoreList;
        this.sourceRoot = out.sourceRoot;
        this.sources = out.sources;
        if (!options.excludeContent) {
            this.sourcesContent = out.sourcesContent;
        }
    }
    toString() {
        return JSON.stringify(this);
    }
}

/**
 * Traces through all the mappings in the root sourcemap, through the sources
 * (and their sourcemaps), all the way back to the original source location.
 *
 * `loader` will be called every time we encounter a source file. If it returns
 * a sourcemap, we will recurse into that sourcemap to continue the trace. If
 * it returns a falsey value, that source file is treated as an original,
 * unmodified source file.
 *
 * Pass `excludeContent` to exclude any self-containing source file content
 * from the output sourcemap.
 *
 * Pass `decodedMappings` to receive a SourceMap with decoded (instead of
 * VLQ encoded) mappings.
 */
function remapping(input, loader, options) {
    const opts = typeof options === 'object' ? options : { excludeContent: !!options, decodedMappings: false };
    const tree = buildSourceMapTree(input, loader);
    return new SourceMap(traceMappings(tree), opts);
}

export { remapping as default };
//# sourceMappingURL=remapping.mjs.map
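A minimal usage sketch of the exported `remapping` function above; the two maps and file names are made up for illustration ('AAAA' simply maps output position 0:0 to position 0:0 of the first source):

```ts
import remapping from '@ampproject/remapping'

// Map from the final transform step (e.g. a minifier) back to its input.
const minifiedMap = {
  version: 3 as const,
  file: 'out.min.js',
  sources: ['intermediate.js'],
  names: [],
  mappings: 'AAAA',
}

// Map for that intermediate file, pointing back at the original source.
const intermediateMap = {
  version: 3 as const,
  file: 'intermediate.js',
  sources: ['original.ts'],
  sourcesContent: ['export const x = 1'],
  names: [],
  mappings: 'AAAA',
}

// The loader runs once per source file: return a map to recurse into it,
// or a falsey value to treat the file as an original, unmodified source.
const combined = remapping(minifiedMap, (file) =>
  file === 'intermediate.js' ? intermediateMap : null,
)

console.log(combined.toString()) // combined map's sources now point at 'original.ts'
```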
@@ -0,0 +1 @@
{"version":3,"names":["reservedWords","keyword","strict","strictBind","keywords","Set","reservedWordsStrictSet","reservedWordsStrictBindSet","isReservedWord","word","inModule","isStrictReservedWord","has","isStrictBindOnlyReservedWord","isStrictBindReservedWord","isKeyword"],"sources":["../src/keyword.ts"],"sourcesContent":["const reservedWords = {\n keyword: [\n \"break\",\n \"case\",\n \"catch\",\n \"continue\",\n \"debugger\",\n \"default\",\n \"do\",\n \"else\",\n \"finally\",\n \"for\",\n \"function\",\n \"if\",\n \"return\",\n \"switch\",\n \"throw\",\n \"try\",\n \"var\",\n \"const\",\n \"while\",\n \"with\",\n \"new\",\n \"this\",\n \"super\",\n \"class\",\n \"extends\",\n \"export\",\n \"import\",\n \"null\",\n \"true\",\n \"false\",\n \"in\",\n \"instanceof\",\n \"typeof\",\n \"void\",\n \"delete\",\n ],\n strict: [\n \"implements\",\n \"interface\",\n \"let\",\n \"package\",\n \"private\",\n \"protected\",\n \"public\",\n \"static\",\n \"yield\",\n ],\n strictBind: [\"eval\", \"arguments\"],\n};\nconst keywords = new Set(reservedWords.keyword);\nconst reservedWordsStrictSet = new Set(reservedWords.strict);\nconst reservedWordsStrictBindSet = new Set(reservedWords.strictBind);\n\n/**\n * Checks if word is a reserved word in non-strict mode\n */\nexport function isReservedWord(word: string, inModule: boolean): boolean {\n return (inModule && word === \"await\") || word === \"enum\";\n}\n\n/**\n * Checks if word is a reserved word in non-binding strict mode\n *\n * Includes non-strict reserved words\n */\nexport function isStrictReservedWord(word: string, inModule: boolean): boolean {\n return isReservedWord(word, inModule) || reservedWordsStrictSet.has(word);\n}\n\n/**\n * Checks if word is a reserved word in binding strict mode, but it is allowed as\n * a normal identifier.\n */\nexport function isStrictBindOnlyReservedWord(word: string): boolean {\n return reservedWordsStrictBindSet.has(word);\n}\n\n/**\n * Checks if word is a reserved word in binding strict mode\n *\n * Includes non-strict reserved words and non-binding strict reserved words\n */\nexport function isStrictBindReservedWord(\n word: string,\n inModule: boolean,\n): boolean {\n return (\n isStrictReservedWord(word, inModule) || isStrictBindOnlyReservedWord(word)\n );\n}\n\nexport function isKeyword(word: string): boolean {\n return 
keywords.has(word);\n}\n"],"mappings":";;;;;;;;;;AAAA,MAAMA,aAAa,GAAG;EACpBC,OAAO,EAAE,CACP,OAAO,EACP,MAAM,EACN,OAAO,EACP,UAAU,EACV,UAAU,EACV,SAAS,EACT,IAAI,EACJ,MAAM,EACN,SAAS,EACT,KAAK,EACL,UAAU,EACV,IAAI,EACJ,QAAQ,EACR,QAAQ,EACR,OAAO,EACP,KAAK,EACL,KAAK,EACL,OAAO,EACP,OAAO,EACP,MAAM,EACN,KAAK,EACL,MAAM,EACN,OAAO,EACP,OAAO,EACP,SAAS,EACT,QAAQ,EACR,QAAQ,EACR,MAAM,EACN,MAAM,EACN,OAAO,EACP,IAAI,EACJ,YAAY,EACZ,QAAQ,EACR,MAAM,EACN,QAAQ,CACT;EACDC,MAAM,EAAE,CACN,YAAY,EACZ,WAAW,EACX,KAAK,EACL,SAAS,EACT,SAAS,EACT,WAAW,EACX,QAAQ,EACR,QAAQ,EACR,OAAO,CACR;EACDC,UAAU,EAAE,CAAC,MAAM,EAAE,WAAW;AAClC,CAAC;AACD,MAAMC,QAAQ,GAAG,IAAIC,GAAG,CAACL,aAAa,CAACC,OAAO,CAAC;AAC/C,MAAMK,sBAAsB,GAAG,IAAID,GAAG,CAACL,aAAa,CAACE,MAAM,CAAC;AAC5D,MAAMK,0BAA0B,GAAG,IAAIF,GAAG,CAACL,aAAa,CAACG,UAAU,CAAC;AAK7D,SAASK,cAAcA,CAACC,IAAY,EAAEC,QAAiB,EAAW;EACvE,OAAQA,QAAQ,IAAID,IAAI,KAAK,OAAO,IAAKA,IAAI,KAAK,MAAM;AAC1D;AAOO,SAASE,oBAAoBA,CAACF,IAAY,EAAEC,QAAiB,EAAW;EAC7E,OAAOF,cAAc,CAACC,IAAI,EAAEC,QAAQ,CAAC,IAAIJ,sBAAsB,CAACM,GAAG,CAACH,IAAI,CAAC;AAC3E;AAMO,SAASI,4BAA4BA,CAACJ,IAAY,EAAW;EAClE,OAAOF,0BAA0B,CAACK,GAAG,CAACH,IAAI,CAAC;AAC7C;AAOO,SAASK,wBAAwBA,CACtCL,IAAY,EACZC,QAAiB,EACR;EACT,OACEC,oBAAoB,CAACF,IAAI,EAAEC,QAAQ,CAAC,IAAIG,4BAA4B,CAACJ,IAAI,CAAC;AAE9E;AAEO,SAASM,SAASA,CAACN,IAAY,EAAW;EAC/C,OAAOL,QAAQ,CAACQ,GAAG,CAACH,IAAI,CAAC;AAC3B","ignoreList":[]}
@@ -0,0 +1 @@
module.exports={A:{A:{"2":"K D E F A B mC"},B:{"1":"0 9 x y z AB BB CB DB EB FB GB HB IB JB KB LB MB NB OB I","2":"C L M G N O P Q H R S T U V W X Y Z a b c d e f g h i j k l m n o p q r s t u v w"},C:{"1":"0 9 v w x y z AB BB CB DB EB FB GB HB IB JB KB LB MB NB OB I PC EC QC RC oC pC","2":"1 2 3 4 5 6 7 8 nC LC J PB K D E F A B C L M G N O P QB RB SB TB UB VB WB XB YB ZB aB bB cB dB eB fB gB hB iB jB kB lB mB nB oB pB qB rB sB tB uB vB MC wB NC xB yB zB 0B 1B 2B 3B 4B 5B 6B 7B 8B 9B AC BC CC DC Q H R OC S T U V W X Y Z a b c d e f g h i j k l m n o p q r s t u qC rC"},D:{"1":"0 9 x y z AB BB CB DB EB FB GB HB IB JB KB LB MB NB OB I PC EC QC RC","2":"1 2 3 4 5 6 7 8 J PB K D E F A B C L M G N O P QB RB SB TB UB VB WB XB YB ZB aB bB cB dB eB fB gB hB iB jB kB lB mB nB oB pB qB rB sB tB uB vB MC wB NC xB yB zB 0B 1B 2B 3B 4B 5B 6B 7B 8B 9B AC BC CC DC Q H R S T U V W X Y Z a b c d e f g h i j k l m n o p q r s t u v w"},E:{"1":"eC fC 2C KC gC hC iC jC 3C","2":"J PB K D E F A B C L M G sC SC tC uC vC wC TC FC GC xC yC zC UC VC HC 0C IC WC XC YC ZC aC 1C JC bC cC dC"},F:{"1":"0 j k l m n o p q r s t u v w x y z","2":"1 2 3 4 5 6 7 8 F B C G N O P QB RB SB TB UB VB WB XB YB ZB aB bB cB dB eB fB gB hB iB jB kB lB mB nB oB pB qB rB sB tB uB vB wB xB yB zB 0B 1B 2B 3B 4B 5B 6B 7B 8B 9B AC BC CC DC Q H R OC S T U V W X Y Z a b c d e f g h i 4C 5C 6C 7C FC kC 8C GC"},G:{"1":"eC fC VD KC gC hC iC jC","2":"E SC 9C lC AD BD CD DD ED FD GD HD ID JD KD LD MD ND OD PD QD RD SD UC VC HC TD IC WC XC YC ZC aC UD JC bC cC dC"},H:{"2":"WD"},I:{"1":"I","2":"LC J XD YD ZD aD lC bD cD"},J:{"2":"D A"},K:{"2":"A B C H FC kC GC"},L:{"1":"I"},M:{"1":"EC"},N:{"2":"A B"},O:{"1":"HC"},P:{"1":"4 5 6 7 8","2":"1 2 3 J dD eD fD gD hD TC iD jD kD lD mD IC JC KC nD"},Q:{"16":"oD"},R:{"16":"pD"},S:{"2":"qD","16":"rD"}},B:5,C:"WebAssembly Extended Constant Expressions",D:false};
@@ -0,0 +1,51 @@
# isexe

Minimal module to check if a file is executable, and a normal file.

Uses `fs.stat` and tests against the `PATHEXT` environment variable on
Windows.

## USAGE

```javascript
var isexe = require('isexe')
isexe('some-file-name', function (err, isExe) {
  if (err) {
    console.error('probably file does not exist or something', err)
  } else if (isExe) {
    console.error('this thing can be run')
  } else {
    console.error('cannot be run')
  }
})

// same thing but synchronous, throws errors
var isExe = isexe.sync('some-file-name')

// treat errors as just "not executable"
isexe('maybe-missing-file', { ignoreErrors: true }, callback)
var isExe = isexe.sync('maybe-missing-file', { ignoreErrors: true })
```

## API

### `isexe(path, [options], [callback])`

Check if the path is executable. If no callback provided, and a
global `Promise` object is available, then a Promise will be returned.

Will raise whatever errors may be raised by `fs.stat`, unless
`options.ignoreErrors` is set to true.

### `isexe.sync(path, [options])`

Same as `isexe` but returns the value and throws any errors raised.

### Options

* `ignoreErrors` Treat all errors as "no, this is not executable", but
  don't raise them.
* `uid` Number to use as the user id
* `gid` Number to use as the group id
* `pathExt` List of path extensions to use instead of `PATHEXT`
  environment variable on Windows.
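Since the API above returns a Promise when no callback is passed, the same check can also be written in Promise form (a sketch; the file name is illustrative):

```javascript
var isexe = require('isexe')

// No callback: isexe resolves to a boolean instead.
isexe('some-file-name', { ignoreErrors: true }).then(function (isExe) {
  console.log(isExe ? 'this thing can be run' : 'cannot be run')
})
```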
@@ -0,0 +1,95 @@
(*
  Copyright (c) 2015-present, Facebook, Inc.

  This source code is licensed under the MIT license found in the
  LICENSE file at
  https://github.com/facebookincubator/create-react-app/blob/master/LICENSE
*)

property targetTab: null
property targetTabIndex: -1
property targetWindow: null
property theProgram: "Google Chrome"

on run argv
  set theURL to item 1 of argv

  -- Allow requested program to be optional,
  -- default to Google Chrome
  if (count of argv) > 1 then
    set theProgram to item 2 of argv
  end if

  using terms from application "Google Chrome"
    tell application theProgram

      if (count every window) = 0 then
        make new window
      end if

      -- 1: Looking for tab running debugger
      -- then, Reload debugging tab if found
      -- then return
      set found to my lookupTabWithUrl(theURL)
      if found then
        set targetWindow's active tab index to targetTabIndex
        tell targetTab to reload
        tell targetWindow to activate
        set index of targetWindow to 1
        return
      end if

      -- 2: Looking for Empty tab
      -- In case debugging tab was not found
      -- We try to find an empty tab instead
      set found to my lookupTabWithUrl("chrome://newtab/")
      if found then
        set targetWindow's active tab index to targetTabIndex
        set URL of targetTab to theURL
        tell targetWindow to activate
        return
      end if

      -- 3: Create new tab
      -- both debugging and empty tab were not found
      -- make a new tab with url
      tell window 1
        activate
        make new tab with properties {URL:theURL}
      end tell
    end tell
  end using terms from
end run

-- Function:
-- Lookup tab with given url
-- if found, store tab, index, and window in properties
-- (properties were declared on top of file)
on lookupTabWithUrl(lookupUrl)
  using terms from application "Google Chrome"
    tell application theProgram
      -- Find a tab with the given url
      set found to false
      set theTabIndex to -1
      repeat with theWindow in every window
        set theTabIndex to 0
        repeat with theTab in every tab of theWindow
          set theTabIndex to theTabIndex + 1
          if (theTab's URL as string) contains lookupUrl then
            -- assign tab, tab index, and window to properties
            set targetTab to theTab
            set targetTabIndex to theTabIndex
            set targetWindow to theWindow
            set found to true
            exit repeat
          end if
        end repeat

        if found then
          exit repeat
        end if
      end repeat
    end tell
  end using terms from
  return found
end lookupTabWithUrl
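A hypothetical invocation sketch (the script path and URL are assumptions, not the project's actual call site): the `on run argv` handler above reads the URL from argv item 1 and an optional application name from argv item 2, falling back to "Google Chrome".

```ts
import { execFileSync } from 'node:child_process'

// osascript forwards extra arguments to the script's `on run argv` handler.
execFileSync('osascript', [
  'openChrome.applescript', // assumed path to the script above
  'http://localhost:3000',  // theURL (argv item 1)
  'Google Chrome Canary',   // optional: overrides theProgram (argv item 2)
])
```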
@@ -0,0 +1 @@
module.exports={A:{A:{"1":"A B","2":"K D E F mC"},B:{"1":"0 9 C L M G N O P Q H R S T U V W X Y Z a b c d e f g h i j k l m n o p q r s t u v w x y z AB BB CB DB EB FB GB HB IB JB KB LB MB NB OB I"},C:{"1":"0 1 2 3 4 5 6 7 8 9 K D E F A B C L M G N O P QB RB SB TB UB VB WB XB YB ZB aB bB cB dB eB fB gB hB iB jB kB lB mB nB oB pB qB rB sB tB uB vB MC wB NC xB yB zB 0B 1B 2B 3B 4B 5B 6B 7B 8B 9B AC BC CC DC Q H R OC S T U V W X Y Z a b c d e f g h i j k l m n o p q r s t u v w x y z AB BB CB DB EB FB GB HB IB JB KB LB MB NB OB I PC EC QC RC oC pC","2":"nC LC J PB qC rC"},D:{"1":"0 1 2 3 4 5 6 7 8 9 F A B C L M G N O P QB RB SB TB UB VB WB XB YB ZB aB bB cB dB eB fB gB hB iB jB kB lB mB nB oB pB qB rB sB tB uB vB MC wB NC xB yB zB 0B 1B 2B 3B 4B 5B 6B 7B 8B 9B AC BC CC DC Q H R S T U V W X Y Z a b c d e f g h i j k l m n o p q r s t u v w x y z AB BB CB DB EB FB GB HB IB JB KB LB MB NB OB I PC EC QC RC","2":"J PB K D E"},E:{"1":"K D E F A B C L M G tC uC vC wC TC FC GC xC yC zC UC VC HC 0C IC WC XC YC ZC aC 1C JC bC cC dC eC fC 2C KC gC hC iC jC 3C","2":"J PB sC SC"},F:{"1":"0 1 2 3 4 5 6 7 8 G N O P QB RB SB TB UB VB WB XB YB ZB aB bB cB dB eB fB gB hB iB jB kB lB mB nB oB pB qB rB sB tB uB vB wB xB yB zB 0B 1B 2B 3B 4B 5B 6B 7B 8B 9B AC BC CC DC Q H R OC S T U V W X Y Z a b c d e f g h i j k l m n o p q r s t u v w x y z GC","2":"F B C 4C 5C 6C 7C FC kC 8C"},G:{"1":"E AD BD CD DD ED FD GD HD ID JD KD LD MD ND OD PD QD RD SD UC VC HC TD IC WC XC YC ZC aC UD JC bC cC dC eC fC VD KC gC hC iC jC","2":"SC 9C lC"},H:{"1":"WD"},I:{"1":"LC J I aD lC bD cD","2":"XD YD ZD"},J:{"1":"A","2":"D"},K:{"1":"H GC","2":"A B C FC kC"},L:{"1":"I"},M:{"1":"EC"},N:{"1":"A B"},O:{"1":"HC"},P:{"1":"1 2 3 4 5 6 7 8 J dD eD fD gD hD TC iD jD kD lD mD IC JC KC nD"},Q:{"1":"oD"},R:{"1":"pD"},S:{"1":"qD rD"}},B:5,C:"matchMedia",D:true};