update
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
@@ -0,0 +1,50 @@
export type OnProgressParameters = import("./display/api").OnProgressParameters;
export type PDFDocumentLoadingTask = import("./display/api").PDFDocumentLoadingTask;
export type PDFDocumentProxy = import("./display/api").PDFDocumentProxy;
export type PDFPageProxy = import("./display/api").PDFPageProxy;
export type RenderTask = import("./display/api").RenderTask;
export type PageViewport = import("./display/display_utils").PageViewport;
import { AbortException } from "./shared/util.js";
import { AnnotationEditorLayer } from "./display/editor/annotation_editor_layer.js";
import { AnnotationEditorParamsType } from "./shared/util.js";
import { AnnotationEditorType } from "./shared/util.js";
import { AnnotationEditorUIManager } from "./display/editor/tools.js";
import { AnnotationLayer } from "./display/annotation_layer.js";
import { AnnotationMode } from "./shared/util.js";
import { build } from "./display/api.js";
import { ColorPicker } from "./display/editor/color_picker.js";
import { createValidAbsoluteUrl } from "./shared/util.js";
import { DOMSVGFactory } from "./display/svg_factory.js";
import { DrawLayer } from "./display/draw_layer.js";
import { FeatureTest } from "./shared/util.js";
import { fetchData } from "./display/display_utils.js";
import { getDocument } from "./display/api.js";
import { getFilenameFromUrl } from "./display/display_utils.js";
import { getPdfFilenameFromUrl } from "./display/display_utils.js";
import { getXfaPageViewport } from "./display/display_utils.js";
import { GlobalWorkerOptions } from "./display/worker_options.js";
import { ImageKind } from "./shared/util.js";
import { InvalidPDFException } from "./shared/util.js";
import { isDataScheme } from "./display/display_utils.js";
import { isPdfFile } from "./display/display_utils.js";
import { MissingPDFException } from "./shared/util.js";
import { noContextMenu } from "./display/display_utils.js";
import { normalizeUnicode } from "./shared/util.js";
import { OPS } from "./shared/util.js";
import { OutputScale } from "./display/display_utils.js";
import { PasswordResponses } from "./shared/util.js";
import { PDFDataRangeTransport } from "./display/api.js";
import { PDFDateString } from "./display/display_utils.js";
import { PDFWorker } from "./display/api.js";
import { PermissionFlag } from "./shared/util.js";
import { PixelsPerInch } from "./display/display_utils.js";
import { RenderingCancelledException } from "./display/display_utils.js";
import { setLayerDimensions } from "./display/display_utils.js";
import { shadow } from "./shared/util.js";
import { TextLayer } from "./display/text_layer.js";
import { UnexpectedResponseException } from "./shared/util.js";
import { Util } from "./shared/util.js";
import { VerbosityLevel } from "./shared/util.js";
import { version } from "./display/api.js";
import { XfaLayer } from "./display/xfa_layer.js";
export { AbortException, AnnotationEditorLayer, AnnotationEditorParamsType, AnnotationEditorType, AnnotationEditorUIManager, AnnotationLayer, AnnotationMode, build, ColorPicker, createValidAbsoluteUrl, DOMSVGFactory, DrawLayer, FeatureTest, fetchData, getDocument, getFilenameFromUrl, getPdfFilenameFromUrl, getXfaPageViewport, GlobalWorkerOptions, ImageKind, InvalidPDFException, isDataScheme, isPdfFile, MissingPDFException, noContextMenu, normalizeUnicode, OPS, OutputScale, PasswordResponses, PDFDataRangeTransport, PDFDateString, PDFWorker, PermissionFlag, PixelsPerInch, RenderingCancelledException, setLayerDimensions, shadow, TextLayer, UnexpectedResponseException, Util, VerbosityLevel, version, XfaLayer };
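The declaration file above only re-exports the pdf.js display API. A minimal consumption sketch, assuming the published pdfjs-dist package layout and a worker file location (neither appears in this diff):

// Minimal pdf.js usage sketch; the package specifier and worker path are assumptions.
import { getDocument, GlobalWorkerOptions } from "pdfjs-dist";

GlobalWorkerOptions.workerSrc = "./pdf.worker.mjs"; // assumed worker location

async function firstPageSize(url) {
  const pdf = await getDocument(url).promise;      // PDFDocumentProxy
  const page = await pdf.getPage(1);               // PDFPageProxy (pages are 1-based)
  const viewport = page.getViewport({ scale: 1 }); // PageViewport
  return { width: viewport.width, height: viewport.height };
}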
@@ -0,0 +1 @@
{"version":3,"names":["_index","require","_cleanJSXElementLiteralChild","buildChildren","node","elements","i","children","length","child","isJSXText","cleanJSXElementLiteralChild","isJSXExpressionContainer","expression","isJSXEmptyExpression","push"],"sources":["../../../src/builders/react/buildChildren.ts"],"sourcesContent":["import {\n isJSXText,\n isJSXExpressionContainer,\n isJSXEmptyExpression,\n} from \"../../validators/generated/index.ts\";\nimport cleanJSXElementLiteralChild from \"../../utils/react/cleanJSXElementLiteralChild.ts\";\nimport type * as t from \"../../index.ts\";\n\ntype ReturnedChild =\n | t.JSXSpreadChild\n | t.JSXElement\n | t.JSXFragment\n | t.Expression;\n\nexport default function buildChildren(\n node: t.JSXElement | t.JSXFragment,\n): ReturnedChild[] {\n const elements = [];\n\n for (let i = 0; i < node.children.length; i++) {\n let child: any = node.children[i];\n\n if (isJSXText(child)) {\n cleanJSXElementLiteralChild(child, elements);\n continue;\n }\n\n if (isJSXExpressionContainer(child)) child = child.expression;\n if (isJSXEmptyExpression(child)) continue;\n\n elements.push(child);\n }\n\n return elements;\n}\n"],"mappings":";;;;;;AAAA,IAAAA,MAAA,GAAAC,OAAA;AAKA,IAAAC,4BAAA,GAAAD,OAAA;AASe,SAASE,aAAaA,CACnCC,IAAkC,EACjB;EACjB,MAAMC,QAAQ,GAAG,EAAE;EAEnB,KAAK,IAAIC,CAAC,GAAG,CAAC,EAAEA,CAAC,GAAGF,IAAI,CAACG,QAAQ,CAACC,MAAM,EAAEF,CAAC,EAAE,EAAE;IAC7C,IAAIG,KAAU,GAAGL,IAAI,CAACG,QAAQ,CAACD,CAAC,CAAC;IAEjC,IAAI,IAAAI,gBAAS,EAACD,KAAK,CAAC,EAAE;MACpB,IAAAE,oCAA2B,EAACF,KAAK,EAAEJ,QAAQ,CAAC;MAC5C;IACF;IAEA,IAAI,IAAAO,+BAAwB,EAACH,KAAK,CAAC,EAAEA,KAAK,GAAGA,KAAK,CAACI,UAAU;IAC7D,IAAI,IAAAC,2BAAoB,EAACL,KAAK,CAAC,EAAE;IAEjCJ,QAAQ,CAACU,IAAI,CAACN,KAAK,CAAC;EACtB;EAEA,OAAOJ,QAAQ;AACjB","ignoreList":[]}
@@ -0,0 +1 @@
export { SourceNode } from '..';
@@ -0,0 +1 @@
module.exports={A:{A:{"2":"K D E F A B mC"},B:{"1":"N O P","2":"C L M","260":"G","513":"0 9 Q H R S T U V W X Y Z a b c d e f g h i j k l m n o p q r s t u v w x y z AB BB CB DB EB FB GB HB IB JB KB LB MB NB OB I"},C:{"1":"0 9 sB tB uB vB MC wB NC xB yB zB 0B 1B 2B 3B 4B 5B 6B 7B 8B 9B AC BC CC DC Q H R OC S T U V W X Y Z a b c d e f g h i j k l m n o p q r s t u v w x y z AB BB CB DB EB FB GB HB IB JB KB LB MB NB OB I PC EC QC RC oC pC","2":"1 2 3 4 5 6 7 8 nC LC J PB K D E F A B C L M G N O P QB RB SB TB UB VB WB XB YB ZB aB bB cB dB eB fB gB hB iB jB kB lB mB nB oB qC rC","194":"pB qB rB"},D:{"1":"vB MC wB NC xB yB zB","2":"1 2 3 4 5 6 7 8 J PB K D E F A B C L M G N O P QB RB SB TB UB VB WB XB YB ZB aB bB cB dB eB fB gB hB iB jB kB lB mB nB","260":"oB pB qB rB sB tB uB","513":"0 9 0B 1B 2B 3B 4B 5B 6B 7B 8B 9B AC BC CC DC Q H R S T U V W X Y Z a b c d e f g h i j k l m n o p q r s t u v w x y z AB BB CB DB EB FB GB HB IB JB KB LB MB NB OB I PC EC QC RC"},E:{"1":"L M G GC xC yC zC UC VC HC 0C IC WC XC YC ZC aC 1C JC bC cC dC eC fC 2C KC gC hC iC jC 3C","2":"J PB K D E F A B C sC SC tC uC vC wC TC FC"},F:{"1":"iB jB kB lB mB nB oB pB qB rB sB tB uB vB wB xB yB","2":"1 2 3 4 5 6 7 8 F B C G N O P QB RB SB TB UB VB WB XB YB ZB aB 4C 5C 6C 7C FC kC 8C GC","260":"bB cB dB eB fB gB hB","513":"0 zB 0B 1B 2B 3B 4B 5B 6B 7B 8B 9B AC BC CC DC Q H R OC S T U V W X Y Z a b c d e f g h i j k l m n o p q r s t u v w x y z"},G:{"1":"LD MD ND OD PD QD RD SD UC VC HC TD IC WC XC YC ZC aC UD JC bC cC dC eC fC VD KC gC hC iC jC","2":"E SC 9C lC AD BD CD DD ED FD GD HD ID JD KD"},H:{"2":"WD"},I:{"2":"LC J XD YD ZD aD lC bD cD","513":"I"},J:{"2":"D A"},K:{"2":"A B C FC kC GC","513":"H"},L:{"1":"I"},M:{"1":"EC"},N:{"2":"A B"},O:{"1":"HC"},P:{"1":"1 2 3 4 5 6 7 8 fD gD hD TC iD jD kD lD mD IC JC KC nD","2":"J","260":"dD eD"},Q:{"513":"oD"},R:{"1":"pD"},S:{"1":"rD","2":"qD"}},B:5,C:"IntersectionObserver",D:true};
Binary file not shown.
@@ -0,0 +1,76 @@
module.exports = {
  "39": "0.20",
  "40": "0.21",
  "41": "0.21",
  "42": "0.25",
  "43": "0.27",
  "44": "0.30",
  "45": "0.31",
  "47": "0.36",
  "49": "0.37",
  "50": "1.1",
  "51": "1.2",
  "52": "1.3",
  "53": "1.4",
  "54": "1.4",
  "56": "1.6",
  "58": "1.7",
  "59": "1.8",
  "61": "2.0",
  "66": "3.0",
  "69": "4.0",
  "72": "5.0",
  "73": "5.0",
  "76": "6.0",
  "78": "7.0",
  "79": "8.0",
  "80": "8.0",
  "82": "9.0",
  "83": "9.0",
  "84": "10.0",
  "85": "10.0",
  "86": "11.0",
  "87": "11.0",
  "89": "12.0",
  "90": "13.0",
  "91": "13.0",
  "92": "14.0",
  "93": "14.0",
  "94": "15.0",
  "95": "16.0",
  "96": "16.0",
  "98": "17.0",
  "99": "18.0",
  "100": "18.0",
  "102": "19.0",
  "103": "20.0",
  "104": "20.0",
  "105": "21.0",
  "106": "21.0",
  "107": "22.0",
  "108": "22.0",
  "110": "23.0",
  "111": "24.0",
  "112": "24.0",
  "114": "25.0",
  "116": "26.0",
  "118": "27.0",
  "119": "28.0",
  "120": "28.0",
  "121": "29.0",
  "122": "29.0",
  "123": "30.0",
  "124": "30.0",
  "125": "31.0",
  "126": "31.0",
  "127": "32.0",
  "128": "32.0",
  "129": "33.0",
  "130": "33.0",
  "131": "34.0",
  "132": "34.0",
  "133": "35.0",
  "134": "35.0",
  "135": "36.0",
  "136": "36.0"
};
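The table above maps a Chromium major version to the Electron release line that ships it. A small lookup sketch over this mapping (the require path and helper name are illustrative, not part of the package's documented API):

// Illustrative lookup over the mapping above; the require path is an assumption.
const chromiumToElectron = require("./chromium-versions");

function electronForChromium(chromiumMajor) {
  // Keys are Chromium major versions as strings, e.g. "128" -> "32.0".
  return chromiumToElectron[String(chromiumMajor)];
}

console.log(electronForChromium(128)); // "32.0"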
File diff suppressed because it is too large
File diff suppressed because it is too large
@@ -0,0 +1,155 @@
import { Derived } from './derived'
import type { Store } from './store'

/**
 * This is here to solve the pyramid dependency problem where:
 *       A
 *      / \
 *     B   C
 *      \ /
 *       D
 *
 * When we deeply traverse this tree, how do we avoid recomputing D twice: once when B is updated and once when C is?
 *
 * To solve this, we create linked deps that allow us to hold back writes to the state until all of the deps have been
 * resolved.
 *
 * This is keyed by stores, because derived values cannot be written to directly, but stores can.
 */
export const __storeToDerived = new WeakMap<
  Store<unknown>,
  Set<Derived<unknown>>
>()
export const __derivedToStore = new WeakMap<
  Derived<unknown>,
  Set<Store<unknown>>
>()

export const __depsThatHaveWrittenThisTick = {
  current: [] as Array<Derived<unknown> | Store<unknown>>,
}

let __isFlushing = false
let __batchDepth = 0
const __pendingUpdates = new Set<Store<unknown>>()
// A map to store initial values captured before a batch begins
const __initialBatchValues = new Map<Store<unknown>, unknown>()

function __flush_internals(relatedVals: Set<Derived<unknown>>) {
  // First sort deriveds by dependency order
  const sorted = Array.from(relatedVals).sort((a, b) => {
    // If a depends on b, b should go first
    if (a instanceof Derived && a.options.deps.includes(b)) return 1
    // If b depends on a, a should go first
    if (b instanceof Derived && b.options.deps.includes(a)) return -1
    return 0
  })

  for (const derived of sorted) {
    if (__depsThatHaveWrittenThisTick.current.includes(derived)) {
      continue
    }

    __depsThatHaveWrittenThisTick.current.push(derived)
    derived.recompute()

    const stores = __derivedToStore.get(derived)
    if (stores) {
      for (const store of stores) {
        const relatedLinkedDerivedVals = __storeToDerived.get(store)
        if (!relatedLinkedDerivedVals) continue
        __flush_internals(relatedLinkedDerivedVals)
      }
    }
  }
}

function __notifyListeners(store: Store<unknown>) {
  store.listeners.forEach((listener) =>
    listener({
      prevVal: store.prevState as never,
      currentVal: store.state as never,
    }),
  )
}

function __notifyDerivedListeners(derived: Derived<unknown>) {
  derived.listeners.forEach((listener) =>
    listener({
      prevVal: derived.prevState as never,
      currentVal: derived.state as never,
    }),
  )
}

/**
 * @private only to be called from `Store` on write
 */
export function __flush(store: Store<unknown>) {
  // If we're starting a batch, store the initial values
  if (__batchDepth > 0 && !__initialBatchValues.has(store)) {
    __initialBatchValues.set(store, store.prevState)
  }

  __pendingUpdates.add(store)

  if (__batchDepth > 0) return
  if (__isFlushing) return

  try {
    __isFlushing = true

    while (__pendingUpdates.size > 0) {
      const stores = Array.from(__pendingUpdates)
      __pendingUpdates.clear()

      // First notify listeners with updated values
      for (const store of stores) {
        // Use initial batch values for prevState if we have them
        const prevState = __initialBatchValues.get(store) ?? store.prevState
        store.prevState = prevState
        __notifyListeners(store)
      }

      // Then update all derived values
      for (const store of stores) {
        const derivedVals = __storeToDerived.get(store)
        if (!derivedVals) continue

        __depsThatHaveWrittenThisTick.current.push(store)
        __flush_internals(derivedVals)
      }

      // Notify derived listeners after recomputing
      for (const store of stores) {
        const derivedVals = __storeToDerived.get(store)
        if (!derivedVals) continue

        for (const derived of derivedVals) {
          __notifyDerivedListeners(derived)
        }
      }
    }
  } finally {
    __isFlushing = false
    __depsThatHaveWrittenThisTick.current = []
    __initialBatchValues.clear()
  }
}

export function batch(fn: () => void) {
  __batchDepth++
  try {
    fn()
  } finally {
    __batchDepth--
    if (__batchDepth === 0) {
      const pendingUpdateToFlush = Array.from(__pendingUpdates)[0] as
        | Store<unknown>
        | undefined
      if (pendingUpdateToFlush) {
        __flush(pendingUpdateToFlush) // Trigger flush of all pending updates
      }
    }
  }
}
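A hedged usage sketch of the batching entry point above, assuming the Store and Derived classes from the sibling './store' and './derived' modules expose new Store(initialValue), store.setState(updater), store.subscribe(listener), new Derived({ deps, fn }) and derived.mount(), as in @tanstack/store; none of those definitions appear in this diff, and the import path for batch is a guess.

// Usage sketch only; the Store/Derived APIs and the './scheduler' path are assumptions.
import { Store } from './store'
import { Derived } from './derived'
import { batch } from './scheduler'

const count = new Store(1)
const double = new Derived({ deps: [count], fn: () => count.state * 2 })
double.mount() // a Derived must be mounted before it tracks its deps

count.subscribe(({ prevVal, currentVal }) => {
  console.log(prevVal, '->', currentVal)
})

// Both writes are queued; listeners and derived values flush once,
// after the callback returns and __batchDepth falls back to 0.
batch(() => {
  count.setState(() => 2)
  count.setState(() => 3)
})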
@@ -0,0 +1 @@
module.exports={A:{A:{"1":"A B","2":"K D E F mC"},B:{"1":"0 9 C L M G N O P Q H R S T U V W X Y Z a b c d e f g h i j k l m n o p q r s t u v w x y z AB BB CB DB EB FB GB HB IB JB KB LB MB NB OB I"},C:{"1":"0 1 2 3 4 5 6 7 8 9 J PB K D E F A B C L M G N O P QB RB SB TB UB VB WB XB YB ZB aB bB cB dB eB fB gB hB iB jB kB lB mB nB oB pB qB rB sB tB uB vB MC wB NC xB yB zB 0B 1B 2B 3B 4B 5B 6B 7B 8B 9B AC BC CC DC Q H R OC S T U V W X Y Z a b c d e f g h i j k l m n o p q r s t u v w x y z AB BB CB DB EB FB GB HB IB JB KB LB MB NB OB I PC EC QC RC oC pC rC","2":"nC LC qC"},D:{"1":"0 1 2 3 4 5 6 7 8 9 E F A B C L M G N O P QB RB SB TB UB VB WB XB YB ZB aB bB cB dB eB fB gB hB iB jB kB lB mB nB oB pB qB rB sB tB uB vB MC wB NC xB yB zB 0B 1B 2B 3B 4B 5B 6B 7B 8B 9B AC BC CC DC Q H R S T U V W X Y Z a b c d e f g h i j k l m n o p q r s t u v w x y z AB BB CB DB EB FB GB HB IB JB KB LB MB NB OB I PC EC QC RC","2":"J PB K D"},E:{"1":"K D E F A B C L M G tC uC vC wC TC FC GC xC yC zC UC VC HC 0C IC WC XC YC ZC aC 1C JC bC cC dC eC fC 2C KC gC hC iC jC 3C","2":"J sC SC","132":"PB"},F:{"1":"0 1 2 3 4 5 6 7 8 G N O P QB RB SB TB UB VB WB XB YB ZB aB bB cB dB eB fB gB hB iB jB kB lB mB nB oB pB qB rB sB tB uB vB wB xB yB zB 0B 1B 2B 3B 4B 5B 6B 7B 8B 9B AC BC CC DC Q H R OC S T U V W X Y Z a b c d e f g h i j k l m n o p q r s t u v w x y z","2":"F B C 4C 5C 6C 7C FC kC 8C GC"},G:{"1":"E AD BD CD DD ED FD GD HD ID JD KD LD MD ND OD PD QD RD SD UC VC HC TD IC WC XC YC ZC aC UD JC bC cC dC eC fC VD KC gC hC iC jC","2":"SC 9C lC"},H:{"2":"WD"},I:{"1":"LC J I aD lC bD cD","2":"XD YD ZD"},J:{"1":"D A"},K:{"1":"H","2":"A B C FC kC GC"},L:{"1":"I"},M:{"1":"EC"},N:{"1":"A B"},O:{"1":"HC"},P:{"1":"1 2 3 4 5 6 7 8 J dD eD fD gD hD TC iD jD kD lD mD IC JC KC nD"},Q:{"1":"oD"},R:{"1":"pD"},S:{"1":"qD rD"}},B:1,C:"async attribute for external scripts",D:true};
@@ -0,0 +1 @@
{"version":3,"names":["_getPrototypeOf","require","_isNativeReflectConstruct","_possibleConstructorReturn","_createSuper","Derived","hasNativeReflectConstruct","isNativeReflectConstruct","_createSuperInternal","Super","getPrototypeOf","result","NewTarget","constructor","Reflect","construct","arguments","apply","possibleConstructorReturn"],"sources":["../../src/helpers/createSuper.js"],"sourcesContent":["/* @minVersion 7.9.0 */\n\nimport getPrototypeOf from \"getPrototypeOf\";\nimport isNativeReflectConstruct from \"isNativeReflectConstruct\";\nimport possibleConstructorReturn from \"possibleConstructorReturn\";\n\nexport default function _createSuper(Derived) {\n var hasNativeReflectConstruct = isNativeReflectConstruct();\n\n return function _createSuperInternal() {\n var Super = getPrototypeOf(Derived),\n result;\n if (hasNativeReflectConstruct) {\n // NOTE: This doesn't work if this.__proto__.constructor has been modified.\n var NewTarget = getPrototypeOf(this).constructor;\n result = Reflect.construct(Super, arguments, NewTarget);\n } else {\n result = Super.apply(this, arguments);\n }\n return possibleConstructorReturn(this, result);\n };\n}\n"],"mappings":";;;;;;AAEA,IAAAA,eAAA,GAAAC,OAAA;AACA,IAAAC,yBAAA,GAAAD,OAAA;AACA,IAAAE,0BAAA,GAAAF,OAAA;AAEe,SAASG,YAAYA,CAACC,OAAO,EAAE;EAC5C,IAAIC,yBAAyB,GAAGC,yBAAwB,CAAC,CAAC;EAE1D,OAAO,SAASC,oBAAoBA,CAAA,EAAG;IACrC,IAAIC,KAAK,GAAGC,eAAc,CAACL,OAAO,CAAC;MACjCM,MAAM;IACR,IAAIL,yBAAyB,EAAE;MAE7B,IAAIM,SAAS,GAAGF,eAAc,CAAC,IAAI,CAAC,CAACG,WAAW;MAChDF,MAAM,GAAGG,OAAO,CAACC,SAAS,CAACN,KAAK,EAAEO,SAAS,EAAEJ,SAAS,CAAC;IACzD,CAAC,MAAM;MACLD,MAAM,GAAGF,KAAK,CAACQ,KAAK,CAAC,IAAI,EAAED,SAAS,CAAC;IACvC;IACA,OAAOE,0BAAyB,CAAC,IAAI,EAAEP,MAAM,CAAC;EAChD,CAAC;AACH","ignoreList":[]}
@@ -0,0 +1,69 @@
'use strict';

var parse = require('../');
var test = require('tape');

test('numeric short args', function (t) {
    t.plan(2);
    t.deepEqual(parse(['-n123']), { n: 123, _: [] });
    t.deepEqual(
        parse(['-123', '456']),
        { 1: true, 2: true, 3: 456, _: [] }
    );
});

test('short', function (t) {
    t.deepEqual(
        parse(['-b']),
        { b: true, _: [] },
        'short boolean'
    );
    t.deepEqual(
        parse(['foo', 'bar', 'baz']),
        { _: ['foo', 'bar', 'baz'] },
        'bare'
    );
    t.deepEqual(
        parse(['-cats']),
        { c: true, a: true, t: true, s: true, _: [] },
        'group'
    );
    t.deepEqual(
        parse(['-cats', 'meow']),
        { c: true, a: true, t: true, s: 'meow', _: [] },
        'short group next'
    );
    t.deepEqual(
        parse(['-h', 'localhost']),
        { h: 'localhost', _: [] },
        'short capture'
    );
    t.deepEqual(
        parse(['-h', 'localhost', '-p', '555']),
        { h: 'localhost', p: 555, _: [] },
        'short captures'
    );
    t.end();
});

test('mixed short bool and capture', function (t) {
    t.same(
        parse(['-h', 'localhost', '-fp', '555', 'script.js']),
        {
            f: true, p: 555, h: 'localhost',
            _: ['script.js'],
        }
    );
    t.end();
});

test('short and long', function (t) {
    t.deepEqual(
        parse(['-h', 'localhost', '-fp', '555', 'script.js']),
        {
            f: true, p: 555, h: 'localhost',
            _: ['script.js'],
        }
    );
    t.end();
});
@@ -0,0 +1,41 @@
{
  "name": "tar-fs",
  "version": "2.1.2",
  "description": "filesystem bindings for tar-stream",
  "dependencies": {
    "chownr": "^1.1.1",
    "mkdirp-classic": "^0.5.2",
    "pump": "^3.0.0",
    "tar-stream": "^2.1.4"
  },
  "keywords": [
    "tar",
    "fs",
    "file",
    "tarball",
    "directory",
    "stream"
  ],
  "devDependencies": {
    "rimraf": "^2.6.3",
    "standard": "^13.0.1",
    "tape": "^4.9.2"
  },
  "scripts": {
    "test": "standard && tape test/index.js"
  },
  "bugs": {
    "url": "https://github.com/mafintosh/tar-fs/issues"
  },
  "homepage": "https://github.com/mafintosh/tar-fs",
  "main": "index.js",
  "directories": {
    "test": "test"
  },
  "author": "Mathias Buus",
  "license": "MIT",
  "repository": {
    "type": "git",
    "url": "https://github.com/mafintosh/tar-fs.git"
  }
}
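For context, the package described by the manifest above exposes a streaming API; a short usage sketch based on tar-fs's documented pack/extract helpers (directory and file names here are placeholders):

// tar-fs usage sketch; paths are placeholders.
const tar = require('tar-fs')
const fs = require('fs')

// Pack a directory into a tarball on disk.
tar.pack('./some-dir').pipe(fs.createWriteStream('some-dir.tar'))

// Extract a tarball back into a directory.
fs.createReadStream('some-dir.tar').pipe(tar.extract('./restored-dir'))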
@@ -0,0 +1,39 @@
/**
 * @fileoverview Define the cursor which limits the number of tokens.
 * @author Toru Nagashima
 */
"use strict";

//------------------------------------------------------------------------------
// Requirements
//------------------------------------------------------------------------------

const DecorativeCursor = require("./decorative-cursor");

//------------------------------------------------------------------------------
// Exports
//------------------------------------------------------------------------------

/**
 * The decorative cursor which limits the number of tokens.
 */
module.exports = class LimitCursor extends DecorativeCursor {
    /**
     * Initializes this cursor.
     * @param {Cursor} cursor The cursor to be decorated.
     * @param {number} count The count of tokens this cursor iterates.
     */
    constructor(cursor, count) {
        super(cursor);
        this.count = count;
    }

    /** @inheritdoc */
    moveNext() {
        if (this.count > 0) {
            this.count -= 1;
            return super.moveNext();
        }
        return false;
    }
};
@@ -0,0 +1,77 @@
{
  "name": "@humanwhocodes/retry",
  "version": "0.4.2",
  "description": "A utility to retry failed async methods.",
  "type": "module",
  "main": "dist/retrier.cjs",
  "module": "dist/retrier.js",
  "types": "dist/retrier.d.ts",
  "exports": {
    "require": {
      "types": "./dist/retrier.d.cts",
      "default": "./dist/retrier.cjs"
    },
    "import": {
      "types": "./dist/retrier.d.ts",
      "default": "./dist/retrier.js"
    }
  },
  "files": [
    "dist"
  ],
  "engines": {
    "node": ">=18.18"
  },
  "publishConfig": {
    "access": "public"
  },
  "gitHooks": {
    "pre-commit": "lint-staged"
  },
  "lint-staged": {
    "*.js": [
      "eslint --fix"
    ]
  },
  "funding": {
    "type": "github",
    "url": "https://github.com/sponsors/nzakas"
  },
  "scripts": {
    "build:cts-types": "node -e \"fs.copyFileSync('dist/retrier.d.ts', 'dist/retrier.d.cts')\"",
    "build": "rollup -c && tsc && npm run build:cts-types",
    "prepare": "npm run build",
    "lint": "eslint src/ tests/",
    "pretest": "npm run build",
    "test:unit": "mocha tests/retrier.test.js",
    "test:build": "node tests/pkg.test.cjs && node tests/pkg.test.mjs",
    "test:jsr": "npx jsr@latest publish --dry-run",
    "test:emfile": "node tools/check-emfile-handling.js",
    "test": "npm run test:unit && npm run test:build"
  },
  "repository": {
    "type": "git",
    "url": "git+https://github.com/humanwhocodes/retry.git"
  },
  "keywords": [
    "nodejs",
    "retry",
    "async",
    "promises"
  ],
  "author": "Nicholas C. Zaks",
  "license": "Apache-2.0",
  "devDependencies": {
    "@eslint/js": "^8.49.0",
    "@rollup/plugin-terser": "0.4.4",
    "@tsconfig/node16": "^16.1.1",
    "@types/mocha": "^10.0.3",
    "@types/node": "20.12.6",
    "eslint": "^8.21.0",
    "lint-staged": "15.2.1",
    "mocha": "^10.3.0",
    "rollup": "3.29.4",
    "typescript": "5.4.4",
    "yorkie": "2.0.0"
  }
}
@@ -0,0 +1,179 @@
/**
 * @fileoverview Rule to enforce consistent naming of "this" context variables
 * @author Raphael Pigulla
 */
"use strict";

//------------------------------------------------------------------------------
// Rule Definition
//------------------------------------------------------------------------------

/** @type {import('../shared/types').Rule} */
module.exports = {
    meta: {
        type: "suggestion",

        docs: {
            description:
                "Enforce consistent naming when capturing the current execution context",
            recommended: false,
            frozen: true,
            url: "https://eslint.org/docs/latest/rules/consistent-this",
        },

        schema: {
            type: "array",
            items: {
                type: "string",
                minLength: 1,
            },
            uniqueItems: true,
        },

        defaultOptions: ["that"],

        messages: {
            aliasNotAssignedToThis:
                "Designated alias '{{name}}' is not assigned to 'this'.",
            unexpectedAlias: "Unexpected alias '{{name}}' for 'this'.",
        },
    },

    create(context) {
        const aliases = context.options;
        const sourceCode = context.sourceCode;

        /**
         * Reports that a variable declarator or assignment expression is assigning
         * a non-'this' value to the specified alias.
         * @param {ASTNode} node The assigning node.
         * @param {string} name The name of the alias that was incorrectly used.
         * @returns {void}
         */
        function reportBadAssignment(node, name) {
            context.report({
                node,
                messageId: "aliasNotAssignedToThis",
                data: { name },
            });
        }

        /**
         * Checks that an assignment to an identifier only assigns 'this' to the
         * appropriate alias, and the alias is only assigned to 'this'.
         * @param {ASTNode} node The assigning node.
         * @param {Identifier} name The name of the variable assigned to.
         * @param {Expression} value The value of the assignment.
         * @returns {void}
         */
        function checkAssignment(node, name, value) {
            const isThis = value.type === "ThisExpression";

            if (aliases.includes(name)) {
                if (!isThis || (node.operator && node.operator !== "=")) {
                    reportBadAssignment(node, name);
                }
            } else if (isThis) {
                context.report({
                    node,
                    messageId: "unexpectedAlias",
                    data: { name },
                });
            }
        }

        /**
         * Ensures that a variable declaration of the alias in a program or function
         * is assigned to the correct value.
         * @param {string} alias The alias to check the assignment of.
         * @param {Object} scope The scope of the current code we are checking.
         * @private
         * @returns {void}
         */
        function checkWasAssigned(alias, scope) {
            const variable = scope.set.get(alias);

            if (!variable) {
                return;
            }

            if (
                variable.defs.some(
                    def =>
                        def.node.type === "VariableDeclarator" &&
                        def.node.init !== null,
                )
            ) {
                return;
            }

            /*
             * The alias has been declared and not assigned: check it was
             * assigned later in the same scope.
             */
            if (
                !variable.references.some(reference => {
                    const write = reference.writeExpr;

                    return (
                        reference.from === scope &&
                        write &&
                        write.type === "ThisExpression" &&
                        write.parent.operator === "="
                    );
                })
            ) {
                variable.defs
                    .map(def => def.node)
                    .forEach(node => {
                        reportBadAssignment(node, alias);
                    });
            }
        }

        /**
         * Checks each alias to ensure that it was assigned to the correct value.
         * @param {ASTNode} node The node that represents the scope to check.
         * @returns {void}
         */
        function ensureWasAssigned(node) {
            const scope = sourceCode.getScope(node);

            // if this is program scope we also need to check module scope
            const extraScope =
                node.type === "Program" && node.sourceType === "module"
                    ? scope.childScopes[0]
                    : null;

            aliases.forEach(alias => {
                checkWasAssigned(alias, scope);

                if (extraScope) {
                    checkWasAssigned(alias, extraScope);
                }
            });
        }

        return {
            "Program:exit": ensureWasAssigned,
            "FunctionExpression:exit": ensureWasAssigned,
            "FunctionDeclaration:exit": ensureWasAssigned,

            VariableDeclarator(node) {
                const id = node.id;
                const isDestructuring =
                    id.type === "ArrayPattern" || id.type === "ObjectPattern";

                if (node.init !== null && !isDestructuring) {
                    checkAssignment(node, id.name, node.init);
                }
            },

            AssignmentExpression(node) {
                if (node.left.type === "Identifier") {
                    checkAssignment(node, node.left.name, node.right);
                }
            },
        };
    },
};
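The rule above takes an array of allowed alias names (defaulting to ["that"]); a minimal eslintrc-style fragment illustrating it, not part of this commit:

// Illustrative configuration; with this setting only `that` may capture `this`.
module.exports = {
    rules: {
        "consistent-this": ["error", "that"]
    }
};

// const that = this;         // ok
// const self = this;         // reported: "Unexpected alias 'self' for 'this'."
// const that = getContext(); // reported: "Designated alias 'that' is not assigned to 'this'."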