update
@@ -0,0 +1,14 @@
"use strict";

Object.defineProperty(exports, "__esModule", {
  value: true
});
exports.default = _classPrivateFieldBase;
function _classPrivateFieldBase(receiver, privateKey) {
  if (!Object.prototype.hasOwnProperty.call(receiver, privateKey)) {
    throw new TypeError("attempted to use private field on non-instance");
  }
  return receiver;
}

//# sourceMappingURL=classPrivateFieldLooseBase.js.map
@@ -0,0 +1,101 @@
# p-limit

> Run multiple promise-returning & async functions with limited concurrency

## Install

```
$ npm install p-limit
```

## Usage

```js
const pLimit = require('p-limit');

const limit = pLimit(1);

const input = [
	limit(() => fetchSomething('foo')),
	limit(() => fetchSomething('bar')),
	limit(() => doSomething())
];

(async () => {
	// Only one promise is run at once
	const result = await Promise.all(input);
	console.log(result);
})();
```

## API

### pLimit(concurrency)

Returns a `limit` function.

#### concurrency

Type: `number`\
Minimum: `1`\
Default: `Infinity`

Concurrency limit.

### limit(fn, ...args)

Returns the promise returned by calling `fn(...args)`.

#### fn

Type: `Function`

Promise-returning/async function.

#### args

Any arguments to pass through to `fn`.

Support for passing arguments on to `fn` is provided in order to avoid creating unnecessary closures. You probably don't need this optimization unless you're pushing a *lot* of functions.
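
As a rough sketch (using the hypothetical `fetchSomething` from the Usage example above), the closure form and the argument pass-through form below are equivalent; the second avoids the extra closure:

```js
// Creates an extra closure per call.
const viaClosure = limit(() => fetchSomething('foo'));

// Passes the argument through `limit` instead, avoiding the closure.
const viaArgs = limit(fetchSomething, 'foo');
```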

### limit.activeCount

The number of promises that are currently running.

### limit.pendingCount

The number of promises that are waiting to run (i.e. their internal `fn` has not been called yet).

### limit.clearQueue()

Discard pending promises that are waiting to run.

This might be useful if you want to tear down the queue at the end of your program's lifecycle or discard any function calls referencing an intermediary state of your app.

Note: This does not cancel promises that are already running.
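
A minimal sketch of how these introspection properties and `clearQueue()` fit together, assuming a fresh `limit` instance and the hypothetical `fetchSomething` from the Usage example (the exact counts depend on timing):

```js
const limit = pLimit(2);

// Queue five tasks; at most two run at a time.
const tasks = Array.from({length: 5}, (_, i) => limit(() => fetchSomething(i)));

console.log(limit.activeCount);  //=> 2 – tasks currently running
console.log(limit.pendingCount); //=> 3 – tasks still waiting for a slot

// Drop everything that hasn't started yet; already-running tasks are unaffected.
limit.clearQueue();
console.log(limit.pendingCount); //=> 0
```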

## FAQ

### How is this different from the [`p-queue`](https://github.com/sindresorhus/p-queue) package?

This package is only about limiting the number of concurrent executions, while `p-queue` is a fully featured queue implementation with lots of different options, introspection, and ability to pause the queue.

## Related

- [p-queue](https://github.com/sindresorhus/p-queue) - Promise queue with concurrency control
- [p-throttle](https://github.com/sindresorhus/p-throttle) - Throttle promise-returning & async functions
- [p-debounce](https://github.com/sindresorhus/p-debounce) - Debounce promise-returning & async functions
- [p-all](https://github.com/sindresorhus/p-all) - Run promise-returning & async functions concurrently with optional limited concurrency
- [More…](https://github.com/sindresorhus/promise-fun)

---

<div align="center">
	<b>
		<a href="https://tidelift.com/subscription/pkg/npm-p-limit?utm_source=npm-p-limit&utm_medium=referral&utm_campaign=readme">Get professional support for this package with a Tidelift subscription</a>
	</b>
	<br>
	<sub>
		Tidelift helps make open source sustainable for maintainers while giving companies<br>assurances about security, maintenance, and licensing for their dependencies.
	</sub>
</div>
@@ -0,0 +1 @@
{"version":3,"names":["_tdzError","name","ReferenceError"],"sources":["../../src/helpers/tdz.ts"],"sourcesContent":["/* @minVersion 7.5.5 */\n\nexport default function _tdzError(name: string): never {\n throw new ReferenceError(name + \" is not defined - temporal dead zone\");\n}\n"],"mappings":";;;;;;AAEe,SAASA,SAASA,CAACC,IAAY,EAAS;EACrD,MAAM,IAAIC,cAAc,CAACD,IAAI,GAAG,sCAAsC,CAAC;AACzE","ignoreList":[]}
@@ -0,0 +1,580 @@
|
||||
import { encode, decode } from '@jridgewell/sourcemap-codec';
|
||||
import resolveUri from '@jridgewell/resolve-uri';
|
||||
|
||||
function resolve(input, base) {
|
||||
// The base is always treated as a directory, if it's not empty.
|
||||
// https://github.com/mozilla/source-map/blob/8cb3ee57/lib/util.js#L327
|
||||
// https://github.com/chromium/chromium/blob/da4adbb3/third_party/blink/renderer/devtools/front_end/sdk/SourceMap.js#L400-L401
|
||||
if (base && !base.endsWith('/'))
|
||||
base += '/';
|
||||
return resolveUri(input, base);
|
||||
}
|
||||
|
||||
/**
|
||||
* Removes everything after the last "/", but leaves the slash.
|
||||
*/
|
||||
function stripFilename(path) {
|
||||
if (!path)
|
||||
return '';
|
||||
const index = path.lastIndexOf('/');
|
||||
return path.slice(0, index + 1);
|
||||
}
|
||||
|
||||
const COLUMN = 0;
|
||||
const SOURCES_INDEX = 1;
|
||||
const SOURCE_LINE = 2;
|
||||
const SOURCE_COLUMN = 3;
|
||||
const NAMES_INDEX = 4;
|
||||
const REV_GENERATED_LINE = 1;
|
||||
const REV_GENERATED_COLUMN = 2;
|
||||
|
||||
function maybeSort(mappings, owned) {
|
||||
const unsortedIndex = nextUnsortedSegmentLine(mappings, 0);
|
||||
if (unsortedIndex === mappings.length)
|
||||
return mappings;
|
||||
// If we own the array (meaning we parsed it from JSON), then we're free to directly mutate it. If
|
||||
// not, we do not want to modify the consumer's input array.
|
||||
if (!owned)
|
||||
mappings = mappings.slice();
|
||||
for (let i = unsortedIndex; i < mappings.length; i = nextUnsortedSegmentLine(mappings, i + 1)) {
|
||||
mappings[i] = sortSegments(mappings[i], owned);
|
||||
}
|
||||
return mappings;
|
||||
}
|
||||
function nextUnsortedSegmentLine(mappings, start) {
|
||||
for (let i = start; i < mappings.length; i++) {
|
||||
if (!isSorted(mappings[i]))
|
||||
return i;
|
||||
}
|
||||
return mappings.length;
|
||||
}
|
||||
function isSorted(line) {
|
||||
for (let j = 1; j < line.length; j++) {
|
||||
if (line[j][COLUMN] < line[j - 1][COLUMN]) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
return true;
|
||||
}
|
||||
function sortSegments(line, owned) {
|
||||
if (!owned)
|
||||
line = line.slice();
|
||||
return line.sort(sortComparator);
|
||||
}
|
||||
function sortComparator(a, b) {
|
||||
return a[COLUMN] - b[COLUMN];
|
||||
}
|
||||
|
||||
let found = false;
|
||||
/**
|
||||
* A binary search implementation that returns the index if a match is found.
|
||||
* If no match is found, then the left-index (the index associated with the item that comes just
|
||||
* before the desired index) is returned. To maintain proper sort order, a splice would happen at
|
||||
* the next index:
|
||||
*
|
||||
* ```js
|
||||
* const array = [1, 3];
|
||||
* const needle = 2;
|
||||
* const index = binarySearch(array, needle, (item, needle) => item - needle);
|
||||
*
|
||||
* assert.equal(index, 0);
|
||||
* array.splice(index + 1, 0, needle);
|
||||
* assert.deepEqual(array, [1, 2, 3]);
|
||||
* ```
|
||||
*/
|
||||
function binarySearch(haystack, needle, low, high) {
|
||||
while (low <= high) {
|
||||
const mid = low + ((high - low) >> 1);
|
||||
const cmp = haystack[mid][COLUMN] - needle;
|
||||
if (cmp === 0) {
|
||||
found = true;
|
||||
return mid;
|
||||
}
|
||||
if (cmp < 0) {
|
||||
low = mid + 1;
|
||||
}
|
||||
else {
|
||||
high = mid - 1;
|
||||
}
|
||||
}
|
||||
found = false;
|
||||
return low - 1;
|
||||
}
|
||||
function upperBound(haystack, needle, index) {
|
||||
for (let i = index + 1; i < haystack.length; index = i++) {
|
||||
if (haystack[i][COLUMN] !== needle)
|
||||
break;
|
||||
}
|
||||
return index;
|
||||
}
|
||||
function lowerBound(haystack, needle, index) {
|
||||
for (let i = index - 1; i >= 0; index = i--) {
|
||||
if (haystack[i][COLUMN] !== needle)
|
||||
break;
|
||||
}
|
||||
return index;
|
||||
}
|
||||
function memoizedState() {
|
||||
return {
|
||||
lastKey: -1,
|
||||
lastNeedle: -1,
|
||||
lastIndex: -1,
|
||||
};
|
||||
}
|
||||
/**
|
||||
* This overly complicated beast is just to record the last tested line/column and the resulting
|
||||
* index, allowing us to skip a few tests if mappings are monotonically increasing.
|
||||
*/
|
||||
function memoizedBinarySearch(haystack, needle, state, key) {
|
||||
const { lastKey, lastNeedle, lastIndex } = state;
|
||||
let low = 0;
|
||||
let high = haystack.length - 1;
|
||||
if (key === lastKey) {
|
||||
if (needle === lastNeedle) {
|
||||
found = lastIndex !== -1 && haystack[lastIndex][COLUMN] === needle;
|
||||
return lastIndex;
|
||||
}
|
||||
if (needle >= lastNeedle) {
|
||||
// lastIndex may be -1 if the previous needle was not found.
|
||||
low = lastIndex === -1 ? 0 : lastIndex;
|
||||
}
|
||||
else {
|
||||
high = lastIndex;
|
||||
}
|
||||
}
|
||||
state.lastKey = key;
|
||||
state.lastNeedle = needle;
|
||||
return (state.lastIndex = binarySearch(haystack, needle, low, high));
|
||||
}
|
||||
|
||||
// Rebuilds the original source files, with mappings that are ordered by source line/column instead
|
||||
// of generated line/column.
|
||||
function buildBySources(decoded, memos) {
|
||||
const sources = memos.map(buildNullArray);
|
||||
for (let i = 0; i < decoded.length; i++) {
|
||||
const line = decoded[i];
|
||||
for (let j = 0; j < line.length; j++) {
|
||||
const seg = line[j];
|
||||
if (seg.length === 1)
|
||||
continue;
|
||||
const sourceIndex = seg[SOURCES_INDEX];
|
||||
const sourceLine = seg[SOURCE_LINE];
|
||||
const sourceColumn = seg[SOURCE_COLUMN];
|
||||
const originalSource = sources[sourceIndex];
|
||||
const originalLine = (originalSource[sourceLine] || (originalSource[sourceLine] = []));
|
||||
const memo = memos[sourceIndex];
|
||||
// The binary search either found a match, or it found the left-index just before where the
|
||||
// segment should go. Either way, we want to insert after that. And there may be multiple
|
||||
// generated segments associated with an original location, so we may need to move several
|
||||
// indexes before we find where we need to insert.
|
||||
let index = upperBound(originalLine, sourceColumn, memoizedBinarySearch(originalLine, sourceColumn, memo, sourceLine));
|
||||
memo.lastIndex = ++index;
|
||||
insert(originalLine, index, [sourceColumn, i, seg[COLUMN]]);
|
||||
}
|
||||
}
|
||||
return sources;
|
||||
}
|
||||
function insert(array, index, value) {
|
||||
for (let i = array.length; i > index; i--) {
|
||||
array[i] = array[i - 1];
|
||||
}
|
||||
array[index] = value;
|
||||
}
|
||||
// Null arrays allow us to use ordered index keys without actually allocating contiguous memory like
|
||||
// a real array. We use a null-prototype object to avoid prototype pollution and deoptimizations.
|
||||
// Numeric properties on objects are magically sorted in ascending order by the engine regardless of
|
||||
// the insertion order. So, by setting any numeric keys, even out of order, we'll get ascending
|
||||
// order when iterating with for-in.
|
||||
function buildNullArray() {
|
||||
return { __proto__: null };
|
||||
}
|
||||
|
||||
const AnyMap = function (map, mapUrl) {
|
||||
const parsed = parse(map);
|
||||
if (!('sections' in parsed)) {
|
||||
return new TraceMap(parsed, mapUrl);
|
||||
}
|
||||
const mappings = [];
|
||||
const sources = [];
|
||||
const sourcesContent = [];
|
||||
const names = [];
|
||||
const ignoreList = [];
|
||||
recurse(parsed, mapUrl, mappings, sources, sourcesContent, names, ignoreList, 0, 0, Infinity, Infinity);
|
||||
const joined = {
|
||||
version: 3,
|
||||
file: parsed.file,
|
||||
names,
|
||||
sources,
|
||||
sourcesContent,
|
||||
mappings,
|
||||
ignoreList,
|
||||
};
|
||||
return presortedDecodedMap(joined);
|
||||
};
|
||||
function parse(map) {
|
||||
return typeof map === 'string' ? JSON.parse(map) : map;
|
||||
}
|
||||
function recurse(input, mapUrl, mappings, sources, sourcesContent, names, ignoreList, lineOffset, columnOffset, stopLine, stopColumn) {
|
||||
const { sections } = input;
|
||||
for (let i = 0; i < sections.length; i++) {
|
||||
const { map, offset } = sections[i];
|
||||
let sl = stopLine;
|
||||
let sc = stopColumn;
|
||||
if (i + 1 < sections.length) {
|
||||
const nextOffset = sections[i + 1].offset;
|
||||
sl = Math.min(stopLine, lineOffset + nextOffset.line);
|
||||
if (sl === stopLine) {
|
||||
sc = Math.min(stopColumn, columnOffset + nextOffset.column);
|
||||
}
|
||||
else if (sl < stopLine) {
|
||||
sc = columnOffset + nextOffset.column;
|
||||
}
|
||||
}
|
||||
addSection(map, mapUrl, mappings, sources, sourcesContent, names, ignoreList, lineOffset + offset.line, columnOffset + offset.column, sl, sc);
|
||||
}
|
||||
}
|
||||
function addSection(input, mapUrl, mappings, sources, sourcesContent, names, ignoreList, lineOffset, columnOffset, stopLine, stopColumn) {
|
||||
const parsed = parse(input);
|
||||
if ('sections' in parsed)
|
||||
return recurse(...arguments);
|
||||
const map = new TraceMap(parsed, mapUrl);
|
||||
const sourcesOffset = sources.length;
|
||||
const namesOffset = names.length;
|
||||
const decoded = decodedMappings(map);
|
||||
const { resolvedSources, sourcesContent: contents, ignoreList: ignores } = map;
|
||||
append(sources, resolvedSources);
|
||||
append(names, map.names);
|
||||
if (contents)
|
||||
append(sourcesContent, contents);
|
||||
else
|
||||
for (let i = 0; i < resolvedSources.length; i++)
|
||||
sourcesContent.push(null);
|
||||
if (ignores)
|
||||
for (let i = 0; i < ignores.length; i++)
|
||||
ignoreList.push(ignores[i] + sourcesOffset);
|
||||
for (let i = 0; i < decoded.length; i++) {
|
||||
const lineI = lineOffset + i;
|
||||
// We can only add so many lines before we step into the range that the next section's map
|
||||
// controls. When we get to the last line, then we'll start checking the segments to see if
|
||||
// they've crossed into the column range. But it may not have any columns that overstep, so we
|
||||
// still need to check that we don't overstep lines, too.
|
||||
if (lineI > stopLine)
|
||||
return;
|
||||
// The out line may already exist in mappings (if we're continuing the line started by a
|
||||
// previous section). Or, we may have jumped ahead several lines to start this section.
|
||||
const out = getLine(mappings, lineI);
|
||||
// On the 0th loop, the section's column offset shifts us forward. On all other lines (since the
|
||||
// map can be multiple lines), it doesn't.
|
||||
const cOffset = i === 0 ? columnOffset : 0;
|
||||
const line = decoded[i];
|
||||
for (let j = 0; j < line.length; j++) {
|
||||
const seg = line[j];
|
||||
const column = cOffset + seg[COLUMN];
|
||||
// If this segment steps into the column range that the next section's map controls, we need
|
||||
// to stop early.
|
||||
if (lineI === stopLine && column >= stopColumn)
|
||||
return;
|
||||
if (seg.length === 1) {
|
||||
out.push([column]);
|
||||
continue;
|
||||
}
|
||||
const sourcesIndex = sourcesOffset + seg[SOURCES_INDEX];
|
||||
const sourceLine = seg[SOURCE_LINE];
|
||||
const sourceColumn = seg[SOURCE_COLUMN];
|
||||
out.push(seg.length === 4
|
||||
? [column, sourcesIndex, sourceLine, sourceColumn]
|
||||
: [column, sourcesIndex, sourceLine, sourceColumn, namesOffset + seg[NAMES_INDEX]]);
|
||||
}
|
||||
}
|
||||
}
|
||||
function append(arr, other) {
|
||||
for (let i = 0; i < other.length; i++)
|
||||
arr.push(other[i]);
|
||||
}
|
||||
function getLine(arr, index) {
|
||||
for (let i = arr.length; i <= index; i++)
|
||||
arr[i] = [];
|
||||
return arr[index];
|
||||
}
|
||||
|
||||
const LINE_GTR_ZERO = '`line` must be greater than 0 (lines start at line 1)';
|
||||
const COL_GTR_EQ_ZERO = '`column` must be greater than or equal to 0 (columns start at column 0)';
|
||||
const LEAST_UPPER_BOUND = -1;
|
||||
const GREATEST_LOWER_BOUND = 1;
|
||||
class TraceMap {
|
||||
constructor(map, mapUrl) {
|
||||
const isString = typeof map === 'string';
|
||||
if (!isString && map._decodedMemo)
|
||||
return map;
|
||||
const parsed = (isString ? JSON.parse(map) : map);
|
||||
const { version, file, names, sourceRoot, sources, sourcesContent } = parsed;
|
||||
this.version = version;
|
||||
this.file = file;
|
||||
this.names = names || [];
|
||||
this.sourceRoot = sourceRoot;
|
||||
this.sources = sources;
|
||||
this.sourcesContent = sourcesContent;
|
||||
this.ignoreList = parsed.ignoreList || parsed.x_google_ignoreList || undefined;
|
||||
const from = resolve(sourceRoot || '', stripFilename(mapUrl));
|
||||
this.resolvedSources = sources.map((s) => resolve(s || '', from));
|
||||
const { mappings } = parsed;
|
||||
if (typeof mappings === 'string') {
|
||||
this._encoded = mappings;
|
||||
this._decoded = undefined;
|
||||
}
|
||||
else {
|
||||
this._encoded = undefined;
|
||||
this._decoded = maybeSort(mappings, isString);
|
||||
}
|
||||
this._decodedMemo = memoizedState();
|
||||
this._bySources = undefined;
|
||||
this._bySourceMemos = undefined;
|
||||
}
|
||||
}
|
||||
/**
|
||||
* Typescript doesn't allow friend access to private fields, so this just casts the map into a type
|
||||
* with public access modifiers.
|
||||
*/
|
||||
function cast(map) {
|
||||
return map;
|
||||
}
|
||||
/**
|
||||
* Returns the encoded (VLQ string) form of the SourceMap's mappings field.
|
||||
*/
|
||||
function encodedMappings(map) {
|
||||
var _a;
|
||||
var _b;
|
||||
return ((_a = (_b = cast(map))._encoded) !== null && _a !== void 0 ? _a : (_b._encoded = encode(cast(map)._decoded)));
|
||||
}
|
||||
/**
|
||||
* Returns the decoded (array of lines of segments) form of the SourceMap's mappings field.
|
||||
*/
|
||||
function decodedMappings(map) {
|
||||
var _a;
|
||||
return ((_a = cast(map))._decoded || (_a._decoded = decode(cast(map)._encoded)));
|
||||
}
|
||||
/**
|
||||
* A low-level API to find the segment associated with a generated line/column (think, from a
|
||||
* stack trace). Line and column here are 0-based, unlike `originalPositionFor`.
|
||||
*/
|
||||
function traceSegment(map, line, column) {
|
||||
const decoded = decodedMappings(map);
|
||||
// It's common for parent source maps to have pointers to lines that have no
|
||||
// mapping (like a "//# sourceMappingURL=") at the end of the child file.
|
||||
if (line >= decoded.length)
|
||||
return null;
|
||||
const segments = decoded[line];
|
||||
const index = traceSegmentInternal(segments, cast(map)._decodedMemo, line, column, GREATEST_LOWER_BOUND);
|
||||
return index === -1 ? null : segments[index];
|
||||
}
|
||||
/**
|
||||
* A higher-level API to find the source/line/column associated with a generated line/column
|
||||
* (think, from a stack trace). Line is 1-based, but column is 0-based, due to legacy behavior in
|
||||
* `source-map` library.
|
||||
*/
|
||||
function originalPositionFor(map, needle) {
|
||||
let { line, column, bias } = needle;
|
||||
line--;
|
||||
if (line < 0)
|
||||
throw new Error(LINE_GTR_ZERO);
|
||||
if (column < 0)
|
||||
throw new Error(COL_GTR_EQ_ZERO);
|
||||
const decoded = decodedMappings(map);
|
||||
// It's common for parent source maps to have pointers to lines that have no
|
||||
// mapping (like a "//# sourceMappingURL=") at the end of the child file.
|
||||
if (line >= decoded.length)
|
||||
return OMapping(null, null, null, null);
|
||||
const segments = decoded[line];
|
||||
const index = traceSegmentInternal(segments, cast(map)._decodedMemo, line, column, bias || GREATEST_LOWER_BOUND);
|
||||
if (index === -1)
|
||||
return OMapping(null, null, null, null);
|
||||
const segment = segments[index];
|
||||
if (segment.length === 1)
|
||||
return OMapping(null, null, null, null);
|
||||
const { names, resolvedSources } = map;
|
||||
return OMapping(resolvedSources[segment[SOURCES_INDEX]], segment[SOURCE_LINE] + 1, segment[SOURCE_COLUMN], segment.length === 5 ? names[segment[NAMES_INDEX]] : null);
|
||||
}
|
||||
/**
|
||||
* Finds the generated line/column position of the provided source/line/column source position.
|
||||
*/
|
||||
function generatedPositionFor(map, needle) {
|
||||
const { source, line, column, bias } = needle;
|
||||
return generatedPosition(map, source, line, column, bias || GREATEST_LOWER_BOUND, false);
|
||||
}
|
||||
/**
|
||||
* Finds all generated line/column positions of the provided source/line/column source position.
|
||||
*/
|
||||
function allGeneratedPositionsFor(map, needle) {
|
||||
const { source, line, column, bias } = needle;
|
||||
// SourceMapConsumer uses LEAST_UPPER_BOUND for some reason, so we follow suit.
|
||||
return generatedPosition(map, source, line, column, bias || LEAST_UPPER_BOUND, true);
|
||||
}
|
||||
/**
|
||||
* Iterates each mapping in generated position order.
|
||||
*/
|
||||
function eachMapping(map, cb) {
|
||||
const decoded = decodedMappings(map);
|
||||
const { names, resolvedSources } = map;
|
||||
for (let i = 0; i < decoded.length; i++) {
|
||||
const line = decoded[i];
|
||||
for (let j = 0; j < line.length; j++) {
|
||||
const seg = line[j];
|
||||
const generatedLine = i + 1;
|
||||
const generatedColumn = seg[0];
|
||||
let source = null;
|
||||
let originalLine = null;
|
||||
let originalColumn = null;
|
||||
let name = null;
|
||||
if (seg.length !== 1) {
|
||||
source = resolvedSources[seg[1]];
|
||||
originalLine = seg[2] + 1;
|
||||
originalColumn = seg[3];
|
||||
}
|
||||
if (seg.length === 5)
|
||||
name = names[seg[4]];
|
||||
cb({
|
||||
generatedLine,
|
||||
generatedColumn,
|
||||
source,
|
||||
originalLine,
|
||||
originalColumn,
|
||||
name,
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
function sourceIndex(map, source) {
|
||||
const { sources, resolvedSources } = map;
|
||||
let index = sources.indexOf(source);
|
||||
if (index === -1)
|
||||
index = resolvedSources.indexOf(source);
|
||||
return index;
|
||||
}
|
||||
/**
|
||||
* Retrieves the source content for a particular source, if its found. Returns null if not.
|
||||
*/
|
||||
function sourceContentFor(map, source) {
|
||||
const { sourcesContent } = map;
|
||||
if (sourcesContent == null)
|
||||
return null;
|
||||
const index = sourceIndex(map, source);
|
||||
return index === -1 ? null : sourcesContent[index];
|
||||
}
|
||||
/**
|
||||
* Determines if the source is marked to ignore by the source map.
|
||||
*/
|
||||
function isIgnored(map, source) {
|
||||
const { ignoreList } = map;
|
||||
if (ignoreList == null)
|
||||
return false;
|
||||
const index = sourceIndex(map, source);
|
||||
return index === -1 ? false : ignoreList.includes(index);
|
||||
}
|
||||
/**
|
||||
* A helper that skips sorting of the input map's mappings array, which can be expensive for larger
|
||||
* maps.
|
||||
*/
|
||||
function presortedDecodedMap(map, mapUrl) {
|
||||
const tracer = new TraceMap(clone(map, []), mapUrl);
|
||||
cast(tracer)._decoded = map.mappings;
|
||||
return tracer;
|
||||
}
|
||||
/**
|
||||
* Returns a sourcemap object (with decoded mappings) suitable for passing to a library that expects
|
||||
* a sourcemap, or to JSON.stringify.
|
||||
*/
|
||||
function decodedMap(map) {
|
||||
return clone(map, decodedMappings(map));
|
||||
}
|
||||
/**
|
||||
* Returns a sourcemap object (with encoded mappings) suitable for passing to a library that expects
|
||||
* a sourcemap, or to JSON.stringify.
|
||||
*/
|
||||
function encodedMap(map) {
|
||||
return clone(map, encodedMappings(map));
|
||||
}
|
||||
function clone(map, mappings) {
|
||||
return {
|
||||
version: map.version,
|
||||
file: map.file,
|
||||
names: map.names,
|
||||
sourceRoot: map.sourceRoot,
|
||||
sources: map.sources,
|
||||
sourcesContent: map.sourcesContent,
|
||||
mappings,
|
||||
ignoreList: map.ignoreList || map.x_google_ignoreList,
|
||||
};
|
||||
}
|
||||
function OMapping(source, line, column, name) {
|
||||
return { source, line, column, name };
|
||||
}
|
||||
function GMapping(line, column) {
|
||||
return { line, column };
|
||||
}
|
||||
function traceSegmentInternal(segments, memo, line, column, bias) {
|
||||
let index = memoizedBinarySearch(segments, column, memo, line);
|
||||
if (found) {
|
||||
index = (bias === LEAST_UPPER_BOUND ? upperBound : lowerBound)(segments, column, index);
|
||||
}
|
||||
else if (bias === LEAST_UPPER_BOUND)
|
||||
index++;
|
||||
if (index === -1 || index === segments.length)
|
||||
return -1;
|
||||
return index;
|
||||
}
|
||||
function sliceGeneratedPositions(segments, memo, line, column, bias) {
|
||||
let min = traceSegmentInternal(segments, memo, line, column, GREATEST_LOWER_BOUND);
|
||||
// We ignored the bias when tracing the segment so that we're guaranteed to find the first (in
|
||||
// insertion order) segment that matched. Even if we did respect the bias when tracing, we would
|
||||
// still need to call `lowerBound()` to find the first segment, which is slower than just looking
|
||||
// for the GREATEST_LOWER_BOUND to begin with. The only difference that matters for us is when the
|
||||
// binary search didn't match, in which case GREATEST_LOWER_BOUND just needs to increment to
|
||||
// match LEAST_UPPER_BOUND.
|
||||
if (!found && bias === LEAST_UPPER_BOUND)
|
||||
min++;
|
||||
if (min === -1 || min === segments.length)
|
||||
return [];
|
||||
// We may have found the segment that started at an earlier column. If this is the case, then we
|
||||
// need to slice all generated segments that match _that_ column, because all such segments span
|
||||
// to our desired column.
|
||||
const matchedColumn = found ? column : segments[min][COLUMN];
|
||||
// The binary search is not guaranteed to find the lower bound when a match wasn't found.
|
||||
if (!found)
|
||||
min = lowerBound(segments, matchedColumn, min);
|
||||
const max = upperBound(segments, matchedColumn, min);
|
||||
const result = [];
|
||||
for (; min <= max; min++) {
|
||||
const segment = segments[min];
|
||||
result.push(GMapping(segment[REV_GENERATED_LINE] + 1, segment[REV_GENERATED_COLUMN]));
|
||||
}
|
||||
return result;
|
||||
}
|
||||
function generatedPosition(map, source, line, column, bias, all) {
|
||||
var _a;
|
||||
line--;
|
||||
if (line < 0)
|
||||
throw new Error(LINE_GTR_ZERO);
|
||||
if (column < 0)
|
||||
throw new Error(COL_GTR_EQ_ZERO);
|
||||
const { sources, resolvedSources } = map;
|
||||
let sourceIndex = sources.indexOf(source);
|
||||
if (sourceIndex === -1)
|
||||
sourceIndex = resolvedSources.indexOf(source);
|
||||
if (sourceIndex === -1)
|
||||
return all ? [] : GMapping(null, null);
|
||||
const generated = ((_a = cast(map))._bySources || (_a._bySources = buildBySources(decodedMappings(map), (cast(map)._bySourceMemos = sources.map(memoizedState)))));
|
||||
const segments = generated[sourceIndex][line];
|
||||
if (segments == null)
|
||||
return all ? [] : GMapping(null, null);
|
||||
const memo = cast(map)._bySourceMemos[sourceIndex];
|
||||
if (all)
|
||||
return sliceGeneratedPositions(segments, memo, line, column, bias);
|
||||
const index = traceSegmentInternal(segments, memo, line, column, bias);
|
||||
if (index === -1)
|
||||
return GMapping(null, null);
|
||||
const segment = segments[index];
|
||||
return GMapping(segment[REV_GENERATED_LINE] + 1, segment[REV_GENERATED_COLUMN]);
|
||||
}
|
||||
|
||||
export { AnyMap, GREATEST_LOWER_BOUND, LEAST_UPPER_BOUND, TraceMap, allGeneratedPositionsFor, decodedMap, decodedMappings, eachMapping, encodedMap, encodedMappings, generatedPositionFor, isIgnored, originalPositionFor, presortedDecodedMap, sourceContentFor, traceSegment };
|
||||
//# sourceMappingURL=trace-mapping.mjs.map
|
||||
@@ -0,0 +1,217 @@
|
||||
/**
|
||||
* @fileoverview Rule that warns when identifier names are shorter or longer
|
||||
* than the values provided in configuration.
|
||||
* @author Burak Yigit Kaya aka BYK
|
||||
*/
|
||||
|
||||
"use strict";
|
||||
|
||||
//------------------------------------------------------------------------------
|
||||
// Requirements
|
||||
//------------------------------------------------------------------------------
|
||||
|
||||
const { getGraphemeCount } = require("../shared/string-utils");
|
||||
const {
|
||||
getModuleExportName,
|
||||
isImportAttributeKey,
|
||||
} = require("./utils/ast-utils");
|
||||
|
||||
//------------------------------------------------------------------------------
|
||||
// Rule Definition
|
||||
//------------------------------------------------------------------------------
|
||||
|
||||
/** @type {import('../shared/types').Rule} */
|
||||
module.exports = {
|
||||
meta: {
|
||||
type: "suggestion",
|
||||
|
||||
defaultOptions: [
|
||||
{
|
||||
exceptionPatterns: [],
|
||||
exceptions: [],
|
||||
min: 2,
|
||||
properties: "always",
|
||||
},
|
||||
],
|
||||
|
||||
docs: {
|
||||
description: "Enforce minimum and maximum identifier lengths",
|
||||
recommended: false,
|
||||
frozen: true,
|
||||
url: "https://eslint.org/docs/latest/rules/id-length",
|
||||
},
|
||||
|
||||
schema: [
|
||||
{
|
||||
type: "object",
|
||||
properties: {
|
||||
min: {
|
||||
type: "integer",
|
||||
},
|
||||
max: {
|
||||
type: "integer",
|
||||
},
|
||||
exceptions: {
|
||||
type: "array",
|
||||
uniqueItems: true,
|
||||
items: {
|
||||
type: "string",
|
||||
},
|
||||
},
|
||||
exceptionPatterns: {
|
||||
type: "array",
|
||||
uniqueItems: true,
|
||||
items: {
|
||||
type: "string",
|
||||
},
|
||||
},
|
||||
properties: {
|
||||
enum: ["always", "never"],
|
||||
},
|
||||
},
|
||||
additionalProperties: false,
|
||||
},
|
||||
],
|
||||
messages: {
|
||||
tooShort: "Identifier name '{{name}}' is too short (< {{min}}).",
|
||||
tooShortPrivate:
|
||||
"Identifier name '#{{name}}' is too short (< {{min}}).",
|
||||
tooLong: "Identifier name '{{name}}' is too long (> {{max}}).",
|
||||
tooLongPrivate:
|
||||
"Identifier name #'{{name}}' is too long (> {{max}}).",
|
||||
},
|
||||
},
|
||||
|
||||
create(context) {
|
||||
const [options] = context.options;
|
||||
const { max: maxLength = Infinity, min: minLength } = options;
|
||||
const properties = options.properties !== "never";
|
||||
const exceptions = new Set(options.exceptions);
|
||||
const exceptionPatterns = options.exceptionPatterns.map(
|
||||
pattern => new RegExp(pattern, "u"),
|
||||
);
|
||||
const reportedNodes = new Set();
|
||||
|
||||
/**
|
||||
* Checks if a string matches the provided exception patterns
|
||||
* @param {string} name The string to check.
|
||||
* @returns {boolean} if the string is a match
|
||||
* @private
|
||||
*/
|
||||
function matchesExceptionPattern(name) {
|
||||
return exceptionPatterns.some(pattern => pattern.test(name));
|
||||
}
|
||||
|
||||
const SUPPORTED_EXPRESSIONS = {
|
||||
MemberExpression:
|
||||
properties &&
|
||||
function (parent) {
|
||||
return (
|
||||
!parent.computed &&
|
||||
// regular property assignment
|
||||
((parent.parent.left === parent &&
|
||||
parent.parent.type === "AssignmentExpression") ||
|
||||
// or the last identifier in an ObjectPattern destructuring
|
||||
(parent.parent.type === "Property" &&
|
||||
parent.parent.value === parent &&
|
||||
parent.parent.parent.type === "ObjectPattern" &&
|
||||
parent.parent.parent.parent.left ===
|
||||
parent.parent.parent))
|
||||
);
|
||||
},
|
||||
AssignmentPattern(parent, node) {
|
||||
return parent.left === node;
|
||||
},
|
||||
VariableDeclarator(parent, node) {
|
||||
return parent.id === node;
|
||||
},
|
||||
Property(parent, node) {
|
||||
if (parent.parent.type === "ObjectPattern") {
|
||||
const isKeyAndValueSame =
|
||||
parent.value.name === parent.key.name;
|
||||
|
||||
return (
|
||||
(!isKeyAndValueSame && parent.value === node) ||
|
||||
(isKeyAndValueSame && parent.key === node && properties)
|
||||
);
|
||||
}
|
||||
return (
|
||||
properties &&
|
||||
!isImportAttributeKey(node) &&
|
||||
!parent.computed &&
|
||||
parent.key.name === node.name
|
||||
);
|
||||
},
|
||||
ImportSpecifier(parent, node) {
|
||||
return (
|
||||
parent.local === node &&
|
||||
getModuleExportName(parent.imported) !==
|
||||
getModuleExportName(parent.local)
|
||||
);
|
||||
},
|
||||
ImportDefaultSpecifier: true,
|
||||
ImportNamespaceSpecifier: true,
|
||||
RestElement: true,
|
||||
FunctionExpression: true,
|
||||
ArrowFunctionExpression: true,
|
||||
ClassDeclaration: true,
|
||||
FunctionDeclaration: true,
|
||||
MethodDefinition: true,
|
||||
PropertyDefinition: true,
|
||||
CatchClause: true,
|
||||
ArrayPattern: true,
|
||||
};
|
||||
|
||||
return {
|
||||
[["Identifier", "PrivateIdentifier"]](node) {
|
||||
const name = node.name;
|
||||
const parent = node.parent;
|
||||
|
||||
const nameLength = getGraphemeCount(name);
|
||||
|
||||
const isShort = nameLength < minLength;
|
||||
const isLong = nameLength > maxLength;
|
||||
|
||||
if (
|
||||
!(isShort || isLong) ||
|
||||
exceptions.has(name) ||
|
||||
matchesExceptionPattern(name)
|
||||
) {
|
||||
return; // Nothing to report
|
||||
}
|
||||
|
||||
const isValidExpression = SUPPORTED_EXPRESSIONS[parent.type];
|
||||
|
||||
/*
|
||||
* We used the range instead of the node because it's possible
|
||||
* for the same identifier to be represented by two different
|
||||
* nodes, with the most clear example being shorthand properties:
|
||||
* { foo }
|
||||
* In this case, "foo" is represented by one node for the name
|
||||
* and one for the value. The only way to know they are the same
|
||||
* is to look at the range.
|
||||
*/
|
||||
if (
|
||||
isValidExpression &&
|
||||
!reportedNodes.has(node.range.toString()) &&
|
||||
(isValidExpression === true ||
|
||||
isValidExpression(parent, node))
|
||||
) {
|
||||
reportedNodes.add(node.range.toString());
|
||||
|
||||
let messageId = isShort ? "tooShort" : "tooLong";
|
||||
|
||||
if (node.type === "PrivateIdentifier") {
|
||||
messageId += "Private";
|
||||
}
|
||||
|
||||
context.report({
|
||||
node,
|
||||
messageId,
|
||||
data: { name, min: minLength, max: maxLength },
|
||||
});
|
||||
}
|
||||
},
|
||||
};
|
||||
},
|
||||
};
|
||||
@@ -0,0 +1 @@
|
||||
module.exports={A:{A:{"2":"K D E F A B mC"},B:{"1":"0 9 C L M G N O P Q H R S T U V W X Y Z a b c d e f g h i j k l m n o p q r s t u v w x y z AB BB CB DB EB FB GB HB IB JB KB LB MB NB OB I"},C:{"1":"0 9 dB eB fB gB hB iB jB kB lB mB nB oB pB qB rB sB tB uB vB MC wB NC xB yB zB 0B 1B 2B 3B 4B 5B 6B 7B 8B 9B AC BC CC DC Q H R OC S T U V W X Y Z a b c d e f g h i j k l m n o p q r s t u v w x y z AB BB CB DB EB FB GB HB IB JB KB LB MB NB OB I PC EC QC RC oC pC","2":"1 2 3 4 5 6 7 8 nC LC J PB K D E F A B C L M G N O P QB RB SB TB UB VB WB XB YB ZB aB bB cB qC rC"},D:{"1":"0 9 eB fB gB hB iB jB kB lB mB nB oB pB qB rB sB tB uB vB MC wB NC xB yB zB 0B 1B 2B 3B 4B 5B 6B 7B 8B 9B AC BC CC DC Q H R S T U V W X Y Z a b c d e f g h i j k l m n o p q r s t u v w x y z AB BB CB DB EB FB GB HB IB JB KB LB MB NB OB I PC EC QC RC","2":"1 2 3 4 5 6 7 8 J PB K D E F A B C L M G N O P QB RB SB TB UB VB WB XB YB ZB aB bB cB dB"},E:{"1":"F A B C L M G wC TC FC GC xC yC zC UC VC HC 0C IC WC XC YC ZC aC 1C JC bC cC dC eC fC 2C KC gC hC iC jC 3C","2":"J PB K D E sC SC tC uC vC"},F:{"1":"0 RB SB TB UB VB WB XB YB ZB aB bB cB dB eB fB gB hB iB jB kB lB mB nB oB pB qB rB sB tB uB vB wB xB yB zB 0B 1B 2B 3B 4B 5B 6B 7B 8B 9B AC BC CC DC Q H R OC S T U V W X Y Z a b c d e f g h i j k l m n o p q r s t u v w x y z","2":"1 2 3 4 5 6 7 8 F B C G N O P QB 4C 5C 6C 7C FC kC 8C GC"},G:{"1":"ED FD GD HD ID JD KD LD MD ND OD PD QD RD SD UC VC HC TD IC WC XC YC ZC aC UD JC bC cC dC eC fC VD KC gC hC iC jC","2":"E SC 9C lC AD BD CD DD"},H:{"2":"WD"},I:{"1":"I","2":"LC J XD YD ZD aD lC bD cD"},J:{"2":"D A"},K:{"1":"H","2":"A B C FC kC GC"},L:{"1":"I"},M:{"1":"EC"},N:{"2":"A B"},O:{"1":"HC"},P:{"1":"1 2 3 4 5 6 7 8 J dD eD fD gD hD TC iD jD kD lD mD IC JC KC nD"},Q:{"1":"oD"},R:{"1":"pD"},S:{"1":"qD rD"}},B:6,C:"String.prototype.includes",D:true};
|
||||
@@ -0,0 +1,131 @@
|
||||
# Changelog
|
||||
|
||||
## [7.1.3] - 2025-01-22
|
||||
|
||||
### Fixed
|
||||
|
||||
- Bump napi-build-utils from 1 to 2 ([#204](https://github.com/prebuild/prebuild-install/issues/204)) ([`1bf4a15`](https://github.com/prebuild/prebuild-install/commit/1bf4a15)) (Bailey Pearson)
|
||||
|
||||
## [7.1.2] - 2024-02-29
|
||||
|
||||
### Fixed
|
||||
|
||||
- Support environments where MD5 is prohibited ([#191](https://github.com/prebuild/prebuild-install/issues/191)) ([`9140468`](https://github.com/prebuild/prebuild-install/commit/9140468)) (Tomasz Szuba)
|
||||
|
||||
## [7.1.1] - 2022-06-07
|
||||
|
||||
### Fixed
|
||||
|
||||
- Replace use of npmlog dependency with console.error ([#182](https://github.com/prebuild/prebuild-install/issues/182)) ([`4e2284c`](https://github.com/prebuild/prebuild-install/commit/4e2284c)) (Lovell Fuller)
|
||||
- Ensure script output can be captured by tests ([#181](https://github.com/prebuild/prebuild-install/issues/181)) ([`d1853cb`](https://github.com/prebuild/prebuild-install/commit/d1853cb)) (Lovell Fuller)
|
||||
|
||||
## [7.1.0] - 2022-04-20
|
||||
|
||||
### Changed
|
||||
|
||||
- Allow setting libc to glibc on non-glibc platform ([#176](https://github.com/prebuild/prebuild-install/issues/176)) ([`f729abb`](https://github.com/prebuild/prebuild-install/commit/f729abb)) (Joona Heinikoski)
|
||||
|
||||
## [7.0.1] - 2022-01-28
|
||||
|
||||
### Changed
|
||||
|
||||
- Upgrade to the latest version of `detect-libc` ([#166](https://github.com/prebuild/prebuild-install/issues/166)) ([`f71c6b9`](https://github.com/prebuild/prebuild-install/commit/f71c6b9)) (Lovell Fuller)
|
||||
|
||||
## [7.0.0] - 2021-11-12
|
||||
|
||||
### Changed
|
||||
|
||||
- **Breaking:** bump `node-abi` so that Electron 14+ gets correct ABI ([#161](https://github.com/prebuild/prebuild-install/issues/161)) ([`477f347`](https://github.com/prebuild/prebuild-install/commit/477f347)) (csett86). Drops support of Node.js < 10.
|
||||
- Bump `simple-get` ([`7468c14`](https://github.com/prebuild/prebuild-install/commit/7468c14)) (Vincent Weevers).
|
||||
|
||||
## [6.1.4] - 2021-08-11
|
||||
|
||||
### Fixed
|
||||
|
||||
- Move auth token to header instead of query param ([#160](https://github.com/prebuild/prebuild-install/issues/160)) ([`b3fad76`](https://github.com/prebuild/prebuild-install/commit/b3fad76)) (nicolai-nordic)
|
||||
- Remove `_` prefix as it isn't allowed by npm config ([#153](https://github.com/prebuild/prebuild-install/issues/153)) ([`a964e5b`](https://github.com/prebuild/prebuild-install/commit/a964e5b)) (Tom Boothman)
|
||||
- Make `rc.path` absolute ([#158](https://github.com/prebuild/prebuild-install/issues/158)) ([`57bcc06`](https://github.com/prebuild/prebuild-install/commit/57bcc06)) (George Waters).
|
||||
|
||||
## [6.1.3] - 2021-06-03
|
||||
|
||||
### Changed
|
||||
|
||||
- Inline no longer maintained `noop-logger` ([#155](https://github.com/prebuild/prebuild-install/issues/155)) ([`e08d75a`](https://github.com/prebuild/prebuild-install/commit/e08d75a)) (Alexandru Dima)
|
||||
- Point users towards `prebuildify` in README ([#150](https://github.com/prebuild/prebuild-install/issues/150)) ([`5ee1a2f`](https://github.com/prebuild/prebuild-install/commit/5ee1a2f)) (Vincent Weevers)
|
||||
|
||||
## [6.1.2] - 2021-04-24
|
||||
|
||||
### Fixed
|
||||
|
||||
- Support URL-safe strings in scoped packages ([#148](https://github.com/prebuild/prebuild-install/issues/148)) ([`db36c7a`](https://github.com/prebuild/prebuild-install/commit/db36c7a)) (Marco)
|
||||
|
||||
## [6.1.1] - 2021-04-04
|
||||
|
||||
### Fixed
|
||||
|
||||
- Support `force` & `buildFromSource` options in yarn ([#140](https://github.com/prebuild/prebuild-install/issues/140)) ([`8cb1ced`](https://github.com/prebuild/prebuild-install/commit/8cb1ced)) (João Moreno)
|
||||
- Bump `node-abi` to prevent dedupe (closes [#135](https://github.com/prebuild/prebuild-install/issues/135)) ([`2950fb2`](https://github.com/prebuild/prebuild-install/commit/2950fb2)) (Vincent Weevers)
|
||||
|
||||
## [6.1.0] - 2021-04-03
|
||||
|
||||
### Added
|
||||
|
||||
- Restore local prebuilds feature ([#137](https://github.com/prebuild/prebuild-install/issues/137)) ([`dc4e5ea`](https://github.com/prebuild/prebuild-install/commit/dc4e5ea)) (Wes Roberts). Previously removed in [#81](https://github.com/prebuild/prebuild-install/issues/81) / [`a069253`](https://github.com/prebuild/prebuild-install/commit/a06925378d38ca821bfa93aa4c1fdedc253b2420).
|
||||
|
||||
## [6.0.1] - 2021-02-14
|
||||
|
||||
### Fixed
|
||||
|
||||
- Fixes empty `--tag-prefix` ([#143](https://github.com/prebuild/prebuild-install/issues/143)) ([**@mathiask88**](https://github.com/mathiask88))
|
||||
|
||||
## [6.0.0] - 2020-10-23
|
||||
|
||||
### Changed
|
||||
|
||||
- **Breaking:** don't skip downloads in standalone mode ([`b6f3b36`](https://github.com/prebuild/prebuild-install/commit/b6f3b36)) ([**@vweevers**](https://github.com/vweevers))
|
||||
|
||||
### Added
|
||||
|
||||
- Document cross platform options ([`e5c9a5a`](https://github.com/prebuild/prebuild-install/commit/e5c9a5a)) ([**@fishbone1**](https://github.com/fishbone1))
|
||||
|
||||
### Removed
|
||||
|
||||
- **Breaking:** remove `--compile` and `--prebuild` options ([`94f2492`](https://github.com/prebuild/prebuild-install/commit/94f2492)) ([**@vweevers**](https://github.com/vweevers))
|
||||
|
||||
### Fixed
|
||||
|
||||
- Support npm 7 ([`8acccac`](https://github.com/prebuild/prebuild-install/commit/8acccac), [`08eaf6d`](https://github.com/prebuild/prebuild-install/commit/08eaf6d), [`22175b8`](https://github.com/prebuild/prebuild-install/commit/22175b8)) ([**@vweevers**](https://github.com/vweevers))
|
||||
|
||||
## [5.3.6] - 2020-10-20
|
||||
|
||||
### Changed
|
||||
|
||||
- Replace `mkdirp` dependency with `mkdirp-classic` ([**@ralphtheninja**](https://github.com/ralphtheninja))
|
||||
|
||||
[7.1.3]: https://github.com/prebuild/prebuild-install/releases/tag/v7.1.3
|
||||
|
||||
[7.1.2]: https://github.com/prebuild/prebuild-install/releases/tag/v7.1.2
|
||||
|
||||
[7.1.1]: https://github.com/prebuild/prebuild-install/releases/tag/v7.1.1
|
||||
|
||||
[7.1.0]: https://github.com/prebuild/prebuild-install/releases/tag/v7.1.0
|
||||
|
||||
[7.0.1]: https://github.com/prebuild/prebuild-install/releases/tag/v7.0.1
|
||||
|
||||
[7.0.0]: https://github.com/prebuild/prebuild-install/releases/tag/v7.0.0
|
||||
|
||||
[6.1.4]: https://github.com/prebuild/prebuild-install/releases/tag/v6.1.4
|
||||
|
||||
[6.1.3]: https://github.com/prebuild/prebuild-install/releases/tag/v6.1.3
|
||||
|
||||
[6.1.2]: https://github.com/prebuild/prebuild-install/releases/tag/v6.1.2
|
||||
|
||||
[6.1.1]: https://github.com/prebuild/prebuild-install/releases/tag/v6.1.1
|
||||
|
||||
[6.1.0]: https://github.com/prebuild/prebuild-install/releases/tag/v6.1.0
|
||||
|
||||
[6.0.1]: https://github.com/prebuild/prebuild-install/releases/tag/v6.0.1
|
||||
|
||||
[6.0.0]: https://github.com/prebuild/prebuild-install/releases/tag/v6.0.0
|
||||
|
||||
[5.3.6]: https://github.com/prebuild/prebuild-install/releases/tag/v5.3.6
|
||||
@@ -0,0 +1 @@
|
||||
module.exports={C:{"7":0.00934,"72":0.00311,"110":0.00934,"112":0.00311,"114":0.00623,"115":0.0965,"121":0.00311,"122":0.00311,"124":0.00623,"126":0.00311,"127":0.02179,"128":0.00934,"130":0.00311,"132":0.00934,"134":0.00623,"135":0.33309,"136":1.39151,"137":0.02802,_:"2 3 4 5 6 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 111 113 116 117 118 119 120 123 125 129 131 133 138 139 140 3.5 3.6"},D:{"39":0.00623,"40":0.00623,"41":0.00311,"42":0.00623,"43":0.00311,"44":0.00311,"45":0.00311,"46":0.00311,"47":0.00623,"48":0.00311,"49":0.00311,"50":0.00623,"51":0.00623,"52":0.00311,"53":0.00623,"54":0.00623,"55":0.00311,"56":0.00311,"57":0.00311,"58":0.00311,"59":0.00311,"60":0.00311,"68":0.00311,"70":0.00311,"73":0.00934,"74":0.00311,"76":0.00311,"77":0.00311,"79":0.05603,"81":0.00623,"83":0.02802,"86":0.00623,"87":0.16499,"88":0.14631,"90":0.00311,"91":0.00623,"93":0.00934,"94":0.00623,"97":0.00311,"99":0.00311,"100":0.00623,"103":0.01868,"105":0.04981,"107":0.01245,"108":0.00934,"109":0.29262,"111":0.03113,"113":0.00623,"114":0.00311,"116":0.04981,"117":0.00311,"119":0.00311,"120":0.00623,"121":0.00311,"122":0.04047,"123":0.01245,"124":0.01557,"125":0.02179,"126":0.01868,"127":0.01245,"128":0.03736,"129":0.01557,"130":0.03424,"131":0.18055,"132":0.20857,"133":4.6944,"134":9.82152,"135":0.00934,"136":0.00311,_:"4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 61 62 63 64 65 66 67 69 71 72 75 78 80 84 85 89 92 95 96 98 101 102 104 106 110 112 115 118 137 138"},F:{"46":0.00623,"87":0.01245,"88":0.02179,"115":0.0965,"116":0.06537,"117":0.21791,_:"9 11 12 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 47 48 49 50 51 52 53 54 55 56 57 58 60 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 114 9.5-9.6 10.0-10.1 10.5 10.6 11.1 11.5 11.6 12.1"},B:{"15":0.00311,"18":0.00623,"90":0.00311,"92":0.01868,"100":0.0249,"109":0.01245,"113":0.00311,"114":0.00311,"116":0.00623,"117":0.00311,"119":0.00311,"121":0.00623,"122":0.00623,"123":0.00311,"124":0.01245,"126":0.01868,"127":0.00623,"128":0.01245,"129":0.01245,"130":0.01868,"131":0.06849,"132":0.15254,"133":2.20712,"134":4.78468,_:"12 13 14 16 17 79 80 81 83 84 85 86 87 88 89 91 93 94 95 96 97 98 99 101 102 103 104 105 106 107 108 110 111 112 115 118 120 125"},E:{"14":0.00623,_:"0 4 5 6 7 8 9 10 11 12 13 15 3.1 3.2 5.1 6.1 7.1 9.1 10.1 11.1 
12.1","13.1":0.00311,"14.1":0.01245,"15.1":0.00311,"15.2-15.3":0.00311,"15.4":0.00623,"15.5":0.00311,"15.6":0.19301,"16.0":0.00934,"16.1":0.01868,"16.2":0.00311,"16.3":0.01868,"16.4":0.01245,"16.5":0.05292,"16.6":0.22102,"17.0":0.03113,"17.1":0.01868,"17.2":0.00934,"17.3":0.01245,"17.4":0.01557,"17.5":0.09962,"17.6":0.20857,"18.0":0.02802,"18.1":0.11518,"18.2":0.11207,"18.3":1.00861,"18.4":0.03424},G:{"8":0,"3.2":0,"4.0-4.1":0,"4.2-4.3":0.00162,"5.0-5.1":0,"6.0-6.1":0.00486,"7.0-7.1":0.00324,"8.1-8.4":0,"9.0-9.2":0.00243,"9.3":0.01133,"10.0-10.2":0.00081,"10.3":0.01861,"11.0-11.2":0.08578,"11.3-11.4":0.00566,"12.0-12.1":0.00324,"12.2-12.5":0.08011,"13.0-13.1":0.00162,"13.2":0.00243,"13.3":0.00324,"13.4-13.7":0.01133,"14.0-14.4":0.02832,"14.5-14.8":0.03399,"15.0-15.1":0.01861,"15.2-15.3":0.01861,"15.4":0.02266,"15.5":0.0259,"15.6-15.8":0.31883,"16.0":0.04532,"16.1":0.09306,"16.2":0.04855,"16.3":0.08416,"16.4":0.01861,"16.5":0.0348,"16.6-16.7":0.37791,"17.0":0.02266,"17.1":0.04046,"17.2":0.03075,"17.3":0.04289,"17.4":0.08578,"17.5":0.19098,"17.6-17.7":0.55432,"18.0":0.15537,"18.1":0.50819,"18.2":0.22739,"18.3":4.75256,"18.4":0.0704},P:{"4":0.11573,"20":0.01052,"21":0.02104,"22":0.09469,"23":0.05261,"24":0.16834,"25":0.14729,"26":0.41032,"27":4.12425,_:"5.0-5.4 6.2-6.4 8.2 9.2 10.1 11.1-11.2 12.0 14.0 15.0 18.0","7.2-7.4":0.11573,"13.0":0.01052,"16.0":0.01052,"17.0":0.01052,"19.0":0.03156},I:{"0":0.02749,"3":0,"4":0,"2.1":0,"2.2":0,"2.3":0,"4.1":0,"4.2-4.3":0.00001,"4.4":0,"4.4.3-4.4.4":0.00003},K:{"0":0.30992,_:"10 11 12 11.1 11.5 12.1"},A:{"10":0.00478,"11":0.12908,_:"6 7 8 9 5.5"},S:{_:"2.5 3.0-3.1"},J:{_:"7 10"},N:{_:"10 11"},R:{_:"0"},M:{"0":0.17218},Q:{"14.9":0.26859},O:{"0":0.26171},H:{"0":0},L:{"0":56.50486}};
Binary file not shown.
@@ -0,0 +1,218 @@
# @ampproject/remapping

> Remap sequential sourcemaps through transformations to point at the original source code

Remapping allows you to take the sourcemaps generated through transforming your code and "remap"
them to the original source locations. Think "my minified code, transformed with babel and bundled
with webpack", all pointing to the correct location in your original source code.

With remapping, none of your source code transformations need to be aware of the input's sourcemap,
they only need to generate an output sourcemap. This greatly simplifies building custom
transformations (think a find-and-replace).

## Installation

```sh
npm install @ampproject/remapping
```

## Usage

```typescript
function remapping(
  map: SourceMap | SourceMap[],
  loader: (file: string, ctx: LoaderContext) => (SourceMap | null | undefined),
  options?: { excludeContent: boolean, decodedMappings: boolean }
): SourceMap;

// LoaderContext gives the loader the importing sourcemap, tree depth, the ability to override the
// "source" location (where child sources are resolved relative to, or the location of original
// source), and the ability to override the "content" of an original source for inclusion in the
// output sourcemap.
type LoaderContext = {
  readonly importer: string;
  readonly depth: number;
  source: string;
  content: string | null | undefined;
}
```

`remapping` takes the final output sourcemap, and a `loader` function. For every source file pointer
in the sourcemap, the `loader` will be called with the resolved path. If the path itself represents
a transformed file (it has a sourcemap associated with it), then the `loader` should return that
sourcemap. If not, the path will be treated as original, untransformed source code.

```js
// Babel transformed "helloworld.js" into "transformed.js"
const transformedMap = JSON.stringify({
  file: 'transformed.js',
  // 1st column of 2nd line of output file translates into the 1st source
  // file, line 3, column 2
  mappings: ';CAEE',
  sources: ['helloworld.js'],
  version: 3,
});

// Uglify minified "transformed.js" into "transformed.min.js"
const minifiedTransformedMap = JSON.stringify({
  file: 'transformed.min.js',
  // 0th column of 1st line of output file translates into the 1st source
  // file, line 2, column 1.
  mappings: 'AACC',
  names: [],
  sources: ['transformed.js'],
  version: 3,
});

const remapped = remapping(
  minifiedTransformedMap,
  (file, ctx) => {

    // The "transformed.js" file is a transformed file.
    if (file === 'transformed.js') {
      // The root importer is empty.
      console.assert(ctx.importer === '');
      // The depth in the sourcemap tree we're currently loading.
      // The root `minifiedTransformedMap` is depth 0, and its source children are depth 1, etc.
      console.assert(ctx.depth === 1);

      return transformedMap;
    }

    // Loader will be called to load transformedMap's source file pointers as well.
    console.assert(file === 'helloworld.js');
    // `transformed.js`'s sourcemap points into `helloworld.js`.
    console.assert(ctx.importer === 'transformed.js');
    // This is a source child of `transformed`, which is a source child of `minifiedTransformedMap`.
    console.assert(ctx.depth === 2);
    return null;
  }
);

console.log(remapped);
// {
//   file: 'transpiled.min.js',
//   mappings: 'AAEE',
//   sources: ['helloworld.js'],
//   version: 3,
// };
```

In this example, `loader` will be called twice:

1. `"transformed.js"`, the first source file pointer in the `minifiedTransformedMap`. We return the
   associated sourcemap for it (it's a transformed file, after all) so that sourcemap locations can
   be traced through it into the source files it represents.
2. `"helloworld.js"`, our original, unmodified source code. This file does not have a sourcemap, so
   we return `null`.

The `remapped` sourcemap now points from `transformed.min.js` into locations in `helloworld.js`. If
you were to read the `mappings`, it says "0th column of the 1st line of the output file points to the
1st column of the 2nd line of the file `helloworld.js`".

### Multiple transformations of a file

As a convenience, if you have multiple single-source transformations of a file, you may pass an
array of sourcemap files in the order of most-recent transformation sourcemap first. Note that this
changes the `importer` and `depth` of each call to our loader. So our above example could have been
written as:

```js
const remapped = remapping(
  [minifiedTransformedMap, transformedMap],
  () => null
);

console.log(remapped);
// {
//   file: 'transpiled.min.js',
//   mappings: 'AAEE',
//   sources: ['helloworld.js'],
//   version: 3,
// };
```

### Advanced control of the loading graph

#### `source`

The `source` property can be overridden to any value to change the location of the current load. E.g.,
for an original source file, it allows us to change the location to the original source regardless
of what the sourcemap source entry says. And for transformed files, it allows us to change the
relative resolving location for child sources of the loaded sourcemap.

```js
const remapped = remapping(
  minifiedTransformedMap,
  (file, ctx) => {

    if (file === 'transformed.js') {
      // We pretend the transformed.js file actually exists in the 'src/' directory. When the nested
      // source files are loaded, they will now be relative to `src/`.
      ctx.source = 'src/transformed.js';
      return transformedMap;
    }

    console.assert(file === 'src/helloworld.js');
    // We could further change the source of this original file, e.g., to be inside a nested directory
    // itself. This will be reflected in the remapped sourcemap.
    ctx.source = 'src/nested/helloworld.js';
    return null;
  }
);

console.log(remapped);
// {
//   …,
//   sources: ['src/nested/helloworld.js'],
// };
```


#### `content`

The `content` property can be overridden when we encounter an original source file. E.g., this allows
you to manually provide the source content of the original file regardless of whether the
`sourcesContent` field is present in the parent sourcemap. It can also be set to `null` to remove
the source content.

```js
const remapped = remapping(
  minifiedTransformedMap,
  (file, ctx) => {

    if (file === 'transformed.js') {
      // transformedMap does not include a `sourcesContent` field, so usually the remapped sourcemap
      // would not include any `sourcesContent` values.
      return transformedMap;
    }

    console.assert(file === 'helloworld.js');
    // We can read the file to provide the source content.
    ctx.content = fs.readFileSync(file, 'utf8');
    return null;
  }
);

console.log(remapped);
// {
//   …,
//   sourcesContent: [
//     'console.log("Hello world!")',
//   ],
// };
```

### Options

#### excludeContent

By default, `excludeContent` is `false`. Passing `{ excludeContent: true }` will exclude the
`sourcesContent` field from the returned sourcemap. This is mainly useful when you want to reduce
the size of the sourcemap.

#### decodedMappings

By default, `decodedMappings` is `false`. Passing `{ decodedMappings: true }` will leave the
`mappings` field in a [decoded state](https://github.com/rich-harris/sourcemap-codec) instead of
encoding into a VLQ string.
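
For completeness, a small sketch of passing these options, reusing `minifiedTransformedMap` and the no-op loader from the earlier examples (the shape of the returned object is inferred from the signature above, not from the library's docs):

```js
const remapped = remapping(
  minifiedTransformedMap,
  () => null,
  // Drop sourcesContent and keep the mappings in decoded (array) form.
  { excludeContent: true, decodedMappings: true }
);

// With decodedMappings: true, `remapped.mappings` should remain an array of
// segments instead of being encoded into a VLQ string, and with
// excludeContent: true no sourcesContent is emitted.
console.log(Array.isArray(remapped.mappings)); // expected: true
```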
@@ -0,0 +1,141 @@
import { beforeAll, describe, expect, it, vi } from 'vitest';
import { render } from '@testing-library/react';

import { pdfjs } from '../index.test.js';

import Canvas from './Canvas.js';

import failingPage from '../../../../__mocks__/_failing_page.js';

import { loadPDF, makeAsyncCallback, muteConsole, restoreConsole } from '../../../../test-utils.js';

import PageContext from '../PageContext.js';

import type { PDFPageProxy } from 'pdfjs-dist';
import type { PageContextType } from '../shared/types.js';

const pdfFile = loadPDF('./../../__mocks__/_pdf.pdf');

function renderWithContext(children: React.ReactNode, context: Partial<PageContextType>) {
  const { rerender, ...otherResult } = render(
    <PageContext.Provider value={context as PageContextType}>{children}</PageContext.Provider>,
  );

  return {
    ...otherResult,
    rerender: (nextChildren: React.ReactNode, nextContext: Partial<PageContextType> = context) =>
      rerender(
        <PageContext.Provider value={nextContext as PageContextType}>
          {nextChildren}
        </PageContext.Provider>,
      ),
  };
}

describe('Canvas', () => {
  // Loaded page
  let page: PDFPageProxy;
  let pageWithRendererMocked: PDFPageProxy;

  beforeAll(async () => {
    const pdf = await pdfjs.getDocument({ data: pdfFile.arrayBuffer }).promise;

    page = await pdf.getPage(1);

    pageWithRendererMocked = Object.assign(page, {
      render: () => ({
        promise: new Promise<void>((resolve) => resolve()),
        cancel: () => {
          // Intentionally empty
        },
      }),
    });
  });

  describe('loading', () => {
    it('renders a page and calls onRenderSuccess callback properly', async () => {
      const { func: onRenderSuccess, promise: onRenderSuccessPromise } = makeAsyncCallback();

      muteConsole();

      renderWithContext(<Canvas />, {
        onRenderSuccess,
        page: pageWithRendererMocked,
        scale: 1,
      });

      expect.assertions(1);

      await expect(onRenderSuccessPromise).resolves.toMatchObject([{}]);

      restoreConsole();
    });

    it('calls onRenderError when failed to render canvas', async () => {
      const { func: onRenderError, promise: onRenderErrorPromise } = makeAsyncCallback();

      muteConsole();

      renderWithContext(<Canvas />, {
        onRenderError,
        page: failingPage,
        scale: 1,
      });

      expect.assertions(1);

      await expect(onRenderErrorPromise).resolves.toMatchObject([expect.any(Error)]);

      restoreConsole();
    });
  });

  describe('rendering', () => {
    it('passes canvas element to canvasRef properly', () => {
      const canvasRef = vi.fn();

      renderWithContext(<Canvas canvasRef={canvasRef} />, {
        page: pageWithRendererMocked,
        scale: 1,
      });

      expect(canvasRef).toHaveBeenCalled();
      expect(canvasRef).toHaveBeenCalledWith(expect.any(HTMLElement));
    });

    it('does not request structure tree to be rendered when renderTextLayer = false', async () => {
      const { func: onRenderSuccess, promise: onRenderSuccessPromise } = makeAsyncCallback();

      const { container } = renderWithContext(<Canvas />, {
        onRenderSuccess,
        page: pageWithRendererMocked,
        renderTextLayer: false,
      });

      await onRenderSuccessPromise;

      const structTree = container.querySelector('.react-pdf__Page__structTree');

      expect(structTree).not.toBeInTheDocument();
    });

    it('renders StructTree when given renderTextLayer = true', async () => {
      const { func: onGetStructTreeSuccess, promise: onGetStructTreeSuccessPromise } =
        makeAsyncCallback();

      const { container } = renderWithContext(<Canvas />, {
        onGetStructTreeSuccess,
        page: pageWithRendererMocked,
        renderTextLayer: true,
      });

      expect.assertions(1);

      await onGetStructTreeSuccessPromise;

      const canvas = container.querySelector('canvas') as HTMLCanvasElement;

      expect(canvas.children.length).toBeGreaterThan(0);
    });
  });
});

@@ -0,0 +1,330 @@
/**
 * @fileoverview Collects the built-in rules into a map structure so that they can be imported all at once and without
 * using the file-system directly.
 * @author Peter (Somogyvari) Metz
 */

"use strict";

/* eslint sort-keys: ["error", "asc"] -- More readable for long list */

const { LazyLoadingRuleMap } = require("./utils/lazy-loading-rule-map");

/** @type {Map<string, import("../shared/types").Rule>} */
module.exports = new LazyLoadingRuleMap(
    Object.entries({
        "accessor-pairs": () => require("./accessor-pairs"),
        "array-bracket-newline": () => require("./array-bracket-newline"),
        "array-bracket-spacing": () => require("./array-bracket-spacing"),
        "array-callback-return": () => require("./array-callback-return"),
        "array-element-newline": () => require("./array-element-newline"),
        "arrow-body-style": () => require("./arrow-body-style"),
        "arrow-parens": () => require("./arrow-parens"),
        "arrow-spacing": () => require("./arrow-spacing"),
        "block-scoped-var": () => require("./block-scoped-var"),
        "block-spacing": () => require("./block-spacing"),
        "brace-style": () => require("./brace-style"),
        "callback-return": () => require("./callback-return"),
        camelcase: () => require("./camelcase"),
        "capitalized-comments": () => require("./capitalized-comments"),
        "class-methods-use-this": () => require("./class-methods-use-this"),
        "comma-dangle": () => require("./comma-dangle"),
        "comma-spacing": () => require("./comma-spacing"),
        "comma-style": () => require("./comma-style"),
        complexity: () => require("./complexity"),
        "computed-property-spacing": () =>
            require("./computed-property-spacing"),
        "consistent-return": () => require("./consistent-return"),
        "consistent-this": () => require("./consistent-this"),
        "constructor-super": () => require("./constructor-super"),
        curly: () => require("./curly"),
        "default-case": () => require("./default-case"),
        "default-case-last": () => require("./default-case-last"),
        "default-param-last": () => require("./default-param-last"),
        "dot-location": () => require("./dot-location"),
        "dot-notation": () => require("./dot-notation"),
        "eol-last": () => require("./eol-last"),
        eqeqeq: () => require("./eqeqeq"),
        "for-direction": () => require("./for-direction"),
        "func-call-spacing": () => require("./func-call-spacing"),
        "func-name-matching": () => require("./func-name-matching"),
        "func-names": () => require("./func-names"),
        "func-style": () => require("./func-style"),
        "function-call-argument-newline": () =>
            require("./function-call-argument-newline"),
        "function-paren-newline": () => require("./function-paren-newline"),
        "generator-star-spacing": () => require("./generator-star-spacing"),
        "getter-return": () => require("./getter-return"),
        "global-require": () => require("./global-require"),
        "grouped-accessor-pairs": () => require("./grouped-accessor-pairs"),
        "guard-for-in": () => require("./guard-for-in"),
        "handle-callback-err": () => require("./handle-callback-err"),
        "id-blacklist": () => require("./id-blacklist"),
        "id-denylist": () => require("./id-denylist"),
        "id-length": () => require("./id-length"),
        "id-match": () => require("./id-match"),
        "implicit-arrow-linebreak": () => require("./implicit-arrow-linebreak"),
        indent: () => require("./indent"),
        "indent-legacy": () => require("./indent-legacy"),
        "init-declarations": () => require("./init-declarations"),
        "jsx-quotes": () => require("./jsx-quotes"),
        "key-spacing": () => require("./key-spacing"),
        "keyword-spacing": () => require("./keyword-spacing"),
        "line-comment-position": () => require("./line-comment-position"),
        "linebreak-style": () => require("./linebreak-style"),
        "lines-around-comment": () => require("./lines-around-comment"),
        "lines-around-directive": () => require("./lines-around-directive"),
        "lines-between-class-members": () =>
            require("./lines-between-class-members"),
        "logical-assignment-operators": () =>
            require("./logical-assignment-operators"),
        "max-classes-per-file": () => require("./max-classes-per-file"),
        "max-depth": () => require("./max-depth"),
        "max-len": () => require("./max-len"),
        "max-lines": () => require("./max-lines"),
        "max-lines-per-function": () => require("./max-lines-per-function"),
        "max-nested-callbacks": () => require("./max-nested-callbacks"),
        "max-params": () => require("./max-params"),
        "max-statements": () => require("./max-statements"),
        "max-statements-per-line": () => require("./max-statements-per-line"),
        "multiline-comment-style": () => require("./multiline-comment-style"),
        "multiline-ternary": () => require("./multiline-ternary"),
        "new-cap": () => require("./new-cap"),
        "new-parens": () => require("./new-parens"),
        "newline-after-var": () => require("./newline-after-var"),
        "newline-before-return": () => require("./newline-before-return"),
        "newline-per-chained-call": () => require("./newline-per-chained-call"),
        "no-alert": () => require("./no-alert"),
        "no-array-constructor": () => require("./no-array-constructor"),
        "no-async-promise-executor": () =>
            require("./no-async-promise-executor"),
        "no-await-in-loop": () => require("./no-await-in-loop"),
        "no-bitwise": () => require("./no-bitwise"),
        "no-buffer-constructor": () => require("./no-buffer-constructor"),
        "no-caller": () => require("./no-caller"),
        "no-case-declarations": () => require("./no-case-declarations"),
        "no-catch-shadow": () => require("./no-catch-shadow"),
        "no-class-assign": () => require("./no-class-assign"),
        "no-compare-neg-zero": () => require("./no-compare-neg-zero"),
        "no-cond-assign": () => require("./no-cond-assign"),
        "no-confusing-arrow": () => require("./no-confusing-arrow"),
        "no-console": () => require("./no-console"),
        "no-const-assign": () => require("./no-const-assign"),
        "no-constant-binary-expression": () =>
            require("./no-constant-binary-expression"),
        "no-constant-condition": () => require("./no-constant-condition"),
        "no-constructor-return": () => require("./no-constructor-return"),
        "no-continue": () => require("./no-continue"),
        "no-control-regex": () => require("./no-control-regex"),
        "no-debugger": () => require("./no-debugger"),
        "no-delete-var": () => require("./no-delete-var"),
        "no-div-regex": () => require("./no-div-regex"),
        "no-dupe-args": () => require("./no-dupe-args"),
        "no-dupe-class-members": () => require("./no-dupe-class-members"),
        "no-dupe-else-if": () => require("./no-dupe-else-if"),
        "no-dupe-keys": () => require("./no-dupe-keys"),
        "no-duplicate-case": () => require("./no-duplicate-case"),
        "no-duplicate-imports": () => require("./no-duplicate-imports"),
        "no-else-return": () => require("./no-else-return"),
        "no-empty": () => require("./no-empty"),
        "no-empty-character-class": () => require("./no-empty-character-class"),
        "no-empty-function": () => require("./no-empty-function"),
        "no-empty-pattern": () => require("./no-empty-pattern"),
        "no-empty-static-block": () => require("./no-empty-static-block"),
        "no-eq-null": () => require("./no-eq-null"),
        "no-eval": () => require("./no-eval"),
        "no-ex-assign": () => require("./no-ex-assign"),
        "no-extend-native": () => require("./no-extend-native"),
        "no-extra-bind": () => require("./no-extra-bind"),
        "no-extra-boolean-cast": () => require("./no-extra-boolean-cast"),
        "no-extra-label": () => require("./no-extra-label"),
        "no-extra-parens": () => require("./no-extra-parens"),
        "no-extra-semi": () => require("./no-extra-semi"),
        "no-fallthrough": () => require("./no-fallthrough"),
        "no-floating-decimal": () => require("./no-floating-decimal"),
        "no-func-assign": () => require("./no-func-assign"),
        "no-global-assign": () => require("./no-global-assign"),
        "no-implicit-coercion": () => require("./no-implicit-coercion"),
        "no-implicit-globals": () => require("./no-implicit-globals"),
        "no-implied-eval": () => require("./no-implied-eval"),
        "no-import-assign": () => require("./no-import-assign"),
        "no-inline-comments": () => require("./no-inline-comments"),
        "no-inner-declarations": () => require("./no-inner-declarations"),
        "no-invalid-regexp": () => require("./no-invalid-regexp"),
        "no-invalid-this": () => require("./no-invalid-this"),
        "no-irregular-whitespace": () => require("./no-irregular-whitespace"),
        "no-iterator": () => require("./no-iterator"),
        "no-label-var": () => require("./no-label-var"),
        "no-labels": () => require("./no-labels"),
        "no-lone-blocks": () => require("./no-lone-blocks"),
        "no-lonely-if": () => require("./no-lonely-if"),
        "no-loop-func": () => require("./no-loop-func"),
        "no-loss-of-precision": () => require("./no-loss-of-precision"),
        "no-magic-numbers": () => require("./no-magic-numbers"),
        "no-misleading-character-class": () =>
            require("./no-misleading-character-class"),
        "no-mixed-operators": () => require("./no-mixed-operators"),
        "no-mixed-requires": () => require("./no-mixed-requires"),
        "no-mixed-spaces-and-tabs": () => require("./no-mixed-spaces-and-tabs"),
        "no-multi-assign": () => require("./no-multi-assign"),
        "no-multi-spaces": () => require("./no-multi-spaces"),
        "no-multi-str": () => require("./no-multi-str"),
        "no-multiple-empty-lines": () => require("./no-multiple-empty-lines"),
        "no-native-reassign": () => require("./no-native-reassign"),
        "no-negated-condition": () => require("./no-negated-condition"),
        "no-negated-in-lhs": () => require("./no-negated-in-lhs"),
        "no-nested-ternary": () => require("./no-nested-ternary"),
        "no-new": () => require("./no-new"),
        "no-new-func": () => require("./no-new-func"),
        "no-new-native-nonconstructor": () =>
            require("./no-new-native-nonconstructor"),
        "no-new-object": () => require("./no-new-object"),
        "no-new-require": () => require("./no-new-require"),
        "no-new-symbol": () => require("./no-new-symbol"),
        "no-new-wrappers": () => require("./no-new-wrappers"),
        "no-nonoctal-decimal-escape": () =>
            require("./no-nonoctal-decimal-escape"),
        "no-obj-calls": () => require("./no-obj-calls"),
        "no-object-constructor": () => require("./no-object-constructor"),
        "no-octal": () => require("./no-octal"),
        "no-octal-escape": () => require("./no-octal-escape"),
        "no-param-reassign": () => require("./no-param-reassign"),
        "no-path-concat": () => require("./no-path-concat"),
        "no-plusplus": () => require("./no-plusplus"),
        "no-process-env": () => require("./no-process-env"),
        "no-process-exit": () => require("./no-process-exit"),
        "no-promise-executor-return": () =>
            require("./no-promise-executor-return"),
        "no-proto": () => require("./no-proto"),
        "no-prototype-builtins": () => require("./no-prototype-builtins"),
        "no-redeclare": () => require("./no-redeclare"),
        "no-regex-spaces": () => require("./no-regex-spaces"),
        "no-restricted-exports": () => require("./no-restricted-exports"),
        "no-restricted-globals": () => require("./no-restricted-globals"),
        "no-restricted-imports": () => require("./no-restricted-imports"),
        "no-restricted-modules": () => require("./no-restricted-modules"),
        "no-restricted-properties": () => require("./no-restricted-properties"),
        "no-restricted-syntax": () => require("./no-restricted-syntax"),
        "no-return-assign": () => require("./no-return-assign"),
        "no-return-await": () => require("./no-return-await"),
        "no-script-url": () => require("./no-script-url"),
        "no-self-assign": () => require("./no-self-assign"),
        "no-self-compare": () => require("./no-self-compare"),
        "no-sequences": () => require("./no-sequences"),
        "no-setter-return": () => require("./no-setter-return"),
        "no-shadow": () => require("./no-shadow"),
        "no-shadow-restricted-names": () =>
            require("./no-shadow-restricted-names"),
        "no-spaced-func": () => require("./no-spaced-func"),
        "no-sparse-arrays": () => require("./no-sparse-arrays"),
        "no-sync": () => require("./no-sync"),
        "no-tabs": () => require("./no-tabs"),
        "no-template-curly-in-string": () =>
            require("./no-template-curly-in-string"),
        "no-ternary": () => require("./no-ternary"),
        "no-this-before-super": () => require("./no-this-before-super"),
        "no-throw-literal": () => require("./no-throw-literal"),
        "no-trailing-spaces": () => require("./no-trailing-spaces"),
        "no-undef": () => require("./no-undef"),
        "no-undef-init": () => require("./no-undef-init"),
        "no-undefined": () => require("./no-undefined"),
        "no-underscore-dangle": () => require("./no-underscore-dangle"),
        "no-unexpected-multiline": () => require("./no-unexpected-multiline"),
        "no-unmodified-loop-condition": () =>
            require("./no-unmodified-loop-condition"),
        "no-unneeded-ternary": () => require("./no-unneeded-ternary"),
        "no-unreachable": () => require("./no-unreachable"),
        "no-unreachable-loop": () => require("./no-unreachable-loop"),
        "no-unsafe-finally": () => require("./no-unsafe-finally"),
        "no-unsafe-negation": () => require("./no-unsafe-negation"),
        "no-unsafe-optional-chaining": () =>
            require("./no-unsafe-optional-chaining"),
        "no-unused-expressions": () => require("./no-unused-expressions"),
        "no-unused-labels": () => require("./no-unused-labels"),
        "no-unused-private-class-members": () =>
            require("./no-unused-private-class-members"),
        "no-unused-vars": () => require("./no-unused-vars"),
        "no-use-before-define": () => require("./no-use-before-define"),
        "no-useless-assignment": () => require("./no-useless-assignment"),
        "no-useless-backreference": () => require("./no-useless-backreference"),
        "no-useless-call": () => require("./no-useless-call"),
        "no-useless-catch": () => require("./no-useless-catch"),
        "no-useless-computed-key": () => require("./no-useless-computed-key"),
        "no-useless-concat": () => require("./no-useless-concat"),
        "no-useless-constructor": () => require("./no-useless-constructor"),
        "no-useless-escape": () => require("./no-useless-escape"),
        "no-useless-rename": () => require("./no-useless-rename"),
        "no-useless-return": () => require("./no-useless-return"),
        "no-var": () => require("./no-var"),
        "no-void": () => require("./no-void"),
        "no-warning-comments": () => require("./no-warning-comments"),
        "no-whitespace-before-property": () =>
            require("./no-whitespace-before-property"),
        "no-with": () => require("./no-with"),
        "nonblock-statement-body-position": () =>
            require("./nonblock-statement-body-position"),
        "object-curly-newline": () => require("./object-curly-newline"),
        "object-curly-spacing": () => require("./object-curly-spacing"),
        "object-property-newline": () => require("./object-property-newline"),
        "object-shorthand": () => require("./object-shorthand"),
        "one-var": () => require("./one-var"),
        "one-var-declaration-per-line": () =>
            require("./one-var-declaration-per-line"),
        "operator-assignment": () => require("./operator-assignment"),
        "operator-linebreak": () => require("./operator-linebreak"),
        "padded-blocks": () => require("./padded-blocks"),
        "padding-line-between-statements": () =>
            require("./padding-line-between-statements"),
        "prefer-arrow-callback": () => require("./prefer-arrow-callback"),
        "prefer-const": () => require("./prefer-const"),
        "prefer-destructuring": () => require("./prefer-destructuring"),
        "prefer-exponentiation-operator": () =>
            require("./prefer-exponentiation-operator"),
        "prefer-named-capture-group": () =>
            require("./prefer-named-capture-group"),
        "prefer-numeric-literals": () => require("./prefer-numeric-literals"),
        "prefer-object-has-own": () => require("./prefer-object-has-own"),
        "prefer-object-spread": () => require("./prefer-object-spread"),
        "prefer-promise-reject-errors": () =>
            require("./prefer-promise-reject-errors"),
        "prefer-reflect": () => require("./prefer-reflect"),
        "prefer-regex-literals": () => require("./prefer-regex-literals"),
        "prefer-rest-params": () => require("./prefer-rest-params"),
        "prefer-spread": () => require("./prefer-spread"),
        "prefer-template": () => require("./prefer-template"),
        "quote-props": () => require("./quote-props"),
        quotes: () => require("./quotes"),
        radix: () => require("./radix"),
        "require-atomic-updates": () => require("./require-atomic-updates"),
        "require-await": () => require("./require-await"),
        "require-unicode-regexp": () => require("./require-unicode-regexp"),
        "require-yield": () => require("./require-yield"),
        "rest-spread-spacing": () => require("./rest-spread-spacing"),
        semi: () => require("./semi"),
        "semi-spacing": () => require("./semi-spacing"),
        "semi-style": () => require("./semi-style"),
        "sort-imports": () => require("./sort-imports"),
        "sort-keys": () => require("./sort-keys"),
        "sort-vars": () => require("./sort-vars"),
        "space-before-blocks": () => require("./space-before-blocks"),
        "space-before-function-paren": () =>
            require("./space-before-function-paren"),
        "space-in-parens": () => require("./space-in-parens"),
        "space-infix-ops": () => require("./space-infix-ops"),
        "space-unary-ops": () => require("./space-unary-ops"),
        "spaced-comment": () => require("./spaced-comment"),
        strict: () => require("./strict"),
        "switch-colon-spacing": () => require("./switch-colon-spacing"),
        "symbol-description": () => require("./symbol-description"),
        "template-curly-spacing": () => require("./template-curly-spacing"),
        "template-tag-spacing": () => require("./template-tag-spacing"),
        "unicode-bom": () => require("./unicode-bom"),
        "use-isnan": () => require("./use-isnan"),
        "valid-typeof": () => require("./valid-typeof"),
        "vars-on-top": () => require("./vars-on-top"),
        "wrap-iife": () => require("./wrap-iife"),
        "wrap-regex": () => require("./wrap-regex"),
        "yield-star-spacing": () => require("./yield-star-spacing"),
        yoda: () => require("./yoda"),
    }),
);