update
@@ -0,0 +1,14 @@
import { ParsedLocation, ScrollRestorationEntry, ScrollRestorationOptions } from '@tanstack/router-core';
/**
 * @deprecated use createRouter's `scrollRestoration` option instead
 */
export declare function ScrollRestoration(_props: ScrollRestorationOptions): null;
export declare function useElementScrollRestoration(options: ({
    id: string;
    getElement?: () => Window | Element | undefined | null;
} | {
    id?: string;
    getElement: () => Window | Element | undefined | null;
}) & {
    getKey?: (location: ParsedLocation) => string;
}): ScrollRestorationEntry | undefined;
@@ -0,0 +1,233 @@
|
||||
'use strict';
|
||||
|
||||
Object.defineProperty(exports, 'commentRegex', {
|
||||
get: function getCommentRegex () {
|
||||
// Groups: 1: media type, 2: MIME type, 3: charset, 4: encoding, 5: data.
|
||||
return /^\s*?\/[\/\*][@#]\s+?sourceMappingURL=data:(((?:application|text)\/json)(?:;charset=([^;,]+?)?)?)?(?:;(base64))?,(.*?)$/mg;
|
||||
}
|
||||
});
|
||||
|
||||
|
||||
Object.defineProperty(exports, 'mapFileCommentRegex', {
|
||||
get: function getMapFileCommentRegex () {
|
||||
// Matches sourceMappingURL in either // or /* comment styles.
|
||||
return /(?:\/\/[@#][ \t]+?sourceMappingURL=([^\s'"`]+?)[ \t]*?$)|(?:\/\*[@#][ \t]+sourceMappingURL=([^*]+?)[ \t]*?(?:\*\/){1}[ \t]*?$)/mg;
|
||||
}
|
||||
});
|
||||
|
||||
var decodeBase64;
|
||||
if (typeof Buffer !== 'undefined') {
|
||||
if (typeof Buffer.from === 'function') {
|
||||
decodeBase64 = decodeBase64WithBufferFrom;
|
||||
} else {
|
||||
decodeBase64 = decodeBase64WithNewBuffer;
|
||||
}
|
||||
} else {
|
||||
decodeBase64 = decodeBase64WithAtob;
|
||||
}
|
||||
|
||||
function decodeBase64WithBufferFrom(base64) {
|
||||
return Buffer.from(base64, 'base64').toString();
|
||||
}
|
||||
|
||||
function decodeBase64WithNewBuffer(base64) {
|
||||
if (typeof base64 === 'number') {
|
||||
throw new TypeError('The value to decode must not be of type number.');
|
||||
}
|
||||
return new Buffer(base64, 'base64').toString();
|
||||
}
|
||||
|
||||
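// Browser fallback: atob() returns a binary string, so escape() + decodeURIComponent() reinterprets those bytes as UTF-8.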
function decodeBase64WithAtob(base64) {
|
||||
return decodeURIComponent(escape(atob(base64)));
|
||||
}
|
||||
|
||||
function stripComment(sm) {
|
||||
return sm.split(',').pop();
|
||||
}
|
||||
|
||||
function readFromFileMap(sm, read) {
|
||||
var r = exports.mapFileCommentRegex.exec(sm);
|
||||
// for some odd reason //# .. captures in 1 and /* .. */ in 2
|
||||
var filename = r[1] || r[2];
|
||||
|
||||
try {
|
||||
var sm = read(filename);
|
||||
if (sm != null && typeof sm.catch === 'function') {
|
||||
return sm.catch(throwError);
|
||||
} else {
|
||||
return sm;
|
||||
}
|
||||
} catch (e) {
|
||||
throwError(e);
|
||||
}
|
||||
|
||||
function throwError(e) {
|
||||
throw new Error('An error occurred while trying to read the map file at ' + filename + '\n' + e.stack);
|
||||
}
|
||||
}
|
||||
|
||||
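// Normalizes the incoming source map (optionally comment-wrapped, base64- or URI-encoded, or raw JSON) into an object stored on this.sourcemap.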
function Converter (sm, opts) {
|
||||
opts = opts || {};
|
||||
|
||||
if (opts.hasComment) {
|
||||
sm = stripComment(sm);
|
||||
}
|
||||
|
||||
if (opts.encoding === 'base64') {
|
||||
sm = decodeBase64(sm);
|
||||
} else if (opts.encoding === 'uri') {
|
||||
sm = decodeURIComponent(sm);
|
||||
}
|
||||
|
||||
if (opts.isJSON || opts.encoding) {
|
||||
sm = JSON.parse(sm);
|
||||
}
|
||||
|
||||
this.sourcemap = sm;
|
||||
}
|
||||
|
||||
Converter.prototype.toJSON = function (space) {
|
||||
return JSON.stringify(this.sourcemap, null, space);
|
||||
};
|
||||
|
||||
if (typeof Buffer !== 'undefined') {
|
||||
if (typeof Buffer.from === 'function') {
|
||||
Converter.prototype.toBase64 = encodeBase64WithBufferFrom;
|
||||
} else {
|
||||
Converter.prototype.toBase64 = encodeBase64WithNewBuffer;
|
||||
}
|
||||
} else {
|
||||
Converter.prototype.toBase64 = encodeBase64WithBtoa;
|
||||
}
|
||||
|
||||
function encodeBase64WithBufferFrom() {
|
||||
var json = this.toJSON();
|
||||
return Buffer.from(json, 'utf8').toString('base64');
|
||||
}
|
||||
|
||||
function encodeBase64WithNewBuffer() {
|
||||
var json = this.toJSON();
|
||||
if (typeof json === 'number') {
|
||||
throw new TypeError('The json to encode must not be of type number.');
|
||||
}
|
||||
return new Buffer(json, 'utf8').toString('base64');
|
||||
}
|
||||
|
||||
function encodeBase64WithBtoa() {
|
||||
var json = this.toJSON();
|
||||
return btoa(unescape(encodeURIComponent(json)));
|
||||
}
|
||||
|
||||
Converter.prototype.toURI = function () {
|
||||
var json = this.toJSON();
|
||||
return encodeURIComponent(json);
|
||||
};
|
||||
|
||||
Converter.prototype.toComment = function (options) {
|
||||
var encoding, content, data;
|
||||
if (options != null && options.encoding === 'uri') {
|
||||
encoding = '';
|
||||
content = this.toURI();
|
||||
} else {
|
||||
encoding = ';base64';
|
||||
content = this.toBase64();
|
||||
}
|
||||
data = 'sourceMappingURL=data:application/json;charset=utf-8' + encoding + ',' + content;
|
||||
return options != null && options.multiline ? '/*# ' + data + ' */' : '//# ' + data;
|
||||
};
|
||||
|
||||
// returns copy instead of original
|
||||
Converter.prototype.toObject = function () {
|
||||
return JSON.parse(this.toJSON());
|
||||
};
|
||||
|
||||
Converter.prototype.addProperty = function (key, value) {
|
||||
if (this.sourcemap.hasOwnProperty(key)) throw new Error('property "' + key + '" already exists on the sourcemap, use set property instead');
|
||||
return this.setProperty(key, value);
|
||||
};
|
||||
|
||||
Converter.prototype.setProperty = function (key, value) {
|
||||
this.sourcemap[key] = value;
|
||||
return this;
|
||||
};
|
||||
|
||||
Converter.prototype.getProperty = function (key) {
|
||||
return this.sourcemap[key];
|
||||
};
|
||||
|
||||
exports.fromObject = function (obj) {
|
||||
return new Converter(obj);
|
||||
};
|
||||
|
||||
exports.fromJSON = function (json) {
|
||||
return new Converter(json, { isJSON: true });
|
||||
};
|
||||
|
||||
exports.fromURI = function (uri) {
|
||||
return new Converter(uri, { encoding: 'uri' });
|
||||
};
|
||||
|
||||
exports.fromBase64 = function (base64) {
|
||||
return new Converter(base64, { encoding: 'base64' });
|
||||
};
|
||||
|
||||
exports.fromComment = function (comment) {
|
||||
var m, encoding;
|
||||
comment = comment
|
||||
.replace(/^\/\*/g, '//')
|
||||
.replace(/\*\/$/g, '');
|
||||
m = exports.commentRegex.exec(comment);
|
||||
encoding = m && m[4] || 'uri';
|
||||
return new Converter(comment, { encoding: encoding, hasComment: true });
|
||||
};
|
||||
|
||||
function makeConverter(sm) {
|
||||
return new Converter(sm, { isJSON: true });
|
||||
}
|
||||
|
||||
exports.fromMapFileComment = function (comment, read) {
|
||||
if (typeof read === 'string') {
|
||||
throw new Error(
|
||||
'String directory paths are no longer supported with `fromMapFileComment`\n' +
|
||||
'Please review the Upgrading documentation at https://github.com/thlorenz/convert-source-map#upgrading'
|
||||
)
|
||||
}
|
||||
|
||||
var sm = readFromFileMap(comment, read);
|
||||
if (sm != null && typeof sm.then === 'function') {
|
||||
return sm.then(makeConverter);
|
||||
} else {
|
||||
return makeConverter(sm);
|
||||
}
|
||||
};
|
||||
|
||||
// Finds last sourcemap comment in file or returns null if none was found
|
||||
exports.fromSource = function (content) {
|
||||
var m = content.match(exports.commentRegex);
|
||||
return m ? exports.fromComment(m.pop()) : null;
|
||||
};
|
||||
|
||||
// Finds last sourcemap comment in file or returns null if none was found
|
||||
exports.fromMapFileSource = function (content, read) {
|
||||
if (typeof read === 'string') {
|
||||
throw new Error(
|
||||
'String directory paths are no longer supported with `fromMapFileSource`\n' +
|
||||
'Please review the Upgrading documentation at https://github.com/thlorenz/convert-source-map#upgrading'
|
||||
)
|
||||
}
|
||||
var m = content.match(exports.mapFileCommentRegex);
|
||||
return m ? exports.fromMapFileComment(m.pop(), read) : null;
|
||||
};
|
||||
|
||||
exports.removeComments = function (src) {
|
||||
return src.replace(exports.commentRegex, '');
|
||||
};
|
||||
|
||||
exports.removeMapFileComments = function (src) {
|
||||
return src.replace(exports.mapFileCommentRegex, '');
|
||||
};
|
||||
|
||||
exports.generateMapFileComment = function (file, options) {
|
||||
var data = 'sourceMappingURL=' + file;
|
||||
return options && options.multiline ? '/*# ' + data + ' */' : '//# ' + data;
|
||||
};
|
||||
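Taken together, the module above exposes a small conversion API (it is the `convert-source-map` package referenced in the error messages). A minimal usage sketch, assuming `src` holds JS source that ends with an inline base64 source map comment:

```js
var convert = require('convert-source-map');

// Parse the trailing //# sourceMappingURL=data:application/json;base64,... comment.
var converter = convert.fromSource(src);

// Inspect and tweak the underlying map object.
console.log(converter.getProperty('sources'));
converter.setProperty('sourceRoot', '/original');

// Re-serialize: as a plain object, as indented JSON, or as a fresh inline comment.
var map = converter.toObject();
var json = converter.toJSON(2);
var comment = converter.toComment();

// Strip the old comment before appending the updated one.
var updated = convert.removeComments(src) + comment;
```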
@@ -0,0 +1 @@
|
||||
{"version":3,"file":"scroll-restoration.js","sources":["../../src/scroll-restoration.tsx"],"sourcesContent":["import {\n defaultGetScrollRestorationKey,\n restoreScroll,\n storageKey,\n} from '@tanstack/router-core'\nimport { useRouter } from './useRouter'\nimport { ScriptOnce } from './ScriptOnce'\n\nexport function ScrollRestoration() {\n const router = useRouter()\n const getKey =\n router.options.getScrollRestorationKey || defaultGetScrollRestorationKey\n const userKey = getKey(router.latestLocation)\n const resolvedKey =\n userKey !== defaultGetScrollRestorationKey(router.latestLocation)\n ? userKey\n : null\n\n if (!router.isScrollRestoring || !router.isServer) {\n return null\n }\n\n return (\n <ScriptOnce\n children={`(${restoreScroll.toString()})(${JSON.stringify(storageKey)},${JSON.stringify(resolvedKey)}, undefined, true)`}\n log={false}\n />\n )\n}\n"],"names":[],"mappings":";;;;AAQO,SAAS,oBAAoB;AAClC,QAAM,SAAS,UAAU;AACnB,QAAA,SACJ,OAAO,QAAQ,2BAA2B;AACtC,QAAA,UAAU,OAAO,OAAO,cAAc;AAC5C,QAAM,cACJ,YAAY,+BAA+B,OAAO,cAAc,IAC5D,UACA;AAEN,MAAI,CAAC,OAAO,qBAAqB,CAAC,OAAO,UAAU;AAC1C,WAAA;AAAA,EAAA;AAIP,SAAA;AAAA,IAAC;AAAA,IAAA;AAAA,MACC,UAAU,IAAI,cAAc,SAAU,CAAA,KAAK,KAAK,UAAU,UAAU,CAAC,IAAI,KAAK,UAAU,WAAW,CAAC;AAAA,MACpG,KAAK;AAAA,IAAA;AAAA,EACP;AAEJ;"}
|
||||
@@ -0,0 +1,673 @@
|
||||
// src/utils.ts
|
||||
var HOLE = -1;
|
||||
var NAN = -2;
|
||||
var NEGATIVE_INFINITY = -3;
|
||||
var NEGATIVE_ZERO = -4;
|
||||
var NULL = -5;
|
||||
var POSITIVE_INFINITY = -6;
|
||||
var UNDEFINED = -7;
|
||||
var TYPE_BIGINT = "B";
|
||||
var TYPE_DATE = "D";
|
||||
var TYPE_ERROR = "E";
|
||||
var TYPE_MAP = "M";
|
||||
var TYPE_NULL_OBJECT = "N";
|
||||
var TYPE_PROMISE = "P";
|
||||
var TYPE_REGEXP = "R";
|
||||
var TYPE_SET = "S";
|
||||
var TYPE_SYMBOL = "Y";
|
||||
var TYPE_URL = "U";
|
||||
var TYPE_PREVIOUS_RESOLVED = "Z";
|
||||
var Deferred = class {
|
||||
promise;
|
||||
resolve;
|
||||
reject;
|
||||
constructor() {
|
||||
this.promise = new Promise((resolve, reject) => {
|
||||
this.resolve = resolve;
|
||||
this.reject = reject;
|
||||
});
|
||||
}
|
||||
};
|
||||
function createLineSplittingTransform() {
|
||||
const decoder = new TextDecoder();
|
||||
let leftover = "";
|
||||
return new TransformStream({
|
||||
transform(chunk, controller) {
|
||||
const str = decoder.decode(chunk, { stream: true });
|
||||
const parts = (leftover + str).split("\n");
|
||||
leftover = parts.pop() || "";
|
||||
for (const part of parts) {
|
||||
controller.enqueue(part);
|
||||
}
|
||||
},
|
||||
flush(controller) {
|
||||
if (leftover) {
|
||||
controller.enqueue(leftover);
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
// src/flatten.ts
|
||||
function flatten(input) {
|
||||
const { indices } = this;
|
||||
const existing = indices.get(input);
|
||||
if (existing)
|
||||
return [existing];
|
||||
if (input === void 0)
|
||||
return UNDEFINED;
|
||||
if (input === null)
|
||||
return NULL;
|
||||
if (Number.isNaN(input))
|
||||
return NAN;
|
||||
if (input === Number.POSITIVE_INFINITY)
|
||||
return POSITIVE_INFINITY;
|
||||
if (input === Number.NEGATIVE_INFINITY)
|
||||
return NEGATIVE_INFINITY;
|
||||
if (input === 0 && 1 / input < 0)
|
||||
return NEGATIVE_ZERO;
|
||||
const index = this.index++;
|
||||
indices.set(input, index);
|
||||
stringify.call(this, input, index);
|
||||
return index;
|
||||
}
|
||||
function stringify(input, index) {
|
||||
const { deferred, plugins, postPlugins } = this;
|
||||
const str = this.stringified;
|
||||
const stack = [[input, index]];
|
||||
while (stack.length > 0) {
|
||||
const [input2, index2] = stack.pop();
|
||||
const partsForObj = (obj) => Object.keys(obj).map((k) => `"_${flatten.call(this, k)}":${flatten.call(this, obj[k])}`).join(",");
|
||||
let error = null;
|
||||
switch (typeof input2) {
|
||||
case "boolean":
|
||||
case "number":
|
||||
case "string":
|
||||
str[index2] = JSON.stringify(input2);
|
||||
break;
|
||||
case "bigint":
|
||||
str[index2] = `["${TYPE_BIGINT}","${input2}"]`;
|
||||
break;
|
||||
case "symbol": {
|
||||
const keyFor = Symbol.keyFor(input2);
|
||||
if (!keyFor) {
|
||||
error = new Error(
|
||||
"Cannot encode symbol unless created with Symbol.for()"
|
||||
);
|
||||
} else {
|
||||
str[index2] = `["${TYPE_SYMBOL}",${JSON.stringify(keyFor)}]`;
|
||||
}
|
||||
break;
|
||||
}
|
||||
case "object": {
|
||||
if (!input2) {
|
||||
str[index2] = `${NULL}`;
|
||||
break;
|
||||
}
|
||||
const isArray = Array.isArray(input2);
|
||||
let pluginHandled = false;
|
||||
if (!isArray && plugins) {
|
||||
for (const plugin of plugins) {
|
||||
const pluginResult = plugin(input2);
|
||||
if (Array.isArray(pluginResult)) {
|
||||
pluginHandled = true;
|
||||
const [pluginIdentifier, ...rest] = pluginResult;
|
||||
str[index2] = `[${JSON.stringify(pluginIdentifier)}`;
|
||||
if (rest.length > 0) {
|
||||
str[index2] += `,${rest.map((v) => flatten.call(this, v)).join(",")}`;
|
||||
}
|
||||
str[index2] += "]";
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
if (!pluginHandled) {
|
||||
let result = isArray ? "[" : "{";
|
||||
if (isArray) {
|
||||
for (let i = 0; i < input2.length; i++)
|
||||
result += (i ? "," : "") + (i in input2 ? flatten.call(this, input2[i]) : HOLE);
|
||||
str[index2] = `${result}]`;
|
||||
} else if (input2 instanceof Date) {
|
||||
str[index2] = `["${TYPE_DATE}",${input2.getTime()}]`;
|
||||
} else if (input2 instanceof URL) {
|
||||
str[index2] = `["${TYPE_URL}",${JSON.stringify(input2.href)}]`;
|
||||
} else if (input2 instanceof RegExp) {
|
||||
str[index2] = `["${TYPE_REGEXP}",${JSON.stringify(
|
||||
input2.source
|
||||
)},${JSON.stringify(input2.flags)}]`;
|
||||
} else if (input2 instanceof Set) {
|
||||
if (input2.size > 0) {
|
||||
str[index2] = `["${TYPE_SET}",${[...input2].map((val) => flatten.call(this, val)).join(",")}]`;
|
||||
} else {
|
||||
str[index2] = `["${TYPE_SET}"]`;
|
||||
}
|
||||
} else if (input2 instanceof Map) {
|
||||
if (input2.size > 0) {
|
||||
str[index2] = `["${TYPE_MAP}",${[...input2].flatMap(([k, v]) => [
|
||||
flatten.call(this, k),
|
||||
flatten.call(this, v)
|
||||
]).join(",")}]`;
|
||||
} else {
|
||||
str[index2] = `["${TYPE_MAP}"]`;
|
||||
}
|
||||
} else if (input2 instanceof Promise) {
|
||||
str[index2] = `["${TYPE_PROMISE}",${index2}]`;
|
||||
deferred[index2] = input2;
|
||||
} else if (input2 instanceof Error) {
|
||||
str[index2] = `["${TYPE_ERROR}",${JSON.stringify(input2.message)}`;
|
||||
if (input2.name !== "Error") {
|
||||
str[index2] += `,${JSON.stringify(input2.name)}`;
|
||||
}
|
||||
str[index2] += "]";
|
||||
} else if (Object.getPrototypeOf(input2) === null) {
|
||||
str[index2] = `["${TYPE_NULL_OBJECT}",{${partsForObj(input2)}}]`;
|
||||
} else if (isPlainObject(input2)) {
|
||||
str[index2] = `{${partsForObj(input2)}}`;
|
||||
} else {
|
||||
error = new Error("Cannot encode object with prototype");
|
||||
}
|
||||
}
|
||||
break;
|
||||
}
|
||||
default: {
|
||||
const isArray = Array.isArray(input2);
|
||||
let pluginHandled = false;
|
||||
if (!isArray && plugins) {
|
||||
for (const plugin of plugins) {
|
||||
const pluginResult = plugin(input2);
|
||||
if (Array.isArray(pluginResult)) {
|
||||
pluginHandled = true;
|
||||
const [pluginIdentifier, ...rest] = pluginResult;
|
||||
str[index2] = `[${JSON.stringify(pluginIdentifier)}`;
|
||||
if (rest.length > 0) {
|
||||
str[index2] += `,${rest.map((v) => flatten.call(this, v)).join(",")}`;
|
||||
}
|
||||
str[index2] += "]";
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
if (!pluginHandled) {
|
||||
error = new Error("Cannot encode function or unexpected type");
|
||||
}
|
||||
}
|
||||
}
|
||||
if (error) {
|
||||
let pluginHandled = false;
|
||||
if (postPlugins) {
|
||||
for (const plugin of postPlugins) {
|
||||
const pluginResult = plugin(input2);
|
||||
if (Array.isArray(pluginResult)) {
|
||||
pluginHandled = true;
|
||||
const [pluginIdentifier, ...rest] = pluginResult;
|
||||
str[index2] = `[${JSON.stringify(pluginIdentifier)}`;
|
||||
if (rest.length > 0) {
|
||||
str[index2] += `,${rest.map((v) => flatten.call(this, v)).join(",")}`;
|
||||
}
|
||||
str[index2] += "]";
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
if (!pluginHandled) {
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
var objectProtoNames = Object.getOwnPropertyNames(Object.prototype).sort().join("\0");
|
||||
function isPlainObject(thing) {
|
||||
const proto = Object.getPrototypeOf(thing);
|
||||
return proto === Object.prototype || proto === null || Object.getOwnPropertyNames(proto).sort().join("\0") === objectProtoNames;
|
||||
}
|
||||
|
||||
// src/unflatten.ts
|
||||
var globalObj = typeof window !== "undefined" ? window : typeof globalThis !== "undefined" ? globalThis : void 0;
|
||||
function unflatten(parsed) {
|
||||
const { hydrated, values } = this;
|
||||
if (typeof parsed === "number")
|
||||
return hydrate.call(this, parsed);
|
||||
if (!Array.isArray(parsed) || !parsed.length)
|
||||
throw new SyntaxError();
|
||||
const startIndex = values.length;
|
||||
for (const value of parsed) {
|
||||
values.push(value);
|
||||
}
|
||||
hydrated.length = values.length;
|
||||
return hydrate.call(this, startIndex);
|
||||
}
|
||||
function hydrate(index) {
|
||||
const { hydrated, values, deferred, plugins } = this;
|
||||
let result;
|
||||
const stack = [
|
||||
[
|
||||
index,
|
||||
(v) => {
|
||||
result = v;
|
||||
}
|
||||
]
|
||||
];
|
||||
let postRun = [];
|
||||
while (stack.length > 0) {
|
||||
const [index2, set] = stack.pop();
|
||||
switch (index2) {
|
||||
case UNDEFINED:
|
||||
set(void 0);
|
||||
continue;
|
||||
case NULL:
|
||||
set(null);
|
||||
continue;
|
||||
case NAN:
|
||||
set(NaN);
|
||||
continue;
|
||||
case POSITIVE_INFINITY:
|
||||
set(Infinity);
|
||||
continue;
|
||||
case NEGATIVE_INFINITY:
|
||||
set(-Infinity);
|
||||
continue;
|
||||
case NEGATIVE_ZERO:
|
||||
set(-0);
|
||||
continue;
|
||||
}
|
||||
if (hydrated[index2]) {
|
||||
set(hydrated[index2]);
|
||||
continue;
|
||||
}
|
||||
const value = values[index2];
|
||||
if (!value || typeof value !== "object") {
|
||||
hydrated[index2] = value;
|
||||
set(value);
|
||||
continue;
|
||||
}
|
||||
if (Array.isArray(value)) {
|
||||
if (typeof value[0] === "string") {
|
||||
const [type, b, c] = value;
|
||||
switch (type) {
|
||||
case TYPE_DATE:
|
||||
set(hydrated[index2] = new Date(b));
|
||||
continue;
|
||||
case TYPE_URL:
|
||||
set(hydrated[index2] = new URL(b));
|
||||
continue;
|
||||
case TYPE_BIGINT:
|
||||
set(hydrated[index2] = BigInt(b));
|
||||
continue;
|
||||
case TYPE_REGEXP:
|
||||
set(hydrated[index2] = new RegExp(b, c));
|
||||
continue;
|
||||
case TYPE_SYMBOL:
|
||||
set(hydrated[index2] = Symbol.for(b));
|
||||
continue;
|
||||
case TYPE_SET:
|
||||
const newSet = /* @__PURE__ */ new Set();
|
||||
hydrated[index2] = newSet;
|
||||
for (let i = 1; i < value.length; i++)
|
||||
stack.push([
|
||||
value[i],
|
||||
(v) => {
|
||||
newSet.add(v);
|
||||
}
|
||||
]);
|
||||
set(newSet);
|
||||
continue;
|
||||
case TYPE_MAP:
|
||||
const map = /* @__PURE__ */ new Map();
|
||||
hydrated[index2] = map;
|
||||
for (let i = 1; i < value.length; i += 2) {
|
||||
const r = [];
|
||||
stack.push([
|
||||
value[i + 1],
|
||||
(v) => {
|
||||
r[1] = v;
|
||||
}
|
||||
]);
|
||||
stack.push([
|
||||
value[i],
|
||||
(k) => {
|
||||
r[0] = k;
|
||||
}
|
||||
]);
|
||||
postRun.push(() => {
|
||||
map.set(r[0], r[1]);
|
||||
});
|
||||
}
|
||||
set(map);
|
||||
continue;
|
||||
case TYPE_NULL_OBJECT:
|
||||
const obj = /* @__PURE__ */ Object.create(null);
|
||||
hydrated[index2] = obj;
|
||||
for (const key of Object.keys(b).reverse()) {
|
||||
const r = [];
|
||||
stack.push([
|
||||
b[key],
|
||||
(v) => {
|
||||
r[1] = v;
|
||||
}
|
||||
]);
|
||||
stack.push([
|
||||
Number(key.slice(1)),
|
||||
(k) => {
|
||||
r[0] = k;
|
||||
}
|
||||
]);
|
||||
postRun.push(() => {
|
||||
obj[r[0]] = r[1];
|
||||
});
|
||||
}
|
||||
set(obj);
|
||||
continue;
|
||||
case TYPE_PROMISE:
|
||||
if (hydrated[b]) {
|
||||
set(hydrated[index2] = hydrated[b]);
|
||||
} else {
|
||||
const d = new Deferred();
|
||||
deferred[b] = d;
|
||||
set(hydrated[index2] = d.promise);
|
||||
}
|
||||
continue;
|
||||
case TYPE_ERROR:
|
||||
const [, message, errorType] = value;
|
||||
let error = errorType && globalObj && globalObj[errorType] ? new globalObj[errorType](message) : new Error(message);
|
||||
hydrated[index2] = error;
|
||||
set(error);
|
||||
continue;
|
||||
case TYPE_PREVIOUS_RESOLVED:
|
||||
set(hydrated[index2] = hydrated[b]);
|
||||
continue;
|
||||
default:
|
||||
if (Array.isArray(plugins)) {
|
||||
const r = [];
|
||||
const vals = value.slice(1);
|
||||
for (let i = 0; i < vals.length; i++) {
|
||||
const v = vals[i];
|
||||
stack.push([
|
||||
v,
|
||||
(v2) => {
|
||||
r[i] = v2;
|
||||
}
|
||||
]);
|
||||
}
|
||||
postRun.push(() => {
|
||||
for (const plugin of plugins) {
|
||||
const result2 = plugin(value[0], ...r);
|
||||
if (result2) {
|
||||
set(hydrated[index2] = result2.value);
|
||||
return;
|
||||
}
|
||||
}
|
||||
throw new SyntaxError();
|
||||
});
|
||||
continue;
|
||||
}
|
||||
throw new SyntaxError();
|
||||
}
|
||||
} else {
|
||||
const array = [];
|
||||
hydrated[index2] = array;
|
||||
for (let i = 0; i < value.length; i++) {
|
||||
const n = value[i];
|
||||
if (n !== HOLE) {
|
||||
stack.push([
|
||||
n,
|
||||
(v) => {
|
||||
array[i] = v;
|
||||
}
|
||||
]);
|
||||
}
|
||||
}
|
||||
set(array);
|
||||
continue;
|
||||
}
|
||||
} else {
|
||||
const object = {};
|
||||
hydrated[index2] = object;
|
||||
for (const key of Object.keys(value).reverse()) {
|
||||
const r = [];
|
||||
stack.push([
|
||||
value[key],
|
||||
(v) => {
|
||||
r[1] = v;
|
||||
}
|
||||
]);
|
||||
stack.push([
|
||||
Number(key.slice(1)),
|
||||
(k) => {
|
||||
r[0] = k;
|
||||
}
|
||||
]);
|
||||
postRun.push(() => {
|
||||
object[r[0]] = r[1];
|
||||
});
|
||||
}
|
||||
set(object);
|
||||
continue;
|
||||
}
|
||||
}
|
||||
while (postRun.length > 0) {
|
||||
postRun.pop()();
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
// src/turbo-stream.ts
|
||||
async function decode(readable, options) {
|
||||
const { plugins } = options ?? {};
|
||||
const done = new Deferred();
|
||||
const reader = readable.pipeThrough(createLineSplittingTransform()).getReader();
|
||||
const decoder = {
|
||||
values: [],
|
||||
hydrated: [],
|
||||
deferred: {},
|
||||
plugins
|
||||
};
|
||||
const decoded = await decodeInitial.call(decoder, reader);
|
||||
let donePromise = done.promise;
|
||||
if (decoded.done) {
|
||||
done.resolve();
|
||||
} else {
|
||||
donePromise = decodeDeferred.call(decoder, reader).then(done.resolve).catch((reason) => {
|
||||
for (const deferred of Object.values(decoder.deferred)) {
|
||||
deferred.reject(reason);
|
||||
}
|
||||
done.reject(reason);
|
||||
});
|
||||
}
|
||||
return {
|
||||
done: donePromise.then(() => reader.closed),
|
||||
value: decoded.value
|
||||
};
|
||||
}
|
||||
async function decodeInitial(reader) {
|
||||
const read = await reader.read();
|
||||
if (!read.value) {
|
||||
throw new SyntaxError();
|
||||
}
|
||||
let line;
|
||||
try {
|
||||
line = JSON.parse(read.value);
|
||||
} catch (reason) {
|
||||
throw new SyntaxError();
|
||||
}
|
||||
return {
|
||||
done: read.done,
|
||||
value: unflatten.call(this, line)
|
||||
};
|
||||
}
|
||||
async function decodeDeferred(reader) {
|
||||
let read = await reader.read();
|
||||
while (!read.done) {
|
||||
if (!read.value)
|
||||
continue;
|
||||
const line = read.value;
|
||||
switch (line[0]) {
|
||||
case TYPE_PROMISE: {
|
||||
const colonIndex = line.indexOf(":");
|
||||
const deferredId = Number(line.slice(1, colonIndex));
|
||||
const deferred = this.deferred[deferredId];
|
||||
if (!deferred) {
|
||||
throw new Error(`Deferred ID ${deferredId} not found in stream`);
|
||||
}
|
||||
const lineData = line.slice(colonIndex + 1);
|
||||
let jsonLine;
|
||||
try {
|
||||
jsonLine = JSON.parse(lineData);
|
||||
} catch (reason) {
|
||||
throw new SyntaxError();
|
||||
}
|
||||
const value = unflatten.call(this, jsonLine);
|
||||
deferred.resolve(value);
|
||||
break;
|
||||
}
|
||||
case TYPE_ERROR: {
|
||||
const colonIndex = line.indexOf(":");
|
||||
const deferredId = Number(line.slice(1, colonIndex));
|
||||
const deferred = this.deferred[deferredId];
|
||||
if (!deferred) {
|
||||
throw new Error(`Deferred ID ${deferredId} not found in stream`);
|
||||
}
|
||||
const lineData = line.slice(colonIndex + 1);
|
||||
let jsonLine;
|
||||
try {
|
||||
jsonLine = JSON.parse(lineData);
|
||||
} catch (reason) {
|
||||
throw new SyntaxError();
|
||||
}
|
||||
const value = unflatten.call(this, jsonLine);
|
||||
deferred.reject(value);
|
||||
break;
|
||||
}
|
||||
default:
|
||||
throw new SyntaxError();
|
||||
}
|
||||
read = await reader.read();
|
||||
}
|
||||
}
|
||||
function encode(input, options) {
|
||||
const { plugins, postPlugins, signal } = options ?? {};
|
||||
const encoder = {
|
||||
deferred: {},
|
||||
index: 0,
|
||||
indices: /* @__PURE__ */ new Map(),
|
||||
stringified: [],
|
||||
plugins,
|
||||
postPlugins,
|
||||
signal
|
||||
};
|
||||
const textEncoder = new TextEncoder();
|
||||
let lastSentIndex = 0;
|
||||
const readable = new ReadableStream({
|
||||
async start(controller) {
|
||||
const id = flatten.call(encoder, input);
|
||||
if (Array.isArray(id)) {
|
||||
throw new Error("This should never happen");
|
||||
}
|
||||
if (id < 0) {
|
||||
controller.enqueue(textEncoder.encode(`${id}
|
||||
`));
|
||||
} else {
|
||||
controller.enqueue(
|
||||
textEncoder.encode(`[${encoder.stringified.join(",")}]
|
||||
`)
|
||||
);
|
||||
lastSentIndex = encoder.stringified.length - 1;
|
||||
}
|
||||
const seenPromises = /* @__PURE__ */ new WeakSet();
|
||||
while (Object.keys(encoder.deferred).length > 0) {
|
||||
for (const [deferredId, deferred] of Object.entries(encoder.deferred)) {
|
||||
if (seenPromises.has(deferred))
|
||||
continue;
|
||||
seenPromises.add(
|
||||
encoder.deferred[Number(deferredId)] = raceSignal(
|
||||
deferred,
|
||||
encoder.signal
|
||||
).then(
|
||||
(resolved) => {
|
||||
const id2 = flatten.call(encoder, resolved);
|
||||
if (Array.isArray(id2)) {
|
||||
controller.enqueue(
|
||||
textEncoder.encode(
|
||||
`${TYPE_PROMISE}${deferredId}:[["${TYPE_PREVIOUS_RESOLVED}",${id2[0]}]]
|
||||
`
|
||||
)
|
||||
);
|
||||
encoder.index++;
|
||||
lastSentIndex++;
|
||||
} else if (id2 < 0) {
|
||||
controller.enqueue(
|
||||
textEncoder.encode(`${TYPE_PROMISE}${deferredId}:${id2}
|
||||
`)
|
||||
);
|
||||
} else {
|
||||
const values = encoder.stringified.slice(lastSentIndex + 1).join(",");
|
||||
controller.enqueue(
|
||||
textEncoder.encode(
|
||||
`${TYPE_PROMISE}${deferredId}:[${values}]
|
||||
`
|
||||
)
|
||||
);
|
||||
lastSentIndex = encoder.stringified.length - 1;
|
||||
}
|
||||
},
|
||||
(reason) => {
|
||||
if (!reason || typeof reason !== "object" || !(reason instanceof Error)) {
|
||||
reason = new Error("An unknown error occurred");
|
||||
}
|
||||
const id2 = flatten.call(encoder, reason);
|
||||
if (Array.isArray(id2)) {
|
||||
controller.enqueue(
|
||||
textEncoder.encode(
|
||||
`${TYPE_ERROR}${deferredId}:[["${TYPE_PREVIOUS_RESOLVED}",${id2[0]}]]
|
||||
`
|
||||
)
|
||||
);
|
||||
encoder.index++;
|
||||
lastSentIndex++;
|
||||
} else if (id2 < 0) {
|
||||
controller.enqueue(
|
||||
textEncoder.encode(`${TYPE_ERROR}${deferredId}:${id2}
|
||||
`)
|
||||
);
|
||||
} else {
|
||||
const values = encoder.stringified.slice(lastSentIndex + 1).join(",");
|
||||
controller.enqueue(
|
||||
textEncoder.encode(
|
||||
`${TYPE_ERROR}${deferredId}:[${values}]
|
||||
`
|
||||
)
|
||||
);
|
||||
lastSentIndex = encoder.stringified.length - 1;
|
||||
}
|
||||
}
|
||||
).finally(() => {
|
||||
delete encoder.deferred[Number(deferredId)];
|
||||
})
|
||||
);
|
||||
}
|
||||
await Promise.race(Object.values(encoder.deferred));
|
||||
}
|
||||
await Promise.all(Object.values(encoder.deferred));
|
||||
controller.close();
|
||||
}
|
||||
});
|
||||
return readable;
|
||||
}
|
||||
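// Settles with `promise`, but rejects early with signal.reason (or a generic abort error) if the AbortSignal fires first.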
function raceSignal(promise, signal) {
|
||||
if (!signal)
|
||||
return promise;
|
||||
if (signal.aborted)
|
||||
return Promise.reject(signal.reason || new Error("Signal was aborted."));
|
||||
const abort = new Promise((resolve, reject) => {
|
||||
signal.addEventListener("abort", (event) => {
|
||||
reject(signal.reason || new Error("Signal was aborted."));
|
||||
});
|
||||
promise.then(resolve).catch(reject);
|
||||
});
|
||||
abort.catch(() => {
|
||||
});
|
||||
return Promise.race([abort, promise]);
|
||||
}
|
||||
export {
|
||||
decode,
|
||||
encode
|
||||
};
|
||||
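For orientation, a minimal round-trip sketch of the `encode`/`decode` pair exported above (the import path is hypothetical; assumes a runtime with WHATWG streams and top-level `await`, e.g. Node 18+ ESM):

```js
import { decode, encode } from './turbo-stream.js'; // hypothetical path to the module above

const payload = {
  when: new Date(0),
  tags: new Set(['a', 'b']),
  lazy: Promise.resolve('later'),
};

// encode() returns a ReadableStream of newline-delimited chunks.
const stream = encode(payload);

// decode() resolves with the eagerly available value; `done` settles once all
// deferred promises in the payload have streamed through.
const { value, done } = await decode(stream);
console.log(value.when instanceof Date); // true
console.log(value.tags.has('b'));        // true
console.log(await value.lazy);           // "later"
await done;
```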
@@ -0,0 +1,9 @@
import { JSONHydrator } from './postcss.js'

interface FromJSON extends JSONHydrator {
  default: FromJSON
}

declare const fromJSON: FromJSON

export = fromJSON
@@ -0,0 +1,140 @@
# `@humanfs/core`

by [Nicholas C. Zakas](https://humanwhocodes.com)

If you find this useful, please consider supporting my work with a [donation](https://humanwhocodes.com/donate) or [nominate me](https://stars.github.com/nominate/) for a GitHub Star.

## Description

The core functionality for humanfs that is shared across all implementations for all runtimes. The contents of this package are intentionally runtime agnostic and are not intended to be used alone.

Currently, this package simply exports the `Hfs` class, which is an abstract base class intended to be inherited from in runtime-specific hfs packages (like `@humanfs/node`).

> [!WARNING]
> This project is **experimental** and may change significantly before v1.0.0. Use at your own risk and definitely not in production!

## Installation

### Node.js

Install using your favorite package manager for Node.js:

```shell
npm install @humanfs/core

# or

pnpm install @humanfs/core

# or

yarn add @humanfs/core

# or

bun install @humanfs/core
```

Then you can import the `Hfs` and `Path` classes like this:

```js
import { Hfs, Path } from "@humanfs/core";
```

### Deno

Install using [JSR](https://jsr.io):

```shell
deno add @humanfs/core

# or

jsr add @humanfs/core
```

Then you can import the `Hfs` and `Path` classes like this:

```js
import { Hfs, Path } from "@humanfs/core";
```

### Browser

It's recommended to import the minified version to save bandwidth:

```js
import { Hfs, Path } from "https://cdn.skypack.dev/@humanfs/core?min";
```

However, you can also import the unminified version for debugging purposes:

```js
import { Hfs, Path } from "https://cdn.skypack.dev/@humanfs/core";
```

## Usage

### `Hfs` Class

The `Hfs` class contains all of the basic functionality for an `Hfs` instance *without* a predefined impl. This class is mostly used for creating runtime-specific impls, such as `NodeHfs` and `DenoHfs`.

You can create your own instance by providing an `impl` directly:

```js
const hfs = new Hfs({ impl: { async text() {} }});
```

The specified `impl` becomes the base impl for the instance, meaning you can always reset back to it using `resetImpl()`.
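
For example, here is a sketch of swapping the impl at runtime and then restoring the base impl (this assumes the `setImpl()` method described in the humanfs documentation and an async context for `await`):

```js
const hfs = new Hfs({ impl: { async text() { return "real contents"; } } });

// Temporarily swap in a different impl, e.g. an in-memory double for tests.
hfs.setImpl({ async text() { return "fake contents"; } });
console.log(await hfs.text("example.txt")); // "fake contents"

// Restore the base impl that was passed to the constructor.
hfs.resetImpl();
console.log(await hfs.text("example.txt")); // "real contents"
```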

You can also inherit from `Hfs` to create your own class with a preconfigured impl, such as:

```js
class MyHfs extends Hfs {
	constructor() {
		super({
			impl: myImpl
		});
	}
}
```

### `Path` Class

The `Path` class represents the path to a directory or file within a file system. It's an abstract representation that can be used even outside of traditional file systems where string paths might not make sense.

```js
const myPath = new Path(["dir", "subdir"]);
console.log(myPath.toString()); // "dir/subdir"

// add another step
myPath.push("file.txt");
console.log(myPath.toString()); // "dir/subdir/file.txt"

// get just the last step
console.log(myPath.name); // "file.txt"

// change just the last step
myPath.name = "file.json";
console.log(myPath.name); // "file.json"
console.log(myPath.toString()); // "dir/subdir/file.json"

// get the size of the path
console.log(myPath.size); // 3

// remove the last step
myPath.pop();
console.log(myPath.toString()); // "dir/subdir"

// iterate over the steps
for (const step of myPath) {
	// do something
}

// create a new path from a string
const newPath = Path.fromString("/foo/bar");
```

## License

Apache 2.0
@@ -0,0 +1 @@
|
||||
{"version":3,"names":["_index","require","removeComments","node","COMMENT_KEYS","forEach","key"],"sources":["../../src/comments/removeComments.ts"],"sourcesContent":["import { COMMENT_KEYS } from \"../constants/index.ts\";\nimport type * as t from \"../index.ts\";\n\n/**\n * Remove comment properties from a node.\n */\nexport default function removeComments<T extends t.Node>(node: T): T {\n COMMENT_KEYS.forEach(key => {\n node[key] = null;\n });\n\n return node;\n}\n"],"mappings":";;;;;;AAAA,IAAAA,MAAA,GAAAC,OAAA;AAMe,SAASC,cAAcA,CAAmBC,IAAO,EAAK;EACnEC,mBAAY,CAACC,OAAO,CAACC,GAAG,IAAI;IAC1BH,IAAI,CAACG,GAAG,CAAC,GAAG,IAAI;EAClB,CAAC,CAAC;EAEF,OAAOH,IAAI;AACb","ignoreList":[]}
|
||||
@@ -0,0 +1,93 @@
|
||||
// Copyright (c) 2010 LearnBoost <tj@learnboost.com>
|
||||
|
||||
#pragma once
|
||||
|
||||
#include "Canvas.h"
|
||||
|
||||
#ifdef HAVE_JPEG
|
||||
#include <jpeglib.h>
|
||||
#endif
|
||||
|
||||
#include <napi.h>
|
||||
#include <png.h>
|
||||
#include <stdint.h> // node < 7 uses libstdc++ on macOS which lacks complete c++11
|
||||
#include <vector>
|
||||
|
||||
#ifndef PAGE_SIZE
|
||||
#define PAGE_SIZE 4096
|
||||
#endif
|
||||
|
||||
/*
|
||||
* Image encoding closures.
|
||||
*/
|
||||
|
||||
struct Closure {
|
||||
std::vector<uint8_t> vec;
|
||||
Napi::FunctionReference cb;
|
||||
Canvas* canvas = nullptr;
|
||||
cairo_status_t status = CAIRO_STATUS_SUCCESS;
|
||||
|
||||
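// Cairo write callback: appends the encoded bytes to `vec`, reporting CAIRO_STATUS_NO_MEMORY if the vector cannot grow.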
static cairo_status_t writeVec(void *c, const uint8_t *odata, unsigned len) {
|
||||
Closure* closure = static_cast<Closure*>(c);
|
||||
try {
|
||||
closure->vec.insert(closure->vec.end(), odata, odata + len);
|
||||
} catch (const std::bad_alloc &) {
|
||||
return CAIRO_STATUS_NO_MEMORY;
|
||||
}
|
||||
return CAIRO_STATUS_SUCCESS;
|
||||
}
|
||||
|
||||
Closure(Canvas* canvas) : canvas(canvas) {};
|
||||
};
|
||||
|
||||
struct PdfSvgClosure : Closure {
|
||||
PdfSvgClosure(Canvas* canvas) : Closure(canvas) {};
|
||||
};
|
||||
|
||||
struct PngClosure : Closure {
|
||||
uint32_t compressionLevel = 6;
|
||||
uint32_t filters = PNG_ALL_FILTERS;
|
||||
uint32_t resolution = 0; // 0 = unspecified
|
||||
// Indexed PNGs:
|
||||
uint32_t nPaletteColors = 0;
|
||||
uint8_t* palette = nullptr;
|
||||
uint8_t backgroundIndex = 0;
|
||||
|
||||
PngClosure(Canvas* canvas) : Closure(canvas) {};
|
||||
};
|
||||
|
||||
#ifdef HAVE_JPEG
|
||||
struct JpegClosure : Closure {
|
||||
uint32_t quality = 75;
|
||||
uint32_t chromaSubsampling = 2;
|
||||
bool progressive = false;
|
||||
jpeg_destination_mgr* jpeg_dest_mgr = nullptr;
|
||||
|
||||
static void init_destination(j_compress_ptr cinfo);
|
||||
static boolean empty_output_buffer(j_compress_ptr cinfo);
|
||||
static void term_destination(j_compress_ptr cinfo);
|
||||
|
||||
JpegClosure(Canvas* canvas) : Closure(canvas) {
|
||||
jpeg_dest_mgr = new jpeg_destination_mgr;
|
||||
jpeg_dest_mgr->init_destination = init_destination;
|
||||
jpeg_dest_mgr->empty_output_buffer = empty_output_buffer;
|
||||
jpeg_dest_mgr->term_destination = term_destination;
|
||||
};
|
||||
|
||||
~JpegClosure() {
|
||||
delete jpeg_dest_mgr;
|
||||
}
|
||||
};
|
||||
#endif
|
||||
|
||||
class EncodingWorker : public Napi::AsyncWorker {
|
||||
public:
|
||||
EncodingWorker(Napi::Env env): Napi::AsyncWorker(env) {};
|
||||
void Init(void (*work_fn)(Closure*), Closure* closure);
|
||||
void Execute() override;
|
||||
void OnWorkComplete(Napi::Env env, napi_status status) override;
|
||||
|
||||
private:
|
||||
void (*work_fn)(Closure*) = nullptr;
|
||||
Closure* closure = nullptr;
|
||||
};
|
||||
@@ -0,0 +1,105 @@
|
||||
/**
|
||||
* @fileoverview Rule to check for spaced function application
|
||||
* @author Matt DuVall <http://www.mattduvall.com>
|
||||
* @deprecated in ESLint v3.3.0
|
||||
*/
|
||||
|
||||
"use strict";
|
||||
|
||||
//------------------------------------------------------------------------------
|
||||
// Rule Definition
|
||||
//------------------------------------------------------------------------------
|
||||
|
||||
/** @type {import('../shared/types').Rule} */
|
||||
module.exports = {
|
||||
meta: {
|
||||
type: "layout",
|
||||
|
||||
docs: {
|
||||
description:
|
||||
"Disallow spacing between function identifiers and their applications (deprecated)",
|
||||
recommended: false,
|
||||
url: "https://eslint.org/docs/latest/rules/no-spaced-func",
|
||||
},
|
||||
|
||||
deprecated: {
|
||||
message: "Formatting rules are being moved out of ESLint core.",
|
||||
url: "https://eslint.org/blog/2016/08/eslint-v3.3.0-released/#deprecated-rules",
|
||||
deprecatedSince: "3.3.0",
|
||||
availableUntil: "10.0.0",
|
||||
replacedBy: [
|
||||
{
|
||||
message:
|
||||
"ESLint Stylistic now maintains deprecated stylistic core rules.",
|
||||
url: "https://eslint.style/guide/migration",
|
||||
plugin: {
|
||||
name: "@stylistic/eslint-plugin-js",
|
||||
url: "https://eslint.style/packages/js",
|
||||
},
|
||||
rule: {
|
||||
name: "function-call-spacing",
|
||||
url: "https://eslint.style/rules/js/function-call-spacing",
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
|
||||
fixable: "whitespace",
|
||||
schema: [],
|
||||
|
||||
messages: {
|
||||
noSpacedFunction:
|
||||
"Unexpected space between function name and paren.",
|
||||
},
|
||||
},
|
||||
|
||||
create(context) {
|
||||
const sourceCode = context.sourceCode;
|
||||
|
||||
/**
|
||||
* Check if open space is present in a function name
|
||||
* @param {ASTNode} node node to evaluate
|
||||
* @returns {void}
|
||||
* @private
|
||||
*/
|
||||
function detectOpenSpaces(node) {
|
||||
const lastCalleeToken = sourceCode.getLastToken(node.callee);
|
||||
let prevToken = lastCalleeToken,
|
||||
parenToken = sourceCode.getTokenAfter(lastCalleeToken);
|
||||
|
||||
// advances to an open parenthesis.
|
||||
while (
|
||||
parenToken &&
|
||||
parenToken.range[1] < node.range[1] &&
|
||||
parenToken.value !== "("
|
||||
) {
|
||||
prevToken = parenToken;
|
||||
parenToken = sourceCode.getTokenAfter(parenToken);
|
||||
}
|
||||
|
||||
// look for a space between the callee and the open paren
|
||||
if (
|
||||
parenToken &&
|
||||
parenToken.range[1] < node.range[1] &&
|
||||
sourceCode.isSpaceBetweenTokens(prevToken, parenToken)
|
||||
) {
|
||||
context.report({
|
||||
node,
|
||||
loc: lastCalleeToken.loc.start,
|
||||
messageId: "noSpacedFunction",
|
||||
fix(fixer) {
|
||||
return fixer.removeRange([
|
||||
prevToken.range[1],
|
||||
parenToken.range[0],
|
||||
]);
|
||||
},
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
CallExpression: detectOpenSpaces,
|
||||
NewExpression: detectOpenSpaces,
|
||||
};
|
||||
},
|
||||
};
|
||||
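A small sketch of what this rule reports and auto-fixes (rule name and message as defined above; the surrounding ESLint configuration is assumed):

```js
/* eslint no-spaced-func: "error" */

alert ("spaced");   // reported: "Unexpected space between function name and paren." — fixed to alert("spaced");
new Date ();        // also reported — NewExpression goes through the same check
alert("ok");        // passes
```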
@@ -0,0 +1,55 @@
|
||||
{{# def.definitions }}
|
||||
{{# def.errors }}
|
||||
{{# def.setupKeyword }}
|
||||
{{# def.setupNextLevel }}
|
||||
|
||||
|
||||
{{
|
||||
var $idx = 'i' + $lvl
|
||||
, $dataNxt = $it.dataLevel = it.dataLevel + 1
|
||||
, $nextData = 'data' + $dataNxt
|
||||
, $currentBaseId = it.baseId
|
||||
, $nonEmptySchema = {{# def.nonEmptySchema:$schema }};
|
||||
}}
|
||||
|
||||
var {{=$errs}} = errors;
|
||||
var {{=$valid}};
|
||||
|
||||
{{? $nonEmptySchema }}
|
||||
{{# def.setCompositeRule }}
|
||||
|
||||
{{
|
||||
$it.schema = $schema;
|
||||
$it.schemaPath = $schemaPath;
|
||||
$it.errSchemaPath = $errSchemaPath;
|
||||
}}
|
||||
|
||||
var {{=$nextValid}} = false;
|
||||
|
||||
for (var {{=$idx}} = 0; {{=$idx}} < {{=$data}}.length; {{=$idx}}++) {
|
||||
{{
|
||||
$it.errorPath = it.util.getPathExpr(it.errorPath, $idx, it.opts.jsonPointers, true);
|
||||
var $passData = $data + '[' + $idx + ']';
|
||||
$it.dataPathArr[$dataNxt] = $idx;
|
||||
}}
|
||||
|
||||
{{# def.generateSubschemaCode }}
|
||||
{{# def.optimizeValidate }}
|
||||
|
||||
if ({{=$nextValid}}) break;
|
||||
}
|
||||
|
||||
{{# def.resetCompositeRule }}
|
||||
{{= $closingBraces }}
|
||||
|
||||
if (!{{=$nextValid}}) {
|
||||
{{??}}
|
||||
if ({{=$data}}.length == 0) {
|
||||
{{?}}
|
||||
|
||||
{{# def.error:'contains' }}
|
||||
} else {
|
||||
{{? $nonEmptySchema }}
|
||||
{{# def.resetErrors }}
|
||||
{{?}}
|
||||
{{? it.opts.allErrors }} } {{?}}
|
||||
@@ -0,0 +1,79 @@
|
||||
{{# def.definitions }}
|
||||
{{# def.errors }}
|
||||
{{# def.missing }}
|
||||
{{# def.setupKeyword }}
|
||||
{{# def.setupNextLevel }}
|
||||
|
||||
|
||||
{{## def.propertyInData:
|
||||
{{=$data}}{{= it.util.getProperty($property) }} !== undefined
|
||||
{{? $ownProperties }}
|
||||
&& Object.prototype.hasOwnProperty.call({{=$data}}, '{{=it.util.escapeQuotes($property)}}')
|
||||
{{?}}
|
||||
#}}
|
||||
|
||||
|
||||
{{
|
||||
var $schemaDeps = {}
|
||||
, $propertyDeps = {}
|
||||
, $ownProperties = it.opts.ownProperties;
|
||||
|
||||
for ($property in $schema) {
|
||||
if ($property == '__proto__') continue;
|
||||
var $sch = $schema[$property];
|
||||
var $deps = Array.isArray($sch) ? $propertyDeps : $schemaDeps;
|
||||
$deps[$property] = $sch;
|
||||
}
|
||||
}}
|
||||
|
||||
var {{=$errs}} = errors;
|
||||
|
||||
{{ var $currentErrorPath = it.errorPath; }}
|
||||
|
||||
var missing{{=$lvl}};
|
||||
{{ for (var $property in $propertyDeps) { }}
|
||||
{{ $deps = $propertyDeps[$property]; }}
|
||||
{{? $deps.length }}
|
||||
if ({{# def.propertyInData }}
|
||||
{{? $breakOnError }}
|
||||
&& ({{# def.checkMissingProperty:$deps }})) {
|
||||
{{# def.errorMissingProperty:'dependencies' }}
|
||||
{{??}}
|
||||
) {
|
||||
{{~ $deps:$propertyKey }}
|
||||
{{# def.allErrorsMissingProperty:'dependencies' }}
|
||||
{{~}}
|
||||
{{?}}
|
||||
} {{# def.elseIfValid }}
|
||||
{{?}}
|
||||
{{ } }}
|
||||
|
||||
{{
|
||||
it.errorPath = $currentErrorPath;
|
||||
var $currentBaseId = $it.baseId;
|
||||
}}
|
||||
|
||||
|
||||
{{ for (var $property in $schemaDeps) { }}
|
||||
{{ var $sch = $schemaDeps[$property]; }}
|
||||
{{? {{# def.nonEmptySchema:$sch }} }}
|
||||
{{=$nextValid}} = true;
|
||||
|
||||
if ({{# def.propertyInData }}) {
|
||||
{{
|
||||
$it.schema = $sch;
|
||||
$it.schemaPath = $schemaPath + it.util.getProperty($property);
|
||||
$it.errSchemaPath = $errSchemaPath + '/' + it.util.escapeFragment($property);
|
||||
}}
|
||||
|
||||
{{# def.insertSubschemaCode }}
|
||||
}
|
||||
|
||||
{{# def.ifResultValid }}
|
||||
{{?}}
|
||||
{{ } }}
|
||||
|
||||
{{? $breakOnError }}
|
||||
{{= $closingBraces }}
|
||||
if ({{=$errs}} == errors) {
|
||||
{{?}}
|
||||
@@ -0,0 +1,21 @@
The MIT License

Copyright (c) 2015 Rich Harris

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
@@ -0,0 +1,70 @@
|
||||
/**
|
||||
* @fileoverview Rule to enforce description with the `Symbol` object
|
||||
* @author Jarek Rencz
|
||||
*/
|
||||
|
||||
"use strict";
|
||||
|
||||
//------------------------------------------------------------------------------
|
||||
// Requirements
|
||||
//------------------------------------------------------------------------------
|
||||
|
||||
const astUtils = require("./utils/ast-utils");
|
||||
|
||||
//------------------------------------------------------------------------------
|
||||
// Rule Definition
|
||||
//------------------------------------------------------------------------------
|
||||
|
||||
/** @type {import('../shared/types').Rule} */
|
||||
module.exports = {
|
||||
meta: {
|
||||
type: "suggestion",
|
||||
|
||||
docs: {
|
||||
description: "Require symbol descriptions",
|
||||
recommended: false,
|
||||
url: "https://eslint.org/docs/latest/rules/symbol-description",
|
||||
},
|
||||
fixable: null,
|
||||
schema: [],
|
||||
messages: {
|
||||
expected: "Expected Symbol to have a description.",
|
||||
},
|
||||
},
|
||||
|
||||
create(context) {
|
||||
const sourceCode = context.sourceCode;
|
||||
|
||||
/**
|
||||
* Reports if node does not conform the rule in case rule is set to
|
||||
* report missing description
|
||||
* @param {ASTNode} node A CallExpression node to check.
|
||||
* @returns {void}
|
||||
*/
|
||||
function checkArgument(node) {
|
||||
if (node.arguments.length === 0) {
|
||||
context.report({
|
||||
node,
|
||||
messageId: "expected",
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
"Program:exit"(node) {
|
||||
const scope = sourceCode.getScope(node);
|
||||
const variable = astUtils.getVariableByName(scope, "Symbol");
|
||||
|
||||
if (variable && variable.defs.length === 0) {
|
||||
variable.references.forEach(reference => {
|
||||
const idNode = reference.identifier;
|
||||
|
||||
if (astUtils.isCallee(idNode)) {
|
||||
checkArgument(idNode.parent);
|
||||
}
|
||||
});
|
||||
}
|
||||
},
|
||||
};
|
||||
},
|
||||
};
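A brief sketch of the cases this rule distinguishes (assuming it is enabled as `symbol-description: "error"`):

```js
/* eslint symbol-description: "error" */

const missing = Symbol();            // reported: "Expected Symbol to have a description."
const named = Symbol("cache key");   // passes — a description argument is present
const reused = Symbol.for("app.id"); // not checked — only direct calls to the global `Symbol` are inspected
```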