update
@@ -0,0 +1,452 @@
|
||||
/**
|
||||
* @fileoverview The main file for the hfs package.
|
||||
* @author Nicholas C. Zakas
|
||||
*/
|
||||
/* global Buffer:readonly, URL */
|
||||
|
||||
//-----------------------------------------------------------------------------
|
||||
// Types
|
||||
//-----------------------------------------------------------------------------
|
||||
|
||||
/** @typedef {import("@humanfs/types").HfsImpl} HfsImpl */
|
||||
/** @typedef {import("@humanfs/types").HfsDirectoryEntry} HfsDirectoryEntry */
|
||||
/** @typedef {import("node:fs/promises")} Fsp */
|
||||
/** @typedef {import("fs").Dirent} Dirent */
|
||||
|
||||
//-----------------------------------------------------------------------------
|
||||
// Imports
|
||||
//-----------------------------------------------------------------------------
|
||||
|
||||
import { Hfs } from "@humanfs/core";
|
||||
import path from "node:path";
|
||||
import { Retrier } from "@humanwhocodes/retry";
|
||||
import nativeFsp from "node:fs/promises";
|
||||
import { fileURLToPath } from "node:url";
|
||||
|
||||
//-----------------------------------------------------------------------------
|
||||
// Constants
|
||||
//-----------------------------------------------------------------------------
|
||||
|
||||
const RETRY_ERROR_CODES = new Set(["ENFILE", "EMFILE"]);
|
||||
|
||||
//-----------------------------------------------------------------------------
|
||||
// Helpers
|
||||
//-----------------------------------------------------------------------------
|
||||
|
||||
/**
|
||||
* A class representing a directory entry.
|
||||
* @implements {HfsDirectoryEntry}
|
||||
*/
|
||||
class NodeHfsDirectoryEntry {
|
||||
/**
|
||||
* The name of the directory entry.
|
||||
* @type {string}
|
||||
*/
|
||||
name;
|
||||
|
||||
/**
|
||||
* True if the entry is a file.
|
||||
* @type {boolean}
|
||||
*/
|
||||
isFile;
|
||||
|
||||
/**
|
||||
* True if the entry is a directory.
|
||||
* @type {boolean}
|
||||
*/
|
||||
isDirectory;
|
||||
|
||||
/**
|
||||
* True if the entry is a symbolic link.
|
||||
* @type {boolean}
|
||||
*/
|
||||
isSymlink;
|
||||
|
||||
/**
|
||||
* Creates a new instance.
|
||||
* @param {Dirent} dirent The directory entry to wrap.
|
||||
*/
|
||||
constructor(dirent) {
|
||||
this.name = dirent.name;
|
||||
this.isFile = dirent.isFile();
|
||||
this.isDirectory = dirent.isDirectory();
|
||||
this.isSymlink = dirent.isSymbolicLink();
|
||||
}
|
||||
}
|
||||
|
||||
//-----------------------------------------------------------------------------
|
||||
// Exports
|
||||
//-----------------------------------------------------------------------------
|
||||
|
||||
/**
|
||||
* A class representing the Node.js implementation of Hfs.
|
||||
* @implements {HfsImpl}
|
||||
*/
|
||||
export class NodeHfsImpl {
|
||||
/**
|
||||
* The file system module to use.
|
||||
* @type {Fsp}
|
||||
*/
|
||||
#fsp;
|
||||
|
||||
/**
|
||||
* The retryer object used for retrying operations.
|
||||
* @type {Retrier}
|
||||
*/
|
||||
#retrier;
|
||||
|
||||
/**
|
||||
* Creates a new instance.
|
||||
* @param {object} [options] The options for the instance.
|
||||
* @param {Fsp} [options.fsp] The file system module to use.
|
||||
*/
|
||||
constructor({ fsp = nativeFsp } = {}) {
|
||||
this.#fsp = fsp;
|
||||
this.#retrier = new Retrier(error => RETRY_ERROR_CODES.has(error.code));
|
||||
}
|
||||
|
||||
/**
|
||||
* Reads a file and returns the contents as a Uint8Array.
|
||||
* @param {string|URL} filePath The path to the file to read.
|
||||
* @returns {Promise<Uint8Array|undefined>} A promise that resolves with the contents
|
||||
* of the file or undefined if the file doesn't exist.
|
||||
* @throws {Error} If the file cannot be read.
|
||||
* @throws {TypeError} If the file path is not a string.
|
||||
*/
|
||||
bytes(filePath) {
|
||||
return this.#retrier
|
||||
.retry(() => this.#fsp.readFile(filePath))
|
||||
.then(buffer => new Uint8Array(buffer.buffer))
|
||||
.catch(error => {
|
||||
if (error.code === "ENOENT") {
|
||||
return undefined;
|
||||
}
|
||||
|
||||
throw error;
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Writes a value to a file. If the value is a string, UTF-8 encoding is used.
|
||||
* @param {string|URL} filePath The path to the file to write.
|
||||
* @param {Uint8Array} contents The contents to write to the
|
||||
* file.
|
||||
* @returns {Promise<void>} A promise that resolves when the file is
|
||||
* written.
|
||||
* @throws {TypeError} If the file path is not a string.
|
||||
* @throws {Error} If the file cannot be written.
|
||||
*/
|
||||
async write(filePath, contents) {
|
||||
const value = Buffer.from(contents);
|
||||
|
||||
return this.#retrier
|
||||
.retry(() => this.#fsp.writeFile(filePath, value))
|
||||
.catch(error => {
|
||||
// the directory may not exist, so create it
|
||||
if (error.code === "ENOENT") {
|
||||
const dirPath = path.dirname(
|
||||
filePath instanceof URL
|
||||
? fileURLToPath(filePath)
|
||||
: filePath,
|
||||
);
|
||||
|
||||
return this.#fsp
|
||||
.mkdir(dirPath, { recursive: true })
|
||||
.then(() => this.#fsp.writeFile(filePath, value));
|
||||
}
|
||||
|
||||
throw error;
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Appends a value to a file. If the value is a string, UTF-8 encoding is used.
|
||||
* @param {string|URL} filePath The path to the file to append to.
|
||||
* @param {Uint8Array} contents The contents to append to the
|
||||
* file.
|
||||
* @returns {Promise<void>} A promise that resolves when the file is
|
||||
* written.
|
||||
* @throws {TypeError} If the file path is not a string.
|
||||
* @throws {Error} If the file cannot be appended to.
|
||||
*/
|
||||
async append(filePath, contents) {
|
||||
const value = Buffer.from(contents);
|
||||
|
||||
return this.#retrier
|
||||
.retry(() => this.#fsp.appendFile(filePath, value))
|
||||
.catch(error => {
|
||||
// the directory may not exist, so create it
|
||||
if (error.code === "ENOENT") {
|
||||
const dirPath = path.dirname(
|
||||
filePath instanceof URL
|
||||
? fileURLToPath(filePath)
|
||||
: filePath,
|
||||
);
|
||||
|
||||
return this.#fsp
|
||||
.mkdir(dirPath, { recursive: true })
|
||||
.then(() => this.#fsp.appendFile(filePath, value));
|
||||
}
|
||||
|
||||
throw error;
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Checks if a file exists.
|
||||
* @param {string|URL} filePath The path to the file to check.
|
||||
* @returns {Promise<boolean>} A promise that resolves with true if the
|
||||
* file exists or false if it does not.
|
||||
* @throws {Error} If the operation fails with a code other than ENOENT.
|
||||
*/
|
||||
isFile(filePath) {
|
||||
return this.#fsp
|
||||
.stat(filePath)
|
||||
.then(stat => stat.isFile())
|
||||
.catch(error => {
|
||||
if (error.code === "ENOENT") {
|
||||
return false;
|
||||
}
|
||||
|
||||
throw error;
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Checks if a directory exists.
|
||||
* @param {string|URL} dirPath The path to the directory to check.
|
||||
* @returns {Promise<boolean>} A promise that resolves with true if the
|
||||
* directory exists or false if it does not.
|
||||
* @throws {Error} If the operation fails with a code other than ENOENT.
|
||||
*/
|
||||
isDirectory(dirPath) {
|
||||
return this.#fsp
|
||||
.stat(dirPath)
|
||||
.then(stat => stat.isDirectory())
|
||||
.catch(error => {
|
||||
if (error.code === "ENOENT") {
|
||||
return false;
|
||||
}
|
||||
|
||||
throw error;
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a directory recursively.
|
||||
* @param {string|URL} dirPath The path to the directory to create.
|
||||
* @returns {Promise<void>} A promise that resolves when the directory is
|
||||
* created.
|
||||
*/
|
||||
async createDirectory(dirPath) {
|
||||
await this.#fsp.mkdir(dirPath, { recursive: true });
|
||||
}
|
||||
|
||||
/**
|
||||
* Deletes a file or empty directory.
|
||||
* @param {string|URL} fileOrDirPath The path to the file or directory to
|
||||
* delete.
|
||||
* @returns {Promise<boolean>} A promise that resolves when the file or
|
||||
* directory is deleted, true if the file or directory is deleted, false
|
||||
* if the file or directory does not exist.
|
||||
* @throws {TypeError} If the file or directory path is not a string.
|
||||
* @throws {Error} If the file or directory cannot be deleted.
|
||||
*/
|
||||
delete(fileOrDirPath) {
|
||||
return this.#fsp
|
||||
.rm(fileOrDirPath)
|
||||
.then(() => true)
|
||||
.catch(error => {
|
||||
if (error.code === "ERR_FS_EISDIR") {
|
||||
return this.#fsp.rmdir(fileOrDirPath).then(() => true);
|
||||
}
|
||||
|
||||
if (error.code === "ENOENT") {
|
||||
return false;
|
||||
}
|
||||
|
||||
throw error;
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Deletes a file or directory recursively.
|
||||
* @param {string|URL} fileOrDirPath The path to the file or directory to
|
||||
* delete.
|
||||
* @returns {Promise<boolean>} A promise that resolves when the file or
|
||||
* directory is deleted, true if the file or directory is deleted, false
|
||||
* if the file or directory does not exist.
|
||||
* @throws {TypeError} If the file or directory path is not a string.
|
||||
* @throws {Error} If the file or directory cannot be deleted.
|
||||
*/
|
||||
deleteAll(fileOrDirPath) {
|
||||
return this.#fsp
|
||||
.rm(fileOrDirPath, { recursive: true })
|
||||
.then(() => true)
|
||||
.catch(error => {
|
||||
if (error.code === "ENOENT") {
|
||||
return false;
|
||||
}
|
||||
|
||||
throw error;
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns a list of directory entries for the given path.
|
||||
* @param {string|URL} dirPath The path to the directory to read.
|
||||
* @returns {AsyncIterable<HfsDirectoryEntry>} A promise that resolves with the
|
||||
* directory entries.
|
||||
* @throws {TypeError} If the directory path is not a string.
|
||||
* @throws {Error} If the directory cannot be read.
|
||||
*/
|
||||
async *list(dirPath) {
|
||||
const entries = await this.#fsp.readdir(dirPath, {
|
||||
withFileTypes: true,
|
||||
});
|
||||
|
||||
for (const entry of entries) {
|
||||
yield new NodeHfsDirectoryEntry(entry);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the size of a file. This method handles ENOENT errors
|
||||
* and returns undefined in that case.
|
||||
* @param {string|URL} filePath The path to the file to read.
|
||||
* @returns {Promise<number|undefined>} A promise that resolves with the size of the
|
||||
* file in bytes or undefined if the file doesn't exist.
|
||||
*/
|
||||
size(filePath) {
|
||||
return this.#fsp
|
||||
.stat(filePath)
|
||||
.then(stat => stat.size)
|
||||
.catch(error => {
|
||||
if (error.code === "ENOENT") {
|
||||
return undefined;
|
||||
}
|
||||
|
||||
throw error;
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the last modified date of a file or directory. This method handles ENOENT errors
|
||||
* and returns undefined in that case.
|
||||
* @param {string|URL} fileOrDirPath The path to the file to read.
|
||||
* @returns {Promise<Date|undefined>} A promise that resolves with the last modified
|
||||
* date of the file or directory, or undefined if the file doesn't exist.
|
||||
*/
|
||||
lastModified(fileOrDirPath) {
|
||||
return this.#fsp
|
||||
.stat(fileOrDirPath)
|
||||
.then(stat => stat.mtime)
|
||||
.catch(error => {
|
||||
if (error.code === "ENOENT") {
|
||||
return undefined;
|
||||
}
|
||||
|
||||
throw error;
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Copies a file from one location to another.
|
||||
* @param {string|URL} source The path to the file to copy.
|
||||
* @param {string|URL} destination The path to copy the file to.
|
||||
* @returns {Promise<void>} A promise that resolves when the file is copied.
|
||||
* @throws {Error} If the source file does not exist.
|
||||
* @throws {Error} If the source file is a directory.
|
||||
* @throws {Error} If the destination file is a directory.
|
||||
*/
|
||||
copy(source, destination) {
|
||||
return this.#fsp.copyFile(source, destination);
|
||||
}
|
||||
|
||||
/**
|
||||
* Copies a file or directory from one location to another.
|
||||
* @param {string|URL} source The path to the file or directory to copy.
|
||||
* @param {string|URL} destination The path to copy the file or directory to.
|
||||
* @returns {Promise<void>} A promise that resolves when the file or directory is
|
||||
* copied.
|
||||
* @throws {Error} If the source file or directory does not exist.
|
||||
* @throws {Error} If the destination file or directory is a directory.
|
||||
*/
|
||||
async copyAll(source, destination) {
|
||||
// for files use copy() and exit
|
||||
if (await this.isFile(source)) {
|
||||
return this.copy(source, destination);
|
||||
}
|
||||
|
||||
const sourceStr =
|
||||
source instanceof URL ? fileURLToPath(source) : source;
|
||||
|
||||
const destinationStr =
|
||||
destination instanceof URL
|
||||
? fileURLToPath(destination)
|
||||
: destination;
|
||||
|
||||
// for directories, create the destination directory and copy each entry
|
||||
await this.createDirectory(destination);
|
||||
|
||||
for await (const entry of this.list(source)) {
|
||||
const fromEntryPath = path.join(sourceStr, entry.name);
|
||||
const toEntryPath = path.join(destinationStr, entry.name);
|
||||
|
||||
if (entry.isDirectory) {
|
||||
await this.copyAll(fromEntryPath, toEntryPath);
|
||||
} else {
|
||||
await this.copy(fromEntryPath, toEntryPath);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Moves a file from the source path to the destination path.
|
||||
* @param {string|URL} source The location of the file to move.
|
||||
* @param {string|URL} destination The destination of the file to move.
|
||||
* @returns {Promise<void>} A promise that resolves when the move is complete.
|
||||
* @throws {TypeError} If the file paths are not strings.
|
||||
* @throws {Error} If the file cannot be moved.
|
||||
*/
|
||||
move(source, destination) {
|
||||
return this.#fsp.stat(source).then(stat => {
|
||||
if (stat.isDirectory()) {
|
||||
throw new Error(
|
||||
`EISDIR: illegal operation on a directory, move '${source}' -> '${destination}'`,
|
||||
);
|
||||
}
|
||||
|
||||
return this.#fsp.rename(source, destination);
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Moves a file or directory from the source path to the destination path.
|
||||
* @param {string|URL} source The location of the file or directory to move.
|
||||
* @param {string|URL} destination The destination of the file or directory to move.
|
||||
* @returns {Promise<void>} A promise that resolves when the move is complete.
|
||||
* @throws {TypeError} If the file paths are not strings.
|
||||
* @throws {Error} If the file or directory cannot be moved.
|
||||
*/
|
||||
async moveAll(source, destination) {
|
||||
return this.#fsp.rename(source, destination);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* A class representing a file system utility library.
|
||||
* @implements {HfsImpl}
|
||||
*/
|
||||
export class NodeHfs extends Hfs {
|
||||
/**
|
||||
* Creates a new instance.
|
||||
* @param {object} [options] The options for the instance.
|
||||
* @param {Fsp} [options.fsp] The file system module to use.
|
||||
*/
|
||||
constructor({ fsp } = {}) {
|
||||
super({ impl: new NodeHfsImpl({ fsp }) });
|
||||
}
|
||||
}
|
||||
|
||||
export const hfs = new NodeHfs();
|
||||
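// Usage example (an illustrative sketch, not part of this file): the exported
// `hfs` singleton wraps NodeHfsImpl via the core `Hfs` class, so the methods
// defined above are callable on it directly. The file path below is
// hypothetical, and the package name "@humanfs/node" is assumed from the
// @humanfs scope used in the imports.
//
// import { hfs } from "@humanfs/node";
//
// await hfs.write("notes/todo.txt", new TextEncoder().encode("hello"));
// const bytes = await hfs.bytes("notes/todo.txt"); // Uint8Array | undefined
// console.log(await hfs.isFile("notes/todo.txt")); // true
// await hfs.delete("notes/todo.txt");              // resolves true if the file existed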
@@ -0,0 +1 @@
|
||||
module.exports={A:{A:{"2":"K D E F A B mC"},B:{"1":"EB FB GB HB IB JB KB LB MB NB OB I","2":"C L M G N O P Q H R S T U V W X Y Z a b","132":"0 9 c d e f g h i j k l m n o p q r s t u v w x y z AB BB CB DB"},C:{"2":"0 1 2 3 4 5 6 7 8 9 nC LC J PB K D E F A B C L M G N O P QB RB SB TB UB VB WB XB YB ZB aB bB cB dB eB fB gB hB iB jB kB lB mB nB oB pB qB rB sB tB uB vB MC wB NC xB yB zB 0B 1B 2B 3B 4B 5B 6B 7B 8B 9B AC BC CC DC Q H R OC S T U V W X Y Z a b c d e f g h i j k l m n o p q r s t u v w x y z AB BB CB DB EB FB GB HB IB JB KB LB MB NB OB I PC EC QC RC oC pC qC rC"},D:{"1":"EB FB GB HB IB JB KB LB MB NB OB I PC EC QC RC","2":"1 2 3 4 5 6 7 8 J PB K D E F A B C L M G N O P QB RB SB TB UB VB WB XB YB ZB aB bB cB dB eB fB gB hB iB jB kB lB mB nB oB pB qB rB sB tB uB vB MC wB NC xB yB zB 0B 1B 2B 3B 4B 5B 6B 7B 8B 9B AC BC CC DC Q H R S T U V W X Y Z a b","132":"0 9 c d e f g h i j k l m n o p q r s t u v w x y z AB BB CB DB"},E:{"2":"J PB K D E F A B C L M G sC SC tC uC vC wC TC FC GC xC yC zC UC VC HC 0C IC WC XC YC ZC aC 1C JC bC cC dC eC fC 2C KC gC hC iC jC 3C"},F:{"16":"0 1 2 3 4 5 6 7 8 F B C G N O P QB RB SB TB UB VB WB XB YB ZB aB bB cB dB eB fB gB hB iB jB kB lB mB nB oB pB qB rB sB tB uB vB wB xB yB zB 0B 1B 2B 3B 4B 5B 6B 7B 8B 9B AC BC CC DC Q H R OC S T U V W X Y Z a b c d e f g h i j k l m n o p q r s t u v w x y z 4C 5C 6C 7C FC kC 8C GC"},G:{"2":"E SC 9C lC AD BD CD DD ED FD GD HD ID JD KD LD MD ND OD PD QD RD SD UC VC HC TD IC WC XC YC ZC aC UD JC bC cC dC eC fC VD KC gC hC iC jC"},H:{"2":"WD"},I:{"2":"LC J I XD YD ZD aD lC bD cD"},J:{"2":"D A"},K:{"2":"A B C H FC kC GC"},L:{"194":"I"},M:{"2":"EC"},N:{"2":"A B"},O:{"2":"HC"},P:{"2":"1 2 3 4 5 6 7 8 J dD eD fD gD hD TC iD jD kD lD mD IC JC KC nD"},Q:{"2":"oD"},R:{"2":"pD"},S:{"2":"qD rD"}},B:1,C:"CSS Module Scripts",D:false};
|
||||
@@ -0,0 +1,232 @@
|
||||
/**
|
||||
* @fileoverview A rule to ensure whitespace before blocks.
|
||||
* @author Mathias Schreck <https://github.com/lo1tuma>
|
||||
* @deprecated in ESLint v8.53.0
|
||||
*/
|
||||
|
||||
"use strict";
|
||||
|
||||
//------------------------------------------------------------------------------
|
||||
// Requirements
|
||||
//------------------------------------------------------------------------------
|
||||
|
||||
const astUtils = require("./utils/ast-utils");
|
||||
|
||||
//------------------------------------------------------------------------------
|
||||
// Helpers
|
||||
//------------------------------------------------------------------------------
|
||||
|
||||
/**
|
||||
* Checks whether the given node represents the body of a function.
|
||||
* @param {ASTNode} node the node to check.
|
||||
* @returns {boolean} `true` if the node is a function body.
|
||||
*/
|
||||
function isFunctionBody(node) {
|
||||
const parent = node.parent;
|
||||
|
||||
return (
|
||||
node.type === "BlockStatement" &&
|
||||
astUtils.isFunction(parent) &&
|
||||
parent.body === node
|
||||
);
|
||||
}
|
||||
|
||||
//------------------------------------------------------------------------------
|
||||
// Rule Definition
|
||||
//------------------------------------------------------------------------------
|
||||
|
||||
/** @type {import('../shared/types').Rule} */
|
||||
module.exports = {
|
||||
meta: {
|
||||
deprecated: {
|
||||
message: "Formatting rules are being moved out of ESLint core.",
|
||||
url: "https://eslint.org/blog/2023/10/deprecating-formatting-rules/",
|
||||
deprecatedSince: "8.53.0",
|
||||
availableUntil: "10.0.0",
|
||||
replacedBy: [
|
||||
{
|
||||
message:
|
||||
"ESLint Stylistic now maintains deprecated stylistic core rules.",
|
||||
url: "https://eslint.style/guide/migration",
|
||||
plugin: {
|
||||
name: "@stylistic/eslint-plugin-js",
|
||||
url: "https://eslint.style/packages/js",
|
||||
},
|
||||
rule: {
|
||||
name: "space-before-blocks",
|
||||
url: "https://eslint.style/rules/js/space-before-blocks",
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
type: "layout",
|
||||
|
||||
docs: {
|
||||
description: "Enforce consistent spacing before blocks",
|
||||
recommended: false,
|
||||
url: "https://eslint.org/docs/latest/rules/space-before-blocks",
|
||||
},
|
||||
|
||||
fixable: "whitespace",
|
||||
|
||||
schema: [
|
||||
{
|
||||
oneOf: [
|
||||
{
|
||||
enum: ["always", "never"],
|
||||
},
|
||||
{
|
||||
type: "object",
|
||||
properties: {
|
||||
keywords: {
|
||||
enum: ["always", "never", "off"],
|
||||
},
|
||||
functions: {
|
||||
enum: ["always", "never", "off"],
|
||||
},
|
||||
classes: {
|
||||
enum: ["always", "never", "off"],
|
||||
},
|
||||
},
|
||||
additionalProperties: false,
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
|
||||
messages: {
|
||||
unexpectedSpace: "Unexpected space before opening brace.",
|
||||
missingSpace: "Missing space before opening brace.",
|
||||
},
|
||||
},
|
||||
|
||||
create(context) {
|
||||
const config = context.options[0],
|
||||
sourceCode = context.sourceCode;
|
||||
let alwaysFunctions = true,
|
||||
alwaysKeywords = true,
|
||||
alwaysClasses = true,
|
||||
neverFunctions = false,
|
||||
neverKeywords = false,
|
||||
neverClasses = false;
|
||||
|
||||
if (typeof config === "object") {
|
||||
alwaysFunctions = config.functions === "always";
|
||||
alwaysKeywords = config.keywords === "always";
|
||||
alwaysClasses = config.classes === "always";
|
||||
neverFunctions = config.functions === "never";
|
||||
neverKeywords = config.keywords === "never";
|
||||
neverClasses = config.classes === "never";
|
||||
} else if (config === "never") {
|
||||
alwaysFunctions = false;
|
||||
alwaysKeywords = false;
|
||||
alwaysClasses = false;
|
||||
neverFunctions = true;
|
||||
neverKeywords = true;
|
||||
neverClasses = true;
|
||||
}
|
||||
|
||||
/**
|
||||
* Checks whether the spacing before the given block is already controlled by another rule:
|
||||
* - `arrow-spacing` checks spaces after `=>`.
|
||||
* - `keyword-spacing` checks spaces after keywords in certain contexts.
|
||||
* - `switch-colon-spacing` checks spaces after `:` of switch cases.
|
||||
* @param {Token} precedingToken first token before the block.
|
||||
* @param {ASTNode|Token} node `BlockStatement` node or `{` token of a `SwitchStatement` node.
|
||||
* @returns {boolean} `true` if requiring or disallowing spaces before the given block could produce conflicts with other rules.
|
||||
*/
|
||||
function isConflicted(precedingToken, node) {
|
||||
return (
|
||||
astUtils.isArrowToken(precedingToken) ||
|
||||
(astUtils.isKeywordToken(precedingToken) &&
|
||||
!isFunctionBody(node)) ||
|
||||
(astUtils.isColonToken(precedingToken) &&
|
||||
node.parent &&
|
||||
node.parent.type === "SwitchCase" &&
|
||||
precedingToken ===
|
||||
astUtils.getSwitchCaseColonToken(
|
||||
node.parent,
|
||||
sourceCode,
|
||||
))
|
||||
);
|
||||
}
|
||||
|
||||
/**
|
||||
* Checks that the given BlockStatement node has a preceding space if it doesn't start on a new line.
|
||||
* @param {ASTNode|Token} node The AST node of a BlockStatement.
|
||||
* @returns {void} undefined.
|
||||
*/
|
||||
function checkPrecedingSpace(node) {
|
||||
const precedingToken = sourceCode.getTokenBefore(node);
|
||||
|
||||
if (
|
||||
precedingToken &&
|
||||
!isConflicted(precedingToken, node) &&
|
||||
astUtils.isTokenOnSameLine(precedingToken, node)
|
||||
) {
|
||||
const hasSpace = sourceCode.isSpaceBetweenTokens(
|
||||
precedingToken,
|
||||
node,
|
||||
);
|
||||
let requireSpace;
|
||||
let requireNoSpace;
|
||||
|
||||
if (isFunctionBody(node)) {
|
||||
requireSpace = alwaysFunctions;
|
||||
requireNoSpace = neverFunctions;
|
||||
} else if (node.type === "ClassBody") {
|
||||
requireSpace = alwaysClasses;
|
||||
requireNoSpace = neverClasses;
|
||||
} else {
|
||||
requireSpace = alwaysKeywords;
|
||||
requireNoSpace = neverKeywords;
|
||||
}
|
||||
|
||||
if (requireSpace && !hasSpace) {
|
||||
context.report({
|
||||
node,
|
||||
messageId: "missingSpace",
|
||||
fix(fixer) {
|
||||
return fixer.insertTextBefore(node, " ");
|
||||
},
|
||||
});
|
||||
} else if (requireNoSpace && hasSpace) {
|
||||
context.report({
|
||||
node,
|
||||
messageId: "unexpectedSpace",
|
||||
fix(fixer) {
|
||||
return fixer.removeRange([
|
||||
precedingToken.range[1],
|
||||
node.range[0],
|
||||
]);
|
||||
},
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Checks if the CaseBlock of a given SwitchStatement node has a preceding space.
|
||||
* @param {ASTNode} node The node of a SwitchStatement.
|
||||
* @returns {void} undefined.
|
||||
*/
|
||||
function checkSpaceBeforeCaseBlock(node) {
|
||||
const cases = node.cases;
|
||||
let openingBrace;
|
||||
|
||||
if (cases.length > 0) {
|
||||
openingBrace = sourceCode.getTokenBefore(cases[0]);
|
||||
} else {
|
||||
openingBrace = sourceCode.getLastToken(node, 1);
|
||||
}
|
||||
|
||||
checkPrecedingSpace(openingBrace);
|
||||
}
|
||||
|
||||
return {
|
||||
BlockStatement: checkPrecedingSpace,
|
||||
ClassBody: checkPrecedingSpace,
|
||||
SwitchStatement: checkSpaceBeforeCaseBlock,
|
||||
};
|
||||
},
|
||||
};
|
||||
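// Configuration example (hedged sketch): how the rule above might be enabled.
// The option object mirrors the schema defined in this file; note the rule is
// deprecated in ESLint core and now maintained by ESLint Stylistic.
//
// // in an ESLint config "rules" section:
// // "space-before-blocks": ["error", "always"]
// // or, per construct:
// // "space-before-blocks": ["error", { functions: "always", keywords: "always", classes: "never" }]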
@@ -0,0 +1,12 @@
declare type GeneratedColumn = number;
declare type SourcesIndex = number;
declare type SourceLine = number;
declare type SourceColumn = number;
declare type NamesIndex = number;
export declare type SourceMapSegment = [GeneratedColumn] | [GeneratedColumn, SourcesIndex, SourceLine, SourceColumn] | [GeneratedColumn, SourcesIndex, SourceLine, SourceColumn, NamesIndex];
export declare const COLUMN = 0;
export declare const SOURCES_INDEX = 1;
export declare const SOURCE_LINE = 2;
export declare const SOURCE_COLUMN = 3;
export declare const NAMES_INDEX = 4;
export {};
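// Usage example (illustrative): a decoded source-map segment is a tuple indexed
// with the constants above. The sample values are hypothetical.
//
// const segment = [12, 0, 3, 7]; // [genColumn, sourcesIndex, sourceLine, sourceColumn]
// segment[COLUMN];        // 12 — column in the generated file
// segment[SOURCES_INDEX]; // 0  — index into the "sources" array
// segment[SOURCE_LINE];   // 3  — 0-based line in the original source
// segment[SOURCE_COLUMN]; // 7  — 0-based column in the original source
// segment.length === 5 && segment[NAMES_INDEX]; // present only for named mappings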
@@ -0,0 +1 @@
module.exports={A:{A:{"2":"K D E F A B mC"},B:{"1":"0 9 Q H R S T U V W X Y Z a b c d e f g h i j k l m n o p q r s t u v w x y z AB BB CB DB EB FB GB HB IB JB KB LB MB NB OB I","2":"C L M G N O P"},C:{"1":"0 1 2 3 4 5 6 7 8 9 G N O P QB RB SB TB UB VB WB XB YB ZB aB bB cB dB eB fB gB hB iB jB kB lB mB nB oB pB qB rB sB tB uB vB MC wB NC xB yB zB 0B 1B 2B 3B 4B 5B 6B 7B 8B 9B AC BC CC DC Q H R OC S T U V W X Y Z a b c d e f g h i j k l m n o p q r s t u v w x y z AB BB CB DB EB FB GB HB IB JB KB LB MB NB OB I PC EC QC RC oC pC","2":"nC LC J PB K D E F A B C L M qC rC"},D:{"1":"0 1 2 3 4 5 6 7 8 9 G N O P QB RB SB TB UB VB WB XB YB ZB aB bB cB dB eB fB gB hB iB jB kB lB mB nB oB pB qB rB sB tB uB vB MC wB NC xB yB zB 0B 1B 2B 3B 4B 5B 6B 7B 8B 9B AC BC CC DC Q H R S T U V W X Y Z a b c d e f g h i j k l m n o p q r s t u v w x y z AB BB CB DB EB FB GB HB IB JB KB LB MB NB OB I PC EC QC RC","16":"J PB K D E F A B C L M"},E:{"1":"K D E F A B C L M G tC uC vC wC TC FC GC xC yC zC UC VC HC 0C IC WC XC YC ZC aC 1C JC bC cC dC eC fC 2C KC gC hC iC jC 3C","2":"J PB sC SC"},F:{"1":"0 1 2 3 4 5 6 7 8 C G N O P QB RB SB TB UB VB WB XB YB ZB aB bB cB dB eB fB gB hB iB jB kB lB mB nB oB pB qB rB sB tB uB vB wB xB yB zB 0B 1B 2B 3B 4B 5B 6B 7B 8B 9B AC BC CC DC Q H R OC S T U V W X Y Z a b c d e f g h i j k l m n o p q r s t u v w x y z 8C GC","2":"F B 4C 5C 6C 7C FC kC"},G:{"1":"E AD BD CD DD ED FD GD HD ID JD KD LD MD ND OD PD QD RD SD UC VC HC TD IC WC XC YC ZC aC UD JC bC cC dC eC fC VD KC gC hC iC jC","16":"SC 9C lC"},H:{"1":"WD"},I:{"1":"I bD cD","16":"LC J XD YD ZD aD lC"},J:{"16":"D A"},K:{"1":"C H GC","2":"A B FC kC"},L:{"1":"I"},M:{"1":"EC"},N:{"2":"A B"},O:{"1":"HC"},P:{"1":"1 2 3 4 5 6 7 8 J dD eD fD gD hD TC iD jD kD lD mD IC JC KC nD"},Q:{"1":"oD"},R:{"1":"pD"},S:{"1":"qD rD"}},B:4,C:"SVG vector-effect: non-scaling-stroke",D:true};
@@ -0,0 +1,16 @@
"use strict";
Object.defineProperty(exports, Symbol.toStringTag, { value: "Module" });
const useMatch = require("./useMatch.cjs");
function useSearch(opts) {
  return useMatch.useMatch({
    from: opts.from,
    strict: opts.strict,
    shouldThrow: opts.shouldThrow,
    structuralSharing: opts.structuralSharing,
    select: (match) => {
      return opts.select ? opts.select(match.search) : match.search;
    }
  });
}
exports.useSearch = useSearch;
//# sourceMappingURL=useSearch.cjs.map
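// Usage example (hedged sketch, assuming this is TanStack Router's useSearch
// hook compiled to CJS): selecting a single search param inside a component.
// The route id "/posts" and the "page" param are hypothetical.
//
// const page = useSearch({
//   from: "/posts",
//   select: (search) => search.page ?? 1,
// });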
@@ -0,0 +1,11 @@
"use strict";

Object.defineProperty(exports, "__esModule", {
  value: true
});
exports.default = _readOnlyError;
function _readOnlyError(name) {
  throw new TypeError('"' + name + '" is read-only');
}

//# sourceMappingURL=readOnlyError.js.map
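// Illustrative note: this Babel runtime helper is called by compiled output when
// source code writes to a read-only binding. Simplified sketch (real Babel
// output differs in detail):
//
//   // source
//   const x = 1;
//   x = 2;
//
//   // compiled (roughly)
//   var x = 1;
//   _readOnlyError("x"); // throws TypeError: "x" is read-only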
@@ -0,0 +1 @@
module.exports={A:{A:{"1":"A B","2":"K D E F mC"},B:{"1":"0 9 C L M G N O P Q H R S T U V W X Y Z a b c d e f g h i j k l m n o p q r s t u v w x y z AB BB CB DB EB FB GB HB IB JB KB LB MB NB OB I"},C:{"1":"0 4 5 6 7 8 9 RB SB TB UB VB WB XB YB ZB aB bB cB dB eB fB gB hB iB jB kB lB mB nB oB pB qB rB sB tB uB vB MC wB NC xB yB zB 0B 1B 2B 3B 4B 5B 6B 7B 8B 9B AC BC CC DC Q H R OC S T U V W X Y Z a b c d e f g h i j k l m n o p q r s t u v w x y z AB BB CB DB EB FB GB HB IB JB KB LB MB NB OB I PC EC QC RC oC pC","2":"1 2 3 nC LC J PB K D E F A B C L M G N O P QB qC rC"},D:{"1":"0 1 2 3 4 5 6 7 8 9 J PB K D E F A B C L M G N O P QB RB SB TB UB VB WB XB YB ZB aB bB cB dB eB fB gB hB iB jB kB lB mB nB oB pB qB rB sB tB uB vB MC wB NC xB yB zB 0B 1B 2B 3B 4B 5B 6B 7B 8B 9B AC BC CC DC Q H R S T U V W X Y Z a b c d e f g h i j k l m n o p q r s t u v w x y z AB BB CB DB EB FB GB HB IB JB KB LB MB NB OB I PC EC QC RC"},E:{"1":"J PB K D E F A B C L M G sC SC tC uC vC wC TC FC GC xC yC zC UC VC HC 0C IC WC XC YC ZC aC 1C JC bC cC dC eC fC 2C KC gC hC iC jC 3C"},F:{"1":"0 1 2 3 4 5 6 7 8 F B C G N O P QB RB SB TB UB VB WB XB YB ZB aB bB cB dB eB fB gB hB iB jB kB lB mB nB oB pB qB rB sB tB uB vB wB xB yB zB 0B 1B 2B 3B 4B 5B 6B 7B 8B 9B AC BC CC DC Q H R OC S T U V W X Y Z a b c d e f g h i j k l m n o p q r s t u v w x y z 4C 5C 6C 7C FC kC 8C GC"},G:{"1":"E AD BD CD DD ED FD GD HD ID JD KD LD MD ND OD PD QD RD SD UC VC HC TD IC WC XC YC ZC aC UD JC bC cC dC eC fC VD KC gC hC iC jC","2":"SC 9C lC"},H:{"2":"WD"},I:{"1":"I lC bD cD","4":"LC J XD YD ZD aD"},J:{"1":"D A"},K:{"1":"A B C H FC kC GC"},L:{"1":"I"},M:{"1":"EC"},N:{"1":"A B"},O:{"1":"HC"},P:{"1":"1 2 3 4 5 6 7 8 J dD eD fD gD hD TC iD jD kD lD mD IC JC KC nD"},Q:{"1":"oD"},R:{"1":"pD"},S:{"1":"qD rD"}},B:1,C:"Range input type",D:true};
@@ -0,0 +1,46 @@
/**
 * @fileoverview Rule to restrict what can be thrown as an exception.
 * @author Dieter Oberkofler
 */

"use strict";

const astUtils = require("./utils/ast-utils");

//------------------------------------------------------------------------------
// Rule Definition
//------------------------------------------------------------------------------

/** @type {import('../shared/types').Rule} */
module.exports = {
	meta: {
		type: "suggestion",

		docs: {
			description: "Disallow throwing literals as exceptions",
			recommended: false,
			url: "https://eslint.org/docs/latest/rules/no-throw-literal",
		},

		schema: [],

		messages: {
			object: "Expected an error object to be thrown.",
			undef: "Do not throw undefined.",
		},
	},

	create(context) {
		return {
			ThrowStatement(node) {
				if (!astUtils.couldBeError(node.argument)) {
					context.report({ node, messageId: "object" });
				} else if (node.argument.type === "Identifier") {
					if (node.argument.name === "undefined") {
						context.report({ node, messageId: "undef" });
					}
				}
			},
		};
	},
};
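// Examples (illustrative) of what the rule above reports and allows:
//
//   throw "error";           // reported — "Expected an error object to be thrown."
//   throw 0;                 // reported
//   throw undefined;         // reported — "Do not throw undefined."
//
//   throw new Error("oops"); // allowed
//   throw err;               // allowed — an identifier may hold an Error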
@@ -0,0 +1,683 @@
|
||||
/**
|
||||
* @fileoverview Restrict usage of specified node imports.
|
||||
* @author Guy Ellis
|
||||
*/
|
||||
"use strict";
|
||||
|
||||
//------------------------------------------------------------------------------
|
||||
// Requirements
|
||||
//------------------------------------------------------------------------------
|
||||
|
||||
const astUtils = require("./utils/ast-utils");
|
||||
|
||||
//------------------------------------------------------------------------------
|
||||
// Rule Definition
|
||||
//------------------------------------------------------------------------------
|
||||
|
||||
const ignore = require("ignore");
|
||||
|
||||
const arrayOfStringsOrObjects = {
|
||||
type: "array",
|
||||
items: {
|
||||
anyOf: [
|
||||
{ type: "string" },
|
||||
{
|
||||
type: "object",
|
||||
properties: {
|
||||
name: { type: "string" },
|
||||
message: {
|
||||
type: "string",
|
||||
minLength: 1,
|
||||
},
|
||||
importNames: {
|
||||
type: "array",
|
||||
items: {
|
||||
type: "string",
|
||||
},
|
||||
},
|
||||
allowImportNames: {
|
||||
type: "array",
|
||||
items: {
|
||||
type: "string",
|
||||
},
|
||||
},
|
||||
},
|
||||
additionalProperties: false,
|
||||
required: ["name"],
|
||||
not: { required: ["importNames", "allowImportNames"] },
|
||||
},
|
||||
],
|
||||
},
|
||||
uniqueItems: true,
|
||||
};
|
||||
|
||||
const arrayOfStringsOrObjectPatterns = {
|
||||
anyOf: [
|
||||
{
|
||||
type: "array",
|
||||
items: {
|
||||
type: "string",
|
||||
},
|
||||
uniqueItems: true,
|
||||
},
|
||||
{
|
||||
type: "array",
|
||||
items: {
|
||||
type: "object",
|
||||
properties: {
|
||||
importNames: {
|
||||
type: "array",
|
||||
items: {
|
||||
type: "string",
|
||||
},
|
||||
minItems: 1,
|
||||
uniqueItems: true,
|
||||
},
|
||||
allowImportNames: {
|
||||
type: "array",
|
||||
items: {
|
||||
type: "string",
|
||||
},
|
||||
minItems: 1,
|
||||
uniqueItems: true,
|
||||
},
|
||||
group: {
|
||||
type: "array",
|
||||
items: {
|
||||
type: "string",
|
||||
},
|
||||
minItems: 1,
|
||||
uniqueItems: true,
|
||||
},
|
||||
regex: {
|
||||
type: "string",
|
||||
},
|
||||
importNamePattern: {
|
||||
type: "string",
|
||||
},
|
||||
allowImportNamePattern: {
|
||||
type: "string",
|
||||
},
|
||||
message: {
|
||||
type: "string",
|
||||
minLength: 1,
|
||||
},
|
||||
caseSensitive: {
|
||||
type: "boolean",
|
||||
},
|
||||
},
|
||||
additionalProperties: false,
|
||||
not: {
|
||||
anyOf: [
|
||||
{ required: ["importNames", "allowImportNames"] },
|
||||
{
|
||||
required: [
|
||||
"importNamePattern",
|
||||
"allowImportNamePattern",
|
||||
],
|
||||
},
|
||||
{ required: ["importNames", "allowImportNamePattern"] },
|
||||
{ required: ["importNamePattern", "allowImportNames"] },
|
||||
{
|
||||
required: [
|
||||
"allowImportNames",
|
||||
"allowImportNamePattern",
|
||||
],
|
||||
},
|
||||
],
|
||||
},
|
||||
oneOf: [{ required: ["group"] }, { required: ["regex"] }],
|
||||
},
|
||||
uniqueItems: true,
|
||||
},
|
||||
],
|
||||
};
|
||||
|
||||
/** @type {import('../shared/types').Rule} */
|
||||
module.exports = {
|
||||
meta: {
|
||||
type: "suggestion",
|
||||
|
||||
docs: {
|
||||
description: "Disallow specified modules when loaded by `import`",
|
||||
recommended: false,
|
||||
url: "https://eslint.org/docs/latest/rules/no-restricted-imports",
|
||||
},
|
||||
|
||||
messages: {
|
||||
path: "'{{importSource}}' import is restricted from being used.",
|
||||
pathWithCustomMessage:
|
||||
// eslint-disable-next-line eslint-plugin/report-message-format -- Custom message might not end in a period
|
||||
"'{{importSource}}' import is restricted from being used. {{customMessage}}",
|
||||
|
||||
patterns:
|
||||
"'{{importSource}}' import is restricted from being used by a pattern.",
|
||||
patternWithCustomMessage:
|
||||
// eslint-disable-next-line eslint-plugin/report-message-format -- Custom message might not end in a period
|
||||
"'{{importSource}}' import is restricted from being used by a pattern. {{customMessage}}",
|
||||
|
||||
patternAndImportName:
|
||||
"'{{importName}}' import from '{{importSource}}' is restricted from being used by a pattern.",
|
||||
patternAndImportNameWithCustomMessage:
|
||||
// eslint-disable-next-line eslint-plugin/report-message-format -- Custom message might not end in a period
|
||||
"'{{importName}}' import from '{{importSource}}' is restricted from being used by a pattern. {{customMessage}}",
|
||||
|
||||
patternAndEverything:
|
||||
"* import is invalid because '{{importNames}}' from '{{importSource}}' is restricted from being used by a pattern.",
|
||||
|
||||
patternAndEverythingWithRegexImportName:
|
||||
"* import is invalid because import name matching '{{importNames}}' pattern from '{{importSource}}' is restricted from being used.",
|
||||
patternAndEverythingWithCustomMessage:
|
||||
// eslint-disable-next-line eslint-plugin/report-message-format -- Custom message might not end in a period
|
||||
"* import is invalid because '{{importNames}}' from '{{importSource}}' is restricted from being used by a pattern. {{customMessage}}",
|
||||
patternAndEverythingWithRegexImportNameAndCustomMessage:
|
||||
// eslint-disable-next-line eslint-plugin/report-message-format -- Custom message might not end in a period
|
||||
"* import is invalid because import name matching '{{importNames}}' pattern from '{{importSource}}' is restricted from being used. {{customMessage}}",
|
||||
|
||||
everything:
|
||||
"* import is invalid because '{{importNames}}' from '{{importSource}}' is restricted.",
|
||||
everythingWithCustomMessage:
|
||||
// eslint-disable-next-line eslint-plugin/report-message-format -- Custom message might not end in a period
|
||||
"* import is invalid because '{{importNames}}' from '{{importSource}}' is restricted. {{customMessage}}",
|
||||
|
||||
importName:
|
||||
"'{{importName}}' import from '{{importSource}}' is restricted.",
|
||||
importNameWithCustomMessage:
|
||||
// eslint-disable-next-line eslint-plugin/report-message-format -- Custom message might not end in a period
|
||||
"'{{importName}}' import from '{{importSource}}' is restricted. {{customMessage}}",
|
||||
|
||||
allowedImportName:
|
||||
"'{{importName}}' import from '{{importSource}}' is restricted because only '{{allowedImportNames}}' import(s) is/are allowed.",
|
||||
allowedImportNameWithCustomMessage:
|
||||
// eslint-disable-next-line eslint-plugin/report-message-format -- Custom message might not end in a period
|
||||
"'{{importName}}' import from '{{importSource}}' is restricted because only '{{allowedImportNames}}' import(s) is/are allowed. {{customMessage}}",
|
||||
|
||||
everythingWithAllowImportNames:
|
||||
"* import is invalid because only '{{allowedImportNames}}' from '{{importSource}}' is/are allowed.",
|
||||
everythingWithAllowImportNamesAndCustomMessage:
|
||||
// eslint-disable-next-line eslint-plugin/report-message-format -- Custom message might not end in a period
|
||||
"* import is invalid because only '{{allowedImportNames}}' from '{{importSource}}' is/are allowed. {{customMessage}}",
|
||||
|
||||
allowedImportNamePattern:
|
||||
"'{{importName}}' import from '{{importSource}}' is restricted because only imports that match the pattern '{{allowedImportNamePattern}}' are allowed from '{{importSource}}'.",
|
||||
allowedImportNamePatternWithCustomMessage:
|
||||
// eslint-disable-next-line eslint-plugin/report-message-format -- Custom message might not end in a period
|
||||
"'{{importName}}' import from '{{importSource}}' is restricted because only imports that match the pattern '{{allowedImportNamePattern}}' are allowed from '{{importSource}}'. {{customMessage}}",
|
||||
|
||||
everythingWithAllowedImportNamePattern:
|
||||
"* import is invalid because only imports that match the pattern '{{allowedImportNamePattern}}' from '{{importSource}}' are allowed.",
|
||||
everythingWithAllowedImportNamePatternWithCustomMessage:
|
||||
// eslint-disable-next-line eslint-plugin/report-message-format -- Custom message might not end in a period
|
||||
"* import is invalid because only imports that match the pattern '{{allowedImportNamePattern}}' from '{{importSource}}' are allowed. {{customMessage}}",
|
||||
},
|
||||
|
||||
schema: {
|
||||
anyOf: [
|
||||
arrayOfStringsOrObjects,
|
||||
{
|
||||
type: "array",
|
||||
items: [
|
||||
{
|
||||
type: "object",
|
||||
properties: {
|
||||
paths: arrayOfStringsOrObjects,
|
||||
patterns: arrayOfStringsOrObjectPatterns,
|
||||
},
|
||||
additionalProperties: false,
|
||||
},
|
||||
],
|
||||
additionalItems: false,
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
|
||||
create(context) {
|
||||
const sourceCode = context.sourceCode;
|
||||
const options = Array.isArray(context.options) ? context.options : [];
|
||||
const isPathAndPatternsObject =
|
||||
typeof options[0] === "object" &&
|
||||
(Object.hasOwn(options[0], "paths") ||
|
||||
Object.hasOwn(options[0], "patterns"));
|
||||
|
||||
const restrictedPaths =
|
||||
(isPathAndPatternsObject ? options[0].paths : context.options) ||
|
||||
[];
|
||||
const groupedRestrictedPaths = restrictedPaths.reduce(
|
||||
(memo, importSource) => {
|
||||
const path =
|
||||
typeof importSource === "string"
|
||||
? importSource
|
||||
: importSource.name;
|
||||
|
||||
if (!memo[path]) {
|
||||
memo[path] = [];
|
||||
}
|
||||
|
||||
if (typeof importSource === "string") {
|
||||
memo[path].push({});
|
||||
} else {
|
||||
memo[path].push({
|
||||
message: importSource.message,
|
||||
importNames: importSource.importNames,
|
||||
allowImportNames: importSource.allowImportNames,
|
||||
});
|
||||
}
|
||||
return memo;
|
||||
},
|
||||
Object.create(null),
|
||||
);
|
||||
|
||||
// Handle patterns too, either as strings or groups
|
||||
let restrictedPatterns =
|
||||
(isPathAndPatternsObject ? options[0].patterns : []) || [];
|
||||
|
||||
// standardize to array of objects if we have an array of strings
|
||||
if (
|
||||
restrictedPatterns.length > 0 &&
|
||||
typeof restrictedPatterns[0] === "string"
|
||||
) {
|
||||
restrictedPatterns = [{ group: restrictedPatterns }];
|
||||
}
|
||||
|
||||
// relative paths are supported for this rule
|
||||
const restrictedPatternGroups = restrictedPatterns.map(
|
||||
({
|
||||
group,
|
||||
regex,
|
||||
message,
|
||||
caseSensitive,
|
||||
importNames,
|
||||
importNamePattern,
|
||||
allowImportNames,
|
||||
allowImportNamePattern,
|
||||
}) => ({
|
||||
...(group
|
||||
? {
|
||||
matcher: ignore({
|
||||
allowRelativePaths: true,
|
||||
ignorecase: !caseSensitive,
|
||||
}).add(group),
|
||||
}
|
||||
: {}),
|
||||
...(typeof regex === "string"
|
||||
? {
|
||||
regexMatcher: new RegExp(
|
||||
regex,
|
||||
caseSensitive ? "u" : "iu",
|
||||
),
|
||||
}
|
||||
: {}),
|
||||
customMessage: message,
|
||||
importNames,
|
||||
importNamePattern,
|
||||
allowImportNames,
|
||||
allowImportNamePattern,
|
||||
}),
|
||||
);
|
||||
|
||||
// if no imports are restricted we don't need to check
|
||||
if (
|
||||
Object.keys(restrictedPaths).length === 0 &&
|
||||
restrictedPatternGroups.length === 0
|
||||
) {
|
||||
return {};
|
||||
}
|
||||
|
||||
/**
|
||||
* Report a restricted path.
|
||||
* @param {string} importSource path of the import
|
||||
* @param {Map<string,Object[]>} importNames Map of import names that are being imported
|
||||
* @param {node} node representing the restricted path reference
|
||||
* @returns {void}
|
||||
* @private
|
||||
*/
|
||||
function checkRestrictedPathAndReport(importSource, importNames, node) {
|
||||
if (!Object.hasOwn(groupedRestrictedPaths, importSource)) {
|
||||
return;
|
||||
}
|
||||
|
||||
groupedRestrictedPaths[importSource].forEach(
|
||||
restrictedPathEntry => {
|
||||
const customMessage = restrictedPathEntry.message;
|
||||
const restrictedImportNames =
|
||||
restrictedPathEntry.importNames;
|
||||
const allowedImportNames =
|
||||
restrictedPathEntry.allowImportNames;
|
||||
|
||||
if (!restrictedImportNames && !allowedImportNames) {
|
||||
context.report({
|
||||
node,
|
||||
messageId: customMessage
|
||||
? "pathWithCustomMessage"
|
||||
: "path",
|
||||
data: {
|
||||
importSource,
|
||||
customMessage,
|
||||
},
|
||||
});
|
||||
|
||||
return;
|
||||
}
|
||||
|
||||
importNames.forEach((specifiers, importName) => {
|
||||
if (importName === "*") {
|
||||
const [specifier] = specifiers;
|
||||
|
||||
if (restrictedImportNames) {
|
||||
context.report({
|
||||
node,
|
||||
messageId: customMessage
|
||||
? "everythingWithCustomMessage"
|
||||
: "everything",
|
||||
loc: specifier.loc,
|
||||
data: {
|
||||
importSource,
|
||||
importNames: restrictedImportNames,
|
||||
customMessage,
|
||||
},
|
||||
});
|
||||
} else if (allowedImportNames) {
|
||||
context.report({
|
||||
node,
|
||||
messageId: customMessage
|
||||
? "everythingWithAllowImportNamesAndCustomMessage"
|
||||
: "everythingWithAllowImportNames",
|
||||
loc: specifier.loc,
|
||||
data: {
|
||||
importSource,
|
||||
allowedImportNames,
|
||||
customMessage,
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
return;
|
||||
}
|
||||
|
||||
if (
|
||||
restrictedImportNames &&
|
||||
restrictedImportNames.includes(importName)
|
||||
) {
|
||||
specifiers.forEach(specifier => {
|
||||
context.report({
|
||||
node,
|
||||
messageId: customMessage
|
||||
? "importNameWithCustomMessage"
|
||||
: "importName",
|
||||
loc: specifier.loc,
|
||||
data: {
|
||||
importSource,
|
||||
customMessage,
|
||||
importName,
|
||||
},
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
if (
|
||||
allowedImportNames &&
|
||||
!allowedImportNames.includes(importName)
|
||||
) {
|
||||
specifiers.forEach(specifier => {
|
||||
context.report({
|
||||
node,
|
||||
loc: specifier.loc,
|
||||
messageId: customMessage
|
||||
? "allowedImportNameWithCustomMessage"
|
||||
: "allowedImportName",
|
||||
data: {
|
||||
importSource,
|
||||
customMessage,
|
||||
importName,
|
||||
allowedImportNames,
|
||||
},
|
||||
});
|
||||
});
|
||||
}
|
||||
});
|
||||
},
|
||||
);
|
||||
}
|
||||
|
||||
/**
|
||||
* Report a restricted path specifically for patterns.
|
||||
* @param {node} node representing the restricted path reference
|
||||
* @param {Object} group contains an Ignore instance for paths, the customMessage to show on failure,
|
||||
* and any restricted import names that have been specified in the config
|
||||
* @param {Map<string,Object[]>} importNames Map of import names that are being imported
|
||||
* @returns {void}
|
||||
* @private
|
||||
*/
|
||||
function reportPathForPatterns(node, group, importNames) {
|
||||
const importSource = node.source.value.trim();
|
||||
|
||||
const customMessage = group.customMessage;
|
||||
const restrictedImportNames = group.importNames;
|
||||
const restrictedImportNamePattern = group.importNamePattern
|
||||
? new RegExp(group.importNamePattern, "u")
|
||||
: null;
|
||||
const allowedImportNames = group.allowImportNames;
|
||||
const allowedImportNamePattern = group.allowImportNamePattern
|
||||
? new RegExp(group.allowImportNamePattern, "u")
|
||||
: null;
|
||||
|
||||
/**
|
||||
* If we are not restricting to any specific import names and just the pattern itself,
|
||||
* report the error and move on
|
||||
*/
|
||||
if (
|
||||
!restrictedImportNames &&
|
||||
!allowedImportNames &&
|
||||
!restrictedImportNamePattern &&
|
||||
!allowedImportNamePattern
|
||||
) {
|
||||
context.report({
|
||||
node,
|
||||
messageId: customMessage
|
||||
? "patternWithCustomMessage"
|
||||
: "patterns",
|
||||
data: {
|
||||
importSource,
|
||||
customMessage,
|
||||
},
|
||||
});
|
||||
return;
|
||||
}
|
||||
|
||||
importNames.forEach((specifiers, importName) => {
|
||||
if (importName === "*") {
|
||||
const [specifier] = specifiers;
|
||||
|
||||
if (restrictedImportNames) {
|
||||
context.report({
|
||||
node,
|
||||
messageId: customMessage
|
||||
? "patternAndEverythingWithCustomMessage"
|
||||
: "patternAndEverything",
|
||||
loc: specifier.loc,
|
||||
data: {
|
||||
importSource,
|
||||
importNames: restrictedImportNames,
|
||||
customMessage,
|
||||
},
|
||||
});
|
||||
} else if (allowedImportNames) {
|
||||
context.report({
|
||||
node,
|
||||
messageId: customMessage
|
||||
? "everythingWithAllowImportNamesAndCustomMessage"
|
||||
: "everythingWithAllowImportNames",
|
||||
loc: specifier.loc,
|
||||
data: {
|
||||
importSource,
|
||||
allowedImportNames,
|
||||
customMessage,
|
||||
},
|
||||
});
|
||||
} else if (allowedImportNamePattern) {
|
||||
context.report({
|
||||
node,
|
||||
messageId: customMessage
|
||||
? "everythingWithAllowedImportNamePatternWithCustomMessage"
|
||||
: "everythingWithAllowedImportNamePattern",
|
||||
loc: specifier.loc,
|
||||
data: {
|
||||
importSource,
|
||||
allowedImportNamePattern,
|
||||
customMessage,
|
||||
},
|
||||
});
|
||||
} else {
|
||||
context.report({
|
||||
node,
|
||||
messageId: customMessage
|
||||
? "patternAndEverythingWithRegexImportNameAndCustomMessage"
|
||||
: "patternAndEverythingWithRegexImportName",
|
||||
loc: specifier.loc,
|
||||
data: {
|
||||
importSource,
|
||||
importNames: restrictedImportNamePattern,
|
||||
customMessage,
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
return;
|
||||
}
|
||||
|
||||
if (
|
||||
(restrictedImportNames &&
|
||||
restrictedImportNames.includes(importName)) ||
|
||||
(restrictedImportNamePattern &&
|
||||
restrictedImportNamePattern.test(importName))
|
||||
) {
|
||||
specifiers.forEach(specifier => {
|
||||
context.report({
|
||||
node,
|
||||
messageId: customMessage
|
||||
? "patternAndImportNameWithCustomMessage"
|
||||
: "patternAndImportName",
|
||||
loc: specifier.loc,
|
||||
data: {
|
||||
importSource,
|
||||
customMessage,
|
||||
importName,
|
||||
},
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
if (
|
||||
allowedImportNames &&
|
||||
!allowedImportNames.includes(importName)
|
||||
) {
|
||||
specifiers.forEach(specifier => {
|
||||
context.report({
|
||||
node,
|
||||
messageId: customMessage
|
||||
? "allowedImportNameWithCustomMessage"
|
||||
: "allowedImportName",
|
||||
loc: specifier.loc,
|
||||
data: {
|
||||
importSource,
|
||||
customMessage,
|
||||
importName,
|
||||
allowedImportNames,
|
||||
},
|
||||
});
|
||||
});
|
||||
} else if (
|
||||
allowedImportNamePattern &&
|
||||
!allowedImportNamePattern.test(importName)
|
||||
) {
|
||||
specifiers.forEach(specifier => {
|
||||
context.report({
|
||||
node,
|
||||
messageId: customMessage
|
||||
? "allowedImportNamePatternWithCustomMessage"
|
||||
: "allowedImportNamePattern",
|
||||
loc: specifier.loc,
|
||||
data: {
|
||||
importSource,
|
||||
customMessage,
|
||||
importName,
|
||||
allowedImportNamePattern,
|
||||
},
|
||||
});
|
||||
});
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if the given importSource is restricted by a pattern.
|
||||
* @param {string} importSource path of the import
|
||||
* @param {Object} group contains an Ignore instance for paths, and the customMessage to show if it fails
|
||||
* @returns {boolean} whether the variable is a restricted pattern or not
|
||||
* @private
|
||||
*/
|
||||
function isRestrictedPattern(importSource, group) {
|
||||
return group.regexMatcher
|
||||
? group.regexMatcher.test(importSource)
|
||||
: group.matcher.ignores(importSource);
|
||||
}
|
||||
|
||||
/**
|
||||
* Checks a node to see if any problems should be reported.
|
||||
* @param {ASTNode} node The node to check.
|
||||
* @returns {void}
|
||||
* @private
|
||||
*/
|
||||
function checkNode(node) {
|
||||
const importSource = node.source.value.trim();
|
||||
const importNames = new Map();
|
||||
|
||||
if (node.type === "ExportAllDeclaration") {
|
||||
const starToken = sourceCode.getFirstToken(node, 1);
|
||||
|
||||
importNames.set("*", [{ loc: starToken.loc }]);
|
||||
} else if (node.specifiers) {
|
||||
for (const specifier of node.specifiers) {
|
||||
let name;
|
||||
const specifierData = { loc: specifier.loc };
|
||||
|
||||
if (specifier.type === "ImportDefaultSpecifier") {
|
||||
name = "default";
|
||||
} else if (specifier.type === "ImportNamespaceSpecifier") {
|
||||
name = "*";
|
||||
} else if (specifier.imported) {
|
||||
name = astUtils.getModuleExportName(specifier.imported);
|
||||
} else if (specifier.local) {
|
||||
name = astUtils.getModuleExportName(specifier.local);
|
||||
}
|
||||
|
||||
if (typeof name === "string") {
|
||||
if (importNames.has(name)) {
|
||||
importNames.get(name).push(specifierData);
|
||||
} else {
|
||||
importNames.set(name, [specifierData]);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
checkRestrictedPathAndReport(importSource, importNames, node);
|
||||
restrictedPatternGroups.forEach(group => {
|
||||
if (isRestrictedPattern(importSource, group)) {
|
||||
reportPathForPatterns(node, group, importNames);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
return {
|
||||
ImportDeclaration: checkNode,
|
||||
ExportNamedDeclaration(node) {
|
||||
if (node.source) {
|
||||
checkNode(node);
|
||||
}
|
||||
},
|
||||
ExportAllDeclaration: checkNode,
|
||||
};
|
||||
},
|
||||
};
|
||||
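// Configuration example (hedged sketch): the two option shapes accepted by the
// schema above — a plain list of module names, or an object with `paths` and
// `patterns`. All module names shown are illustrative.
//
// "no-restricted-imports": ["error", {
//   paths: [
//     "cluster",
//     { name: "lodash", importNames: ["merge"], message: "Use structuredClone instead." },
//   ],
//   patterns: [
//     { group: ["internal/*"], message: "Do not deep-import internal modules." },
//     { regex: "^@legacy/", caseSensitive: false },
//   ],
// }],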
File diff suppressed because one or more lines are too long
@@ -0,0 +1,266 @@
|
||||
'use strict'
|
||||
|
||||
const SINGLE_QUOTE = "'".charCodeAt(0)
|
||||
const DOUBLE_QUOTE = '"'.charCodeAt(0)
|
||||
const BACKSLASH = '\\'.charCodeAt(0)
|
||||
const SLASH = '/'.charCodeAt(0)
|
||||
const NEWLINE = '\n'.charCodeAt(0)
|
||||
const SPACE = ' '.charCodeAt(0)
|
||||
const FEED = '\f'.charCodeAt(0)
|
||||
const TAB = '\t'.charCodeAt(0)
|
||||
const CR = '\r'.charCodeAt(0)
|
||||
const OPEN_SQUARE = '['.charCodeAt(0)
|
||||
const CLOSE_SQUARE = ']'.charCodeAt(0)
|
||||
const OPEN_PARENTHESES = '('.charCodeAt(0)
|
||||
const CLOSE_PARENTHESES = ')'.charCodeAt(0)
|
||||
const OPEN_CURLY = '{'.charCodeAt(0)
|
||||
const CLOSE_CURLY = '}'.charCodeAt(0)
|
||||
const SEMICOLON = ';'.charCodeAt(0)
|
||||
const ASTERISK = '*'.charCodeAt(0)
|
||||
const COLON = ':'.charCodeAt(0)
|
||||
const AT = '@'.charCodeAt(0)
|
||||
|
||||
const RE_AT_END = /[\t\n\f\r "#'()/;[\\\]{}]/g
|
||||
const RE_WORD_END = /[\t\n\f\r !"#'():;@[\\\]{}]|\/(?=\*)/g
const RE_BAD_BRACKET = /.[\r\n"'(/\\]/
const RE_HEX_ESCAPE = /[\da-f]/i

module.exports = function tokenizer(input, options = {}) {
  let css = input.css.valueOf()
  let ignore = options.ignoreErrors

  let code, content, escape, next, quote
  let currentToken, escaped, escapePos, n, prev

  let length = css.length
  let pos = 0
  let buffer = []
  let returned = []

  function position() {
    return pos
  }

  function unclosed(what) {
    throw input.error('Unclosed ' + what, pos)
  }

  function endOfFile() {
    return returned.length === 0 && pos >= length
  }

  function nextToken(opts) {
    if (returned.length) return returned.pop()
    if (pos >= length) return

    let ignoreUnclosed = opts ? opts.ignoreUnclosed : false

    code = css.charCodeAt(pos)

    switch (code) {
      case NEWLINE:
      case SPACE:
      case TAB:
      case CR:
      case FEED: {
        next = pos
        do {
          next += 1
          code = css.charCodeAt(next)
        } while (
          code === SPACE ||
          code === NEWLINE ||
          code === TAB ||
          code === CR ||
          code === FEED
        )

        currentToken = ['space', css.slice(pos, next)]
        pos = next - 1
        break
      }

      case OPEN_SQUARE:
      case CLOSE_SQUARE:
      case OPEN_CURLY:
      case CLOSE_CURLY:
      case COLON:
      case SEMICOLON:
      case CLOSE_PARENTHESES: {
        let controlChar = String.fromCharCode(code)
        currentToken = [controlChar, controlChar, pos]
        break
      }

      case OPEN_PARENTHESES: {
        prev = buffer.length ? buffer.pop()[1] : ''
        n = css.charCodeAt(pos + 1)
        if (
          prev === 'url' &&
          n !== SINGLE_QUOTE &&
          n !== DOUBLE_QUOTE &&
          n !== SPACE &&
          n !== NEWLINE &&
          n !== TAB &&
          n !== FEED &&
          n !== CR
        ) {
          next = pos
          do {
            escaped = false
            next = css.indexOf(')', next + 1)
            if (next === -1) {
              if (ignore || ignoreUnclosed) {
                next = pos
                break
              } else {
                unclosed('bracket')
              }
            }
            escapePos = next
            while (css.charCodeAt(escapePos - 1) === BACKSLASH) {
              escapePos -= 1
              escaped = !escaped
            }
          } while (escaped)

          currentToken = ['brackets', css.slice(pos, next + 1), pos, next]

          pos = next
        } else {
          next = css.indexOf(')', pos + 1)
          content = css.slice(pos, next + 1)

          if (next === -1 || RE_BAD_BRACKET.test(content)) {
            currentToken = ['(', '(', pos]
          } else {
            currentToken = ['brackets', content, pos, next]
            pos = next
          }
        }

        break
      }

      case SINGLE_QUOTE:
      case DOUBLE_QUOTE: {
        quote = code === SINGLE_QUOTE ? "'" : '"'
        next = pos
        do {
          escaped = false
          next = css.indexOf(quote, next + 1)
          if (next === -1) {
            if (ignore || ignoreUnclosed) {
              next = pos + 1
              break
            } else {
              unclosed('string')
            }
          }
          escapePos = next
          while (css.charCodeAt(escapePos - 1) === BACKSLASH) {
            escapePos -= 1
            escaped = !escaped
          }
        } while (escaped)

        currentToken = ['string', css.slice(pos, next + 1), pos, next]
        pos = next
        break
      }

      case AT: {
        RE_AT_END.lastIndex = pos + 1
        RE_AT_END.test(css)
        if (RE_AT_END.lastIndex === 0) {
          next = css.length - 1
        } else {
          next = RE_AT_END.lastIndex - 2
        }

        currentToken = ['at-word', css.slice(pos, next + 1), pos, next]

        pos = next
        break
      }

      case BACKSLASH: {
        next = pos
        escape = true
        while (css.charCodeAt(next + 1) === BACKSLASH) {
          next += 1
          escape = !escape
        }
        code = css.charCodeAt(next + 1)
        if (
          escape &&
          code !== SLASH &&
          code !== SPACE &&
          code !== NEWLINE &&
          code !== TAB &&
          code !== CR &&
          code !== FEED
        ) {
          next += 1
          if (RE_HEX_ESCAPE.test(css.charAt(next))) {
            while (RE_HEX_ESCAPE.test(css.charAt(next + 1))) {
              next += 1
            }
            if (css.charCodeAt(next + 1) === SPACE) {
              next += 1
            }
          }
        }

        currentToken = ['word', css.slice(pos, next + 1), pos, next]

        pos = next
        break
      }

      default: {
        if (code === SLASH && css.charCodeAt(pos + 1) === ASTERISK) {
          next = css.indexOf('*/', pos + 2) + 1
          if (next === 0) {
            if (ignore || ignoreUnclosed) {
              next = css.length
            } else {
              unclosed('comment')
            }
          }

          currentToken = ['comment', css.slice(pos, next + 1), pos, next]
          pos = next
        } else {
          RE_WORD_END.lastIndex = pos + 1
          RE_WORD_END.test(css)
          if (RE_WORD_END.lastIndex === 0) {
            next = css.length - 1
          } else {
            next = RE_WORD_END.lastIndex - 2
          }

          currentToken = ['word', css.slice(pos, next + 1), pos, next]
          buffer.push(currentToken)
          pos = next
        }

        break
      }
    }

    pos++
    return currentToken
  }

  function back(token) {
    returned.push(token)
  }

  return {
    back,
    endOfFile,
    nextToken,
    position
  }
}
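For orientation, here is a minimal sketch (not part of the diff) of how the object returned by this tokenizer might be driven. It assumes the file is saved as tokenize.js and that the `input` argument is an Input-like object exposing a `css` string and an `error()` method, as the calls to `input.css.valueOf()` and `input.error()` above imply; the mock below stands in for that object.

// Minimal usage sketch: feed a CSS string through the tokenizer until end of input.
// The `input` mock is an assumption; in PostCSS this would be an Input instance.
const tokenize = require('./tokenize')

const input = {
  css: 'a { color: #fff }',
  error(message, pos) {
    return new Error(message + ' at offset ' + pos)
  }
}

const tokenizer = tokenize(input, { ignoreErrors: false })

while (!tokenizer.endOfFile()) {
  const token = tokenizer.nextToken()
  console.log(token) // e.g. ['word', 'a', 0, 0], then ['space', ' '], then ['{', '{', 2], ...
}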
@@ -0,0 +1,5 @@
export * from './derived.js';
export * from './effect.js';
export * from './store.js';
export * from './types.js';
export * from './scheduler.js';
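This index file is a barrel that re-exports everything from the package's individual modules. A hypothetical consumer could reach all of it through the package root; the snippet below only illustrates that, since the actual exported names are not visible in this diff.

// Hypothetical consumer of the barrel file. The module's exported names are not
// shown in this diff, so this only demonstrates importing through the root.
import * as reactive from './index.js';

console.log(Object.keys(reactive)); // whatever './store.js', './derived.js', etc. export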
File diff suppressed because one or more lines are too long
@@ -0,0 +1,165 @@
/**
 * @fileoverview Rule to flag use of duplicate keys in an object.
 * @author Ian Christian Myers
 */

"use strict";

//------------------------------------------------------------------------------
// Requirements
//------------------------------------------------------------------------------

const astUtils = require("./utils/ast-utils");

//------------------------------------------------------------------------------
// Helpers
//------------------------------------------------------------------------------

const GET_KIND = /^(?:init|get)$/u;
const SET_KIND = /^(?:init|set)$/u;

/**
 * The class which stores properties' information of an object.
 */
class ObjectInfo {

    /**
     * @param {ObjectInfo|null} upper The information of the outer object.
     * @param {ASTNode} node The ObjectExpression node of this information.
     */
    constructor(upper, node) {
        this.upper = upper;
        this.node = node;
        this.properties = new Map();
    }

    /**
     * Gets the information of the given Property node.
     * @param {ASTNode} node The Property node to get.
     * @returns {{get: boolean, set: boolean}} The information of the property.
     */
    getPropertyInfo(node) {
        const name = astUtils.getStaticPropertyName(node);

        if (!this.properties.has(name)) {
            this.properties.set(name, { get: false, set: false });
        }
        return this.properties.get(name);
    }

    /**
     * Checks whether the given property has been defined already or not.
     * @param {ASTNode} node The Property node to check.
     * @returns {boolean} `true` if the property has been defined.
     */
    isPropertyDefined(node) {
        const entry = this.getPropertyInfo(node);

        return (
            (GET_KIND.test(node.kind) && entry.get) ||
            (SET_KIND.test(node.kind) && entry.set)
        );
    }

    /**
     * Defines the given property.
     * @param {ASTNode} node The Property node to define.
     * @returns {void}
     */
    defineProperty(node) {
        const entry = this.getPropertyInfo(node);

        if (GET_KIND.test(node.kind)) {
            entry.get = true;
        }
        if (SET_KIND.test(node.kind)) {
            entry.set = true;
        }
    }
}

//------------------------------------------------------------------------------
// Rule Definition
//------------------------------------------------------------------------------

/** @type {import('../shared/types').Rule} */
module.exports = {
    meta: {
        type: "problem",

        docs: {
            description: "Disallow duplicate keys in object literals",
            recommended: true,
            url: "https://eslint.org/docs/latest/rules/no-dupe-keys",
        },

        schema: [],

        messages: {
            unexpected: "Duplicate key '{{name}}'.",
        },
    },

    create(context) {
        let info = null;

        return {
            ObjectExpression(node) {
                info = new ObjectInfo(info, node);
            },
            "ObjectExpression:exit"() {
                info = info.upper;
            },

            Property(node) {
                const name = astUtils.getStaticPropertyName(node);

                // Skip destructuring.
                if (node.parent.type !== "ObjectExpression") {
                    return;
                }

                // Skip if the name is not static.
                if (name === null) {
                    return;
                }

                /*
                 * Skip if the property node is a proto setter.
                 * Proto setter is a special syntax that sets
                 * object's prototype instead of creating a property.
                 * It can be in one of the following forms:
                 *
                 *     __proto__: <expression>
                 *     '__proto__': <expression>
                 *     "__proto__": <expression>
                 *
                 * Duplicate proto setters produce parsing errors,
                 * so we can just skip them to not interfere with
                 * regular properties named "__proto__".
                 */
                if (
                    name === "__proto__" &&
                    node.kind === "init" &&
                    !node.computed &&
                    !node.shorthand &&
                    !node.method
                ) {
                    return;
                }

                // Reports if the name is defined already.
                if (info.isPropertyDefined(node)) {
                    context.report({
                        node: info.node,
                        loc: node.key.loc,
                        messageId: "unexpected",
                        data: { name },
                    });
                }

                // Update info.
                info.defineProperty(node);
            },
        };
    },
};
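As a quick illustration (not part of the diff), here is the kind of object literal this rule reports versus allows, following the GET_KIND/SET_KIND logic above:

/* eslint no-dupe-keys: "error" */

// Reported: 'a' is defined twice with plain (init) values.
const bad = { a: 1, a: 2 };

// Not reported: a getter/setter pair for the same name is allowed,
// because the rule tracks get and set kinds separately.
const ok = {
    get a() { return 1; },
    set a(value) {},
};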
File diff suppressed because one or more lines are too long
@@ -0,0 +1,216 @@
/**
 * @fileoverview Rule to warn about using dot notation instead of square bracket notation when possible.
 * @author Josh Perez
 */
"use strict";

//------------------------------------------------------------------------------
// Requirements
//------------------------------------------------------------------------------

const astUtils = require("./utils/ast-utils");
const keywords = require("./utils/keywords");

//------------------------------------------------------------------------------
// Rule Definition
//------------------------------------------------------------------------------

const validIdentifier = /^[a-zA-Z_$][a-zA-Z0-9_$]*$/u;

// `null` literal must be handled separately.
const literalTypesToCheck = new Set(["string", "boolean"]);

/** @type {import('../shared/types').Rule} */
module.exports = {
    meta: {
        type: "suggestion",

        defaultOptions: [
            {
                allowKeywords: true,
                allowPattern: "",
            },
        ],

        docs: {
            description: "Enforce dot notation whenever possible",
            recommended: false,
            frozen: true,
            url: "https://eslint.org/docs/latest/rules/dot-notation",
        },

        schema: [
            {
                type: "object",
                properties: {
                    allowKeywords: {
                        type: "boolean",
                    },
                    allowPattern: {
                        type: "string",
                    },
                },
                additionalProperties: false,
            },
        ],

        fixable: "code",

        messages: {
            useDot: "[{{key}}] is better written in dot notation.",
            useBrackets: ".{{key}} is a syntax error.",
        },
    },

    create(context) {
        const [options] = context.options;
        const allowKeywords = options.allowKeywords;
        const sourceCode = context.sourceCode;

        let allowPattern;

        if (options.allowPattern) {
            allowPattern = new RegExp(options.allowPattern, "u");
        }

        /**
         * Check if the property is valid dot notation
         * @param {ASTNode} node The dot notation node
         * @param {string} value Value which is to be checked
         * @returns {void}
         */
        function checkComputedProperty(node, value) {
            if (
                validIdentifier.test(value) &&
                (allowKeywords || !keywords.includes(String(value))) &&
                !(allowPattern && allowPattern.test(value))
            ) {
                const formattedValue =
                    node.property.type === "Literal"
                        ? JSON.stringify(value)
                        : `\`${value}\``;

                context.report({
                    node: node.property,
                    messageId: "useDot",
                    data: {
                        key: formattedValue,
                    },
                    *fix(fixer) {
                        const leftBracket = sourceCode.getTokenAfter(
                            node.object,
                            astUtils.isOpeningBracketToken,
                        );
                        const rightBracket = sourceCode.getLastToken(node);
                        const nextToken = sourceCode.getTokenAfter(node);

                        // Don't perform any fixes if there are comments inside the brackets.
                        if (
                            sourceCode.commentsExistBetween(
                                leftBracket,
                                rightBracket,
                            )
                        ) {
                            return;
                        }

                        // Replace the brackets by an identifier.
                        if (!node.optional) {
                            yield fixer.insertTextBefore(
                                leftBracket,
                                astUtils.isDecimalInteger(node.object)
                                    ? " ."
                                    : ".",
                            );
                        }
                        yield fixer.replaceTextRange(
                            [leftBracket.range[0], rightBracket.range[1]],
                            value,
                        );

                        // Insert a space after the property if it will be connected to the next token.
                        if (
                            nextToken &&
                            rightBracket.range[1] === nextToken.range[0] &&
                            !astUtils.canTokensBeAdjacent(
                                String(value),
                                nextToken,
                            )
                        ) {
                            yield fixer.insertTextAfter(node, " ");
                        }
                    },
                });
            }
        }

        return {
            MemberExpression(node) {
                if (
                    node.computed &&
                    node.property.type === "Literal" &&
                    (literalTypesToCheck.has(typeof node.property.value) ||
                        astUtils.isNullLiteral(node.property))
                ) {
                    checkComputedProperty(node, node.property.value);
                }
                if (
                    node.computed &&
                    astUtils.isStaticTemplateLiteral(node.property)
                ) {
                    checkComputedProperty(
                        node,
                        node.property.quasis[0].value.cooked,
                    );
                }
                if (
                    !allowKeywords &&
                    !node.computed &&
                    node.property.type === "Identifier" &&
                    keywords.includes(String(node.property.name))
                ) {
                    context.report({
                        node: node.property,
                        messageId: "useBrackets",
                        data: {
                            key: node.property.name,
                        },
                        *fix(fixer) {
                            const dotToken = sourceCode.getTokenBefore(
                                node.property,
                            );

                            // A statement that starts with `let[` is parsed as a destructuring variable declaration, not a MemberExpression.
                            if (
                                node.object.type === "Identifier" &&
                                node.object.name === "let" &&
                                !node.optional
                            ) {
                                return;
                            }

                            // Don't perform any fixes if there are comments between the dot and the property name.
                            if (
                                sourceCode.commentsExistBetween(
                                    dotToken,
                                    node.property,
                                )
                            ) {
                                return;
                            }

                            // Replace the identifier to brackets.
                            if (!node.optional) {
                                yield fixer.remove(dotToken);
                            }
                            yield fixer.replaceText(
                                node.property,
                                `["${node.property.name}"]`,
                            );
                        },
                    });
                }
            },
        };
    },
};
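A small illustration (not from the diff) of what this rule reports and how its fixer rewrites code, assuming the default options (`allowKeywords: true`):

/* eslint dot-notation: "error" */

const obj = { foo: 1, class: 2 };

obj["foo"];   // reported: ["foo"] is better written in dot notation; fixed to obj.foo
obj[`foo`];   // static template literals are checked the same way
obj["class"]; // also reported, because allowKeywords defaults to true

/* With { allowKeywords: false }, keyword properties go the other way:
   obj.class is reported (".class is a syntax error.") and fixed to obj["class"]. */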
@@ -0,0 +1,21 @@
export function fetchData(url: any): Promise<Uint8Array>;
export class NodeCanvasFactory extends BaseCanvasFactory {
    /**
     * @ignore
     */
    _createCanvas(width: any, height: any): any;
}
export class NodeCMapReaderFactory extends BaseCMapReaderFactory {
}
export class NodeFilterFactory extends BaseFilterFactory {
}
export class NodePackages {
    static get promise(): any;
    static get(name: any): any;
}
export class NodeStandardFontDataFactory extends BaseStandardFontDataFactory {
}
import { BaseCanvasFactory } from "./canvas_factory.js";
import { BaseCMapReaderFactory } from "./cmap_reader_factory.js";
import { BaseFilterFactory } from "./filter_factory.js";
import { BaseStandardFontDataFactory } from "./standard_fontdata_factory.js";
File diff suppressed because one or more lines are too long
@@ -0,0 +1,108 @@
'use client';
var __rest = (this && this.__rest) || function (s, e) {
    var t = {};
    for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p) && e.indexOf(p) < 0)
        t[p] = s[p];
    if (s != null && typeof Object.getOwnPropertySymbols === "function")
        for (var i = 0, p = Object.getOwnPropertySymbols(s); i < p.length; i++) {
            if (e.indexOf(p[i]) < 0 && Object.prototype.propertyIsEnumerable.call(s, p[i]))
                t[p[i]] = s[p[i]];
        }
    return t;
};
import { jsx as _jsx } from "react/jsx-runtime";
import { useEffect, useMemo } from 'react';
import makeCancellable from 'make-cancellable-promise';
import makeEventProps from 'make-event-props';
import clsx from 'clsx';
import invariant from 'tiny-invariant';
import warning from 'warning';
import OutlineContext from './OutlineContext.js';
import OutlineItem from './OutlineItem.js';
import { cancelRunningTask } from './shared/utils.js';
import useDocumentContext from './shared/hooks/useDocumentContext.js';
import useResolver from './shared/hooks/useResolver.js';
/**
 * Displays an outline (table of contents).
 *
 * Should be placed inside `<Document />`. Alternatively, it can have `pdf` prop passed, which can be obtained from `<Document />`'s `onLoadSuccess` callback function.
 */
export default function Outline(props) {
    const documentContext = useDocumentContext();
    const mergedProps = Object.assign(Object.assign({}, documentContext), props);
    const { className, inputRef, onItemClick, onLoadError: onLoadErrorProps, onLoadSuccess: onLoadSuccessProps, pdf } = mergedProps, otherProps = __rest(mergedProps, ["className", "inputRef", "onItemClick", "onLoadError", "onLoadSuccess", "pdf"]);
    invariant(pdf, 'Attempted to load an outline, but no document was specified. Wrap <Outline /> in a <Document /> or pass explicit `pdf` prop.');
    const [outlineState, outlineDispatch] = useResolver();
    const { value: outline, error: outlineError } = outlineState;
    /**
     * Called when an outline is read successfully
     */
    function onLoadSuccess() {
        if (typeof outline === 'undefined' || outline === false) {
            return;
        }
        if (onLoadSuccessProps) {
            onLoadSuccessProps(outline);
        }
    }
    /**
     * Called when an outline failed to read successfully
     */
    function onLoadError() {
        if (!outlineError) {
            // Impossible, but TypeScript doesn't know that
            return;
        }
        warning(false, outlineError.toString());
        if (onLoadErrorProps) {
            onLoadErrorProps(outlineError);
        }
    }
    // biome-ignore lint/correctness/useExhaustiveDependencies: useEffect intentionally triggered on pdf change
    useEffect(function resetOutline() {
        outlineDispatch({ type: 'RESET' });
    }, [outlineDispatch, pdf]);
    useEffect(function loadOutline() {
        if (!pdf) {
            // Impossible, but TypeScript doesn't know that
            return;
        }
        const cancellable = makeCancellable(pdf.getOutline());
        const runningTask = cancellable;
        cancellable.promise
            .then((nextOutline) => {
                outlineDispatch({ type: 'RESOLVE', value: nextOutline });
            })
            .catch((error) => {
                outlineDispatch({ type: 'REJECT', error });
            });
        return () => cancelRunningTask(runningTask);
    }, [outlineDispatch, pdf]);
    // biome-ignore lint/correctness/useExhaustiveDependencies: Omitted callbacks so they are not called every time they change
    useEffect(() => {
        if (outline === undefined) {
            return;
        }
        if (outline === false) {
            onLoadError();
            return;
        }
        onLoadSuccess();
    }, [outline]);
    const childContext = useMemo(() => ({
        onItemClick,
    }), [onItemClick]);
    const eventProps = useMemo(() => makeEventProps(otherProps, () => outline),
        // biome-ignore lint/correctness/useExhaustiveDependencies: FIXME
        [otherProps, outline]);
    if (!outline) {
        return null;
    }
    function renderOutline() {
        if (!outline) {
            return null;
        }
        return (_jsx("ul", { children: outline.map((item, itemIndex) => (_jsx(OutlineItem, { item: item, pdf: pdf }, typeof item.dest === 'string' ? item.dest : itemIndex))) }));
    }
    return (_jsx("div", Object.assign({ className: clsx('react-pdf__Outline', className), ref: inputRef }, eventProps, { children: _jsx(OutlineContext.Provider, { value: childContext, children: renderOutline() }) })));
}
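For context, a minimal sketch (not part of this diff) of how this component is typically used, following the docstring above: it sits inside `<Document />`, or receives an explicit `pdf` prop. The file name and the shape of the `onItemClick` argument below are assumptions made for illustration.

// Minimal usage sketch; "sample.pdf" and the callback argument shape are assumptions.
import { Document, Outline, Page } from 'react-pdf';

function Viewer() {
    return (
        <Document file="sample.pdf">
            <Outline onItemClick={(item) => console.log('outline item clicked', item)} />
            <Page pageNumber={1} />
        </Document>
    );
}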