update
@@ -0,0 +1,166 @@
'use client';

import { useCallback, useEffect, useMemo, useRef } from 'react';
import mergeRefs from 'merge-refs';
import invariant from 'tiny-invariant';
import warning from 'warning';
import * as pdfjs from 'pdfjs-dist';

import StructTree from '../StructTree.js';

import usePageContext from '../shared/hooks/usePageContext.js';
import {
  cancelRunningTask,
  getDevicePixelRatio,
  isCancelException,
  makePageCallback,
} from '../shared/utils.js';

import type { RenderParameters } from 'pdfjs-dist/types/src/display/api.js';

const ANNOTATION_MODE = pdfjs.AnnotationMode;

type CanvasProps = {
  canvasRef?: React.Ref<HTMLCanvasElement>;
};

export default function Canvas(props: CanvasProps): React.ReactElement {
  const pageContext = usePageContext();

  invariant(pageContext, 'Unable to find Page context.');

  const mergedProps = { ...pageContext, ...props };
  const {
    _className,
    canvasBackground,
    devicePixelRatio = getDevicePixelRatio(),
    onRenderError: onRenderErrorProps,
    onRenderSuccess: onRenderSuccessProps,
    page,
    renderForms,
    renderTextLayer,
    rotate,
    scale,
  } = mergedProps;
  const { canvasRef } = props;

  invariant(page, 'Attempted to render page canvas, but no page was specified.');

  const canvasElement = useRef<HTMLCanvasElement>(null);

  /**
   * Called when a page is rendered successfully.
   */
  function onRenderSuccess() {
    if (!page) {
      // Impossible, but TypeScript doesn't know that
      return;
    }

    if (onRenderSuccessProps) {
      onRenderSuccessProps(makePageCallback(page, scale));
    }
  }

  /**
   * Called when a page fails to render.
   */
  function onRenderError(error: Error) {
    if (isCancelException(error)) {
      return;
    }

    warning(false, error.toString());

    if (onRenderErrorProps) {
      onRenderErrorProps(error);
    }
  }

  const renderViewport = useMemo(
    () => page.getViewport({ scale: scale * devicePixelRatio, rotation: rotate }),
    [devicePixelRatio, page, rotate, scale],
  );

  const viewport = useMemo(
    () => page.getViewport({ scale, rotation: rotate }),
    [page, rotate, scale],
  );

  // biome-ignore lint/correctness/useExhaustiveDependencies: Omitted callbacks so they are not called every time they change
  useEffect(
    function drawPageOnCanvas() {
      if (!page) {
        return;
      }

      // Ensures the canvas will be re-rendered from scratch. Otherwise all form data will stay.
      page.cleanup();

      const { current: canvas } = canvasElement;

      if (!canvas) {
        return;
      }

      canvas.width = renderViewport.width;
      canvas.height = renderViewport.height;

      canvas.style.width = `${Math.floor(viewport.width)}px`;
      canvas.style.height = `${Math.floor(viewport.height)}px`;
      canvas.style.visibility = 'hidden';

      const renderContext: RenderParameters = {
        annotationMode: renderForms ? ANNOTATION_MODE.ENABLE_FORMS : ANNOTATION_MODE.ENABLE,
        canvasContext: canvas.getContext('2d', { alpha: false }) as CanvasRenderingContext2D,
        viewport: renderViewport,
      };
      if (canvasBackground) {
        renderContext.background = canvasBackground;
      }

      const cancellable = page.render(renderContext);
      const runningTask = cancellable;

      cancellable.promise
        .then(() => {
          canvas.style.visibility = '';

          onRenderSuccess();
        })
        .catch(onRenderError);

      return () => cancelRunningTask(runningTask);
    },
    [canvasBackground, page, renderForms, renderViewport, viewport],
  );

  const cleanup = useCallback(() => {
    const { current: canvas } = canvasElement;

    /**
     * Zeroing the width and height causes most browsers to release graphics
     * resources immediately, which can greatly reduce memory consumption.
     */
    if (canvas) {
      canvas.width = 0;
      canvas.height = 0;
    }
  }, []);

  useEffect(() => cleanup, [cleanup]);

  return (
    <canvas
      className={`${_className}__canvas`}
      dir="ltr"
      ref={mergeRefs(canvasRef, canvasElement)}
      style={{
        display: 'block',
        userSelect: 'none',
      }}
    >
      {renderTextLayer ? <StructTree /> : null}
    </canvas>
  );
}
@@ -0,0 +1 @@
module.exports={A:{A:{"1":"F A B","2":"K D E mC"},B:{"1":"0 9 C L M G N O P Q H R S T U V W X Y Z a b c d e f g h i j k l m n o p q r s t u v w x y z AB BB CB DB EB FB GB HB IB JB KB LB MB NB OB I"},C:{"1":"0 1 2 3 4 5 6 7 8 9 J PB K D E F A B C L M G N O P QB RB SB TB UB VB WB XB YB ZB aB bB cB dB eB fB gB hB iB jB kB lB mB nB oB pB qB rB sB tB uB vB MC wB NC xB yB zB 0B 1B 2B 3B 4B 5B 6B 7B 8B 9B AC BC CC DC Q H R OC S T U V W X Y Z a b c d e f g h i j k l m n o p q r s t u v w x y z AB BB CB DB EB FB GB HB IB JB KB LB MB NB OB I PC EC QC RC oC pC","2":"nC LC qC rC"},D:{"1":"0 1 2 3 4 5 6 7 8 9 J PB K D E F A B C L M G N O P QB RB SB TB UB VB WB XB YB ZB aB bB cB dB eB fB gB hB iB jB kB lB mB nB oB pB qB rB sB tB uB vB MC wB NC xB yB zB 0B 1B 2B 3B 4B 5B 6B 7B 8B 9B AC BC CC DC Q H R S T U V W X Y Z a b c d e f g h i j k l m n o p q r s t u v w x y z AB BB CB DB EB FB GB HB IB JB KB LB MB NB OB I PC EC QC RC"},E:{"1":"K D E F A B C L M G tC uC vC wC TC FC GC xC yC zC UC VC HC 0C IC WC XC YC ZC aC 1C JC bC cC dC eC fC 2C KC gC hC iC jC 3C","2":"J sC SC","16":"PB"},F:{"1":"0 1 2 3 4 5 6 7 8 B C G N O P QB RB SB TB UB VB WB XB YB ZB aB bB cB dB eB fB gB hB iB jB kB lB mB nB oB pB qB rB sB tB uB vB wB xB yB zB 0B 1B 2B 3B 4B 5B 6B 7B 8B 9B AC BC CC DC Q H R OC S T U V W X Y Z a b c d e f g h i j k l m n o p q r s t u v w x y z FC kC 8C GC","2":"F 4C 5C 6C 7C"},G:{"1":"E 9C lC AD BD CD DD ED FD GD HD ID JD KD LD MD ND OD PD QD RD SD UC VC HC TD IC WC XC YC ZC aC UD JC bC cC dC eC fC VD KC gC hC iC jC","16":"SC"},H:{"1":"WD"},I:{"1":"LC J I ZD aD lC bD cD","16":"XD YD"},J:{"1":"D A"},K:{"1":"B C H FC kC GC","2":"A"},L:{"1":"I"},M:{"1":"EC"},N:{"1":"A B"},O:{"1":"HC"},P:{"1":"1 2 3 4 5 6 7 8 J dD eD fD gD hD TC iD jD kD lD mD IC JC KC nD"},Q:{"1":"oD"},R:{"1":"pD"},S:{"1":"qD rD"}},B:1,C:"document.head",D:true};
@@ -0,0 +1,41 @@
"use strict";

Object.defineProperty(exports, "__esModule", {
  value: true
});
exports.default = _iterableToArrayLimit;
function _iterableToArrayLimit(arr, i) {
  var iterator = arr == null ? null : typeof Symbol !== "undefined" && arr[Symbol.iterator] || arr["@@iterator"];
  if (iterator == null) return;
  var _arr = [];
  var iteratorNormalCompletion = true;
  var didIteratorError = false;
  var step, iteratorError, next, _return;
  try {
    next = (iterator = iterator.call(arr)).next;
    if (i === 0) {
      if (Object(iterator) !== iterator) return;
      iteratorNormalCompletion = false;
    } else {
      for (; !(iteratorNormalCompletion = (step = next.call(iterator)).done); iteratorNormalCompletion = true) {
        _arr.push(step.value);
        if (_arr.length === i) break;
      }
    }
  } catch (err) {
    didIteratorError = true;
    iteratorError = err;
  } finally {
    try {
      if (!iteratorNormalCompletion && iterator["return"] != null) {
        _return = iterator["return"]();
        if (Object(_return) !== _return) return;
      }
    } finally {
      if (didIteratorError) throw iteratorError;
    }
  }
  return _arr;
}

//# sourceMappingURL=iterableToArrayLimit.js.map
@@ -0,0 +1,36 @@
declare namespace stripJsonComments {
	interface Options {
		/**
		Replace comments with whitespace instead of stripping them entirely.

		@default true
		*/
		readonly whitespace?: boolean;
	}
}

/**
Strip comments from JSON. Lets you use comments in your JSON files!

It will replace single-line comments `//` and multi-line comments `/**\/` with whitespace. This allows JSON error positions to remain as close as possible to the original source.

@param jsonString - Accepts a string with JSON.
@returns A JSON string without comments.

@example
```
const json = `{
	// Rainbows
	"unicorn": "cake"
}`;

JSON.parse(stripJsonComments(json));
//=> {unicorn: 'cake'}
```
*/
declare function stripJsonComments(
	jsonString: string,
	options?: stripJsonComments.Options
): string;

export = stripJsonComments;
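
A minimal usage sketch for the `whitespace` option declared above. The CommonJS require style follows the `export =` declaration; the exact replacement characters are an assumption based on the doc comment, not verified output.

```js
const stripJsonComments = require('strip-json-comments');

const json = `{
	// Rainbows
	"unicorn": "cake"
}`;

// Default (whitespace: true): comments are replaced with whitespace, so
// positions reported by later JSON.parse errors still match the source.
JSON.parse(stripJsonComments(json));
//=> {unicorn: 'cake'}

// whitespace: false strips comments outright (assumed per the Options
// interface above); the parsed value is the same, only offsets shift.
JSON.parse(stripJsonComments(json, {whitespace: false}));
//=> {unicorn: 'cake'}
```
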
@@ -0,0 +1,46 @@
"use strict";
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    var desc = Object.getOwnPropertyDescriptor(m, k);
    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
      desc = { enumerable: true, get: function() { return m[k]; } };
    }
    Object.defineProperty(o, k2, desc);
}) : (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
    Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
    o["default"] = v;
});
var __importStar = (this && this.__importStar) || function (mod) {
    if (mod && mod.__esModule) return mod;
    var result = {};
    if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
    __setModuleDefault(result, mod);
    return result;
};
var __importDefault = (this && this.__importDefault) || function (mod) {
    return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.usePageContext = exports.useOutlineContext = exports.useDocumentContext = exports.Thumbnail = exports.Page = exports.Outline = exports.Document = exports.pdfjs = void 0;
const pdfjs = __importStar(require("pdfjs-dist"));
exports.pdfjs = pdfjs;
const Document_js_1 = __importDefault(require("./Document.js"));
exports.Document = Document_js_1.default;
const Outline_js_1 = __importDefault(require("./Outline.js"));
exports.Outline = Outline_js_1.default;
const Page_js_1 = __importDefault(require("./Page.js"));
exports.Page = Page_js_1.default;
const Thumbnail_js_1 = __importDefault(require("./Thumbnail.js"));
exports.Thumbnail = Thumbnail_js_1.default;
const useDocumentContext_js_1 = __importDefault(require("./shared/hooks/useDocumentContext.js"));
exports.useDocumentContext = useDocumentContext_js_1.default;
const useOutlineContext_js_1 = __importDefault(require("./shared/hooks/useOutlineContext.js"));
exports.useOutlineContext = useOutlineContext_js_1.default;
const usePageContext_js_1 = __importDefault(require("./shared/hooks/usePageContext.js"));
exports.usePageContext = usePageContext_js_1.default;
require("./pdf.worker.entry.js");
@@ -0,0 +1,41 @@
{
	"name": "globals",
	"version": "11.12.0",
	"description": "Global identifiers from different JavaScript environments",
	"license": "MIT",
	"repository": "sindresorhus/globals",
	"author": {
		"name": "Sindre Sorhus",
		"email": "sindresorhus@gmail.com",
		"url": "sindresorhus.com"
	},
	"engines": {
		"node": ">=4"
	},
	"scripts": {
		"test": "xo && ava"
	},
	"files": [
		"index.js",
		"globals.json"
	],
	"keywords": [
		"globals",
		"global",
		"identifiers",
		"variables",
		"vars",
		"jshint",
		"eslint",
		"environments"
	],
	"devDependencies": {
		"ava": "0.21.0",
		"xo": "0.18.0"
	},
	"xo": {
		"ignores": [
			"get-browser-globals.js"
		]
	}
}
@@ -0,0 +1,80 @@
'use strict';
module.exports = function generate__limitItems(it, $keyword, $ruleType) {
  var out = ' ';
  var $lvl = it.level;
  var $dataLvl = it.dataLevel;
  var $schema = it.schema[$keyword];
  var $schemaPath = it.schemaPath + it.util.getProperty($keyword);
  var $errSchemaPath = it.errSchemaPath + '/' + $keyword;
  var $breakOnError = !it.opts.allErrors;
  var $errorKeyword;
  var $data = 'data' + ($dataLvl || '');
  var $isData = it.opts.$data && $schema && $schema.$data,
    $schemaValue;
  if ($isData) {
    out += ' var schema' + ($lvl) + ' = ' + (it.util.getData($schema.$data, $dataLvl, it.dataPathArr)) + '; ';
    $schemaValue = 'schema' + $lvl;
  } else {
    $schemaValue = $schema;
  }
  if (!($isData || typeof $schema == 'number')) {
    throw new Error($keyword + ' must be number');
  }
  var $op = $keyword == 'maxItems' ? '>' : '<';
  out += 'if ( ';
  if ($isData) {
    out += ' (' + ($schemaValue) + ' !== undefined && typeof ' + ($schemaValue) + ' != \'number\') || ';
  }
  out += ' ' + ($data) + '.length ' + ($op) + ' ' + ($schemaValue) + ') { ';
  var $errorKeyword = $keyword;
  var $$outStack = $$outStack || [];
  $$outStack.push(out);
  out = ''; /* istanbul ignore else */
  if (it.createErrors !== false) {
    out += ' { keyword: \'' + ($errorKeyword || '_limitItems') + '\' , dataPath: (dataPath || \'\') + ' + (it.errorPath) + ' , schemaPath: ' + (it.util.toQuotedString($errSchemaPath)) + ' , params: { limit: ' + ($schemaValue) + ' } ';
    if (it.opts.messages !== false) {
      out += ' , message: \'should NOT have ';
      if ($keyword == 'maxItems') {
        out += 'more';
      } else {
        out += 'fewer';
      }
      out += ' than ';
      if ($isData) {
        out += '\' + ' + ($schemaValue) + ' + \'';
      } else {
        out += '' + ($schema);
      }
      out += ' items\' ';
    }
    if (it.opts.verbose) {
      out += ' , schema: ';
      if ($isData) {
        out += 'validate.schema' + ($schemaPath);
      } else {
        out += '' + ($schema);
      }
      out += ' , parentSchema: validate.schema' + (it.schemaPath) + ' , data: ' + ($data) + ' ';
    }
    out += ' } ';
  } else {
    out += ' {} ';
  }
  var __err = out;
  out = $$outStack.pop();
  if (!it.compositeRule && $breakOnError) {
    /* istanbul ignore if */
    if (it.async) {
      out += ' throw new ValidationError([' + (__err) + ']); ';
    } else {
      out += ' validate.errors = [' + (__err) + ']; return false; ';
    }
  } else {
    out += ' var err = ' + (__err) + '; if (vErrors === null) vErrors = [err]; else vErrors.push(err); errors++; ';
  }
  out += '} ';
  if ($breakOnError) {
    out += ' else { ';
  }
  return out;
}
@@ -0,0 +1 @@
module.exports={A:{A:{"1":"B","2":"K D E F A mC"},B:{"1":"0 9 C L M G N O P Q H R S T U V W X Y Z a b c d e f g h i j k l m n o p q r s t u v w x y z AB BB CB DB EB FB GB HB IB JB KB LB MB NB OB I"},C:{"1":"0 9 SB TB UB VB WB XB YB ZB aB bB cB dB eB fB gB hB iB jB kB lB mB nB oB pB qB rB sB tB uB vB MC wB NC xB yB zB 0B 1B 2B 3B 4B 5B 6B 7B 8B 9B AC BC CC DC Q H R OC S T U V W X Y Z a b c d e f g h i j k l m n o p q r s t u v w x y z AB BB CB DB EB FB GB HB IB JB KB LB MB NB OB I PC EC QC RC oC pC","2":"1 2 3 4 5 6 7 8 nC LC J PB K D E F A B C L M G N O P QB RB qC rC"},D:{"1":"0 5 6 7 8 9 RB SB TB UB VB WB XB YB ZB aB bB cB dB eB fB gB hB iB jB kB lB mB nB oB pB qB rB sB tB uB vB MC wB NC xB yB zB 0B 1B 2B 3B 4B 5B 6B 7B 8B 9B AC BC CC DC Q H R S T U V W X Y Z a b c d e f g h i j k l m n o p q r s t u v w x y z AB BB CB DB EB FB GB HB IB JB KB LB MB NB OB I PC EC QC RC","2":"1 2 3 4 J PB K D E F A B C L M G N O P QB"},E:{"1":"A B C L M G TC FC GC xC yC zC UC VC HC 0C IC WC XC YC ZC aC 1C JC bC cC dC eC fC 2C KC gC hC iC jC 3C","2":"J PB K D E F sC SC tC uC vC wC"},F:{"1":"0 1 2 3 4 5 6 7 8 G N O P QB RB SB TB UB VB WB XB YB ZB aB bB cB dB eB fB gB hB iB jB kB lB mB nB oB pB qB rB sB tB uB vB wB xB yB zB 0B 1B 2B 3B 4B 5B 6B 7B 8B 9B AC BC CC DC Q H R OC S T U V W X Y Z a b c d e f g h i j k l m n o p q r s t u v w x y z","2":"F B C 4C 5C 6C 7C FC kC 8C GC"},G:{"1":"GD HD ID JD KD LD MD ND OD PD QD RD SD UC VC HC TD IC WC XC YC ZC aC UD JC bC cC dC eC fC VD KC gC hC iC jC","2":"E SC 9C lC AD BD CD DD ED FD"},H:{"2":"WD"},I:{"1":"I bD cD","2":"LC J XD YD ZD aD lC"},J:{"2":"D A"},K:{"1":"H","2":"A B C FC kC GC"},L:{"1":"I"},M:{"1":"EC"},N:{"1":"B","2":"A"},O:{"1":"HC"},P:{"1":"1 2 3 4 5 6 7 8 J dD eD fD gD hD TC iD jD kD lD mD IC JC KC nD"},Q:{"1":"oD"},R:{"1":"pD"},S:{"1":"rD","2":"qD"}},B:6,C:"Internationalization API",D:true};
@@ -0,0 +1,15 @@
#!/usr/bin/env node
/* eslint no-var: 0 */

var parser = require("..");
var fs = require("fs");

var filename = process.argv[2];
if (!filename) {
  console.error("no filename specified");
} else {
  var file = fs.readFileSync(filename, "utf8");
  var ast = parser.parse(file);

  console.log(JSON.stringify(ast, null, " "));
}
@@ -0,0 +1,4 @@
import parse = require('./parse')
import stringify = require('./stringify')

export {parse, stringify}
@@ -0,0 +1,358 @@
/**
 * @fileoverview Rule to check empty newline between class members
 * @author 薛定谔的猫<hh_2013@foxmail.com>
 * @deprecated in ESLint v8.53.0
 */
"use strict";

//------------------------------------------------------------------------------
// Requirements
//------------------------------------------------------------------------------

const astUtils = require("./utils/ast-utils");

//------------------------------------------------------------------------------
// Helpers
//------------------------------------------------------------------------------

/**
 * Types of class members.
 * Each has a `test` method that checks whether a given class member node matches the type.
 * @private
 */
const ClassMemberTypes = {
	"*": { test: () => true },
	field: { test: node => node.type === "PropertyDefinition" },
	method: { test: node => node.type === "MethodDefinition" },
};

//------------------------------------------------------------------------------
// Rule Definition
//------------------------------------------------------------------------------

/** @type {import('../shared/types').Rule} */
module.exports = {
	meta: {
		deprecated: {
			message: "Formatting rules are being moved out of ESLint core.",
			url: "https://eslint.org/blog/2023/10/deprecating-formatting-rules/",
			deprecatedSince: "8.53.0",
			availableUntil: "10.0.0",
			replacedBy: [
				{
					message:
						"ESLint Stylistic now maintains deprecated stylistic core rules.",
					url: "https://eslint.style/guide/migration",
					plugin: {
						name: "@stylistic/eslint-plugin-js",
						url: "https://eslint.style/packages/js",
					},
					rule: {
						name: "lines-between-class-members",
						url: "https://eslint.style/rules/js/lines-between-class-members",
					},
				},
			],
		},
		type: "layout",

		docs: {
			description:
				"Require or disallow an empty line between class members",
			recommended: false,
			url: "https://eslint.org/docs/latest/rules/lines-between-class-members",
		},

		fixable: "whitespace",

		schema: [
			{
				anyOf: [
					{
						type: "object",
						properties: {
							enforce: {
								type: "array",
								items: {
									type: "object",
									properties: {
										blankLine: {
											enum: ["always", "never"],
										},
										prev: {
											enum: ["method", "field", "*"],
										},
										next: {
											enum: ["method", "field", "*"],
										},
									},
									additionalProperties: false,
									required: ["blankLine", "prev", "next"],
								},
								minItems: 1,
							},
						},
						additionalProperties: false,
						required: ["enforce"],
					},
					{
						enum: ["always", "never"],
					},
				],
			},
			{
				type: "object",
				properties: {
					exceptAfterSingleLine: {
						type: "boolean",
						default: false,
					},
				},
				additionalProperties: false,
			},
		],
		messages: {
			never: "Unexpected blank line between class members.",
			always: "Expected blank line between class members.",
		},
	},

	create(context) {
		const options = [];

		options[0] = context.options[0] || "always";
		options[1] = context.options[1] || { exceptAfterSingleLine: false };

		const configureList =
			typeof options[0] === "object"
				? options[0].enforce
				: [{ blankLine: options[0], prev: "*", next: "*" }];
		const sourceCode = context.sourceCode;

		/**
		 * Gets a pair of tokens that should be used to check lines between two class member nodes.
		 *
		 * In most cases, this returns the very last token of the current node and
		 * the very first token of the next node.
		 * For example:
		 *
		 *     class C {
		 *         x = 1; // curLast: `;` nextFirst: `in`
		 *         in = 2
		 *     }
		 *
		 * There is only one exception. If the given node ends with a semicolon, and it looks like
		 * a semicolon-less style's semicolon - one that is not on the same line as the preceding
		 * token, but is on the line where the next class member starts - this returns the preceding
		 * token and the semicolon as boundary tokens.
		 * For example:
		 *
		 *     class C {
		 *         x = 1 // curLast: `1` nextFirst: `;`
		 *         ;in = 2
		 *     }
		 * When determining the desired layout of the code, we should treat this semicolon as
		 * a part of the next class member node instead of the one it technically belongs to.
		 * @param {ASTNode} curNode Current class member node.
		 * @param {ASTNode} nextNode Next class member node.
		 * @returns {{curLast: Token, nextFirst: Token}} The boundary tokens of the two nodes.
		 * @private
		 */
		function getBoundaryTokens(curNode, nextNode) {
			const lastToken = sourceCode.getLastToken(curNode);
			const prevToken = sourceCode.getTokenBefore(lastToken);
			const nextToken = sourceCode.getFirstToken(nextNode); // skip possible lone `;` between nodes

			const isSemicolonLessStyle =
				astUtils.isSemicolonToken(lastToken) &&
				!astUtils.isTokenOnSameLine(prevToken, lastToken) &&
				astUtils.isTokenOnSameLine(lastToken, nextToken);

			return isSemicolonLessStyle
				? { curLast: prevToken, nextFirst: lastToken }
				: { curLast: lastToken, nextFirst: nextToken };
		}

		/**
		 * Return the last token among the consecutive tokens that are no more than `maxLine` lines apart, before the first token in the next member.
		 * @param {Token} prevLastToken The last token in the previous member node.
		 * @param {Token} nextFirstToken The first token in the next member node.
		 * @param {number} maxLine The maximum number of allowed line difference between consecutive tokens.
		 * @returns {Token} The last token among the consecutive tokens.
		 */
		function findLastConsecutiveTokenAfter(
			prevLastToken,
			nextFirstToken,
			maxLine,
		) {
			const after = sourceCode.getTokenAfter(prevLastToken, {
				includeComments: true,
			});

			if (
				after !== nextFirstToken &&
				after.loc.start.line - prevLastToken.loc.end.line <= maxLine
			) {
				return findLastConsecutiveTokenAfter(
					after,
					nextFirstToken,
					maxLine,
				);
			}
			return prevLastToken;
		}

		/**
		 * Return the first token among the consecutive tokens that are no more than `maxLine` lines apart, after the last token in the previous member.
		 * @param {Token} nextFirstToken The first token in the next member node.
		 * @param {Token} prevLastToken The last token in the previous member node.
		 * @param {number} maxLine The maximum number of allowed line difference between consecutive tokens.
		 * @returns {Token} The first token among the consecutive tokens.
		 */
		function findFirstConsecutiveTokenBefore(
			nextFirstToken,
			prevLastToken,
			maxLine,
		) {
			const before = sourceCode.getTokenBefore(nextFirstToken, {
				includeComments: true,
			});

			if (
				before !== prevLastToken &&
				nextFirstToken.loc.start.line - before.loc.end.line <= maxLine
			) {
				return findFirstConsecutiveTokenBefore(
					before,
					prevLastToken,
					maxLine,
				);
			}
			return nextFirstToken;
		}

		/**
		 * Checks if there is a token or comment between two tokens.
		 * @param {Token} before The token before.
		 * @param {Token} after The token after.
		 * @returns {boolean} True if there is a token or comment between two tokens.
		 */
		function hasTokenOrCommentBetween(before, after) {
			return (
				sourceCode.getTokensBetween(before, after, {
					includeComments: true,
				}).length !== 0
			);
		}

		/**
		 * Checks whether the given node matches the given type.
		 * @param {ASTNode} node The class member node to check.
		 * @param {string} type The class member type to check.
		 * @returns {boolean} `true` if the class member node matched the type.
		 * @private
		 */
		function match(node, type) {
			return ClassMemberTypes[type].test(node);
		}

		/**
		 * Finds the last matched configuration from the configureList.
		 * @param {ASTNode} prevNode The previous node to match.
		 * @param {ASTNode} nextNode The current node to match.
		 * @returns {string|null} Padding type or `null` if no matches were found.
		 * @private
		 */
		function getPaddingType(prevNode, nextNode) {
			for (let i = configureList.length - 1; i >= 0; --i) {
				const configure = configureList[i];
				const matched =
					match(prevNode, configure.prev) &&
					match(nextNode, configure.next);

				if (matched) {
					return configure.blankLine;
				}
			}
			return null;
		}

		return {
			ClassBody(node) {
				const body = node.body;

				for (let i = 0; i < body.length - 1; i++) {
					const curFirst = sourceCode.getFirstToken(body[i]);
					const { curLast, nextFirst } = getBoundaryTokens(
						body[i],
						body[i + 1],
					);
					const isMulti = !astUtils.isTokenOnSameLine(
						curFirst,
						curLast,
					);
					const skip = !isMulti && options[1].exceptAfterSingleLine;
					const beforePadding = findLastConsecutiveTokenAfter(
						curLast,
						nextFirst,
						1,
					);
					const afterPadding = findFirstConsecutiveTokenBefore(
						nextFirst,
						curLast,
						1,
					);
					const isPadded =
						afterPadding.loc.start.line -
							beforePadding.loc.end.line >
						1;
					const hasTokenInPadding = hasTokenOrCommentBetween(
						beforePadding,
						afterPadding,
					);
					const curLineLastToken = findLastConsecutiveTokenAfter(
						curLast,
						nextFirst,
						0,
					);
					const paddingType = getPaddingType(body[i], body[i + 1]);

					if (paddingType === "never" && isPadded) {
						context.report({
							node: body[i + 1],
							messageId: "never",

							fix(fixer) {
								if (hasTokenInPadding) {
									return null;
								}
								return fixer.replaceTextRange(
									[
										beforePadding.range[1],
										afterPadding.range[0],
									],
									"\n",
								);
							},
						});
					} else if (paddingType === "always" && !skip && !isPadded) {
						context.report({
							node: body[i + 1],
							messageId: "always",

							fix(fixer) {
								if (hasTokenInPadding) {
									return null;
								}
								return fixer.insertTextAfter(
									curLineLastToken,
									"\n",
								);
							},
						});
					}
				}
			},
		};
	},
};
@@ -0,0 +1,141 @@
# `@humanfs/node`

by [Nicholas C. Zakas](https://humanwhocodes.com)

If you find this useful, please consider supporting my work with a [donation](https://humanwhocodes.com/donate) or [nominate me](https://stars.github.com/nominate/) for a GitHub Star.

## Description

The `hfs` bindings for use in Node.js and Node.js-compatible runtimes.

> [!WARNING]
> This project is **experimental** and may change significantly before v1.0.0. Use with caution and definitely not in production!

## Installation

Install using your favorite package manager:

```shell
npm install @humanfs/node

# or

pnpm install @humanfs/node

# or

yarn add @humanfs/node

# or

bun install @humanfs/node
```

## Usage

The easiest way to use hfs in your project is to import the `hfs` object:

```js
import { hfs } from "@humanfs/node";
```

Then, you can use the API methods:

```js
// 1. Files

// read from a text file
const text = await hfs.text("file.txt");

// read from a JSON file
const json = await hfs.json("file.json");

// read raw bytes from a text file
const arrayBuffer = await hfs.arrayBuffer("file.txt");

// write text to a file
await hfs.write("file.txt", "Hello world!");

// write bytes to a file
await hfs.write("file.txt", new TextEncoder().encode("Hello world!"));

// append text to a file
await hfs.append("file.txt", "Hello world!");

// append bytes to a file
await hfs.append("file.txt", new TextEncoder().encode("Hello world!"));

// does the file exist?
const found = await hfs.isFile("file.txt");

// how big is the file?
const size = await hfs.size("file.txt");

// when was the file modified?
const mtime = await hfs.lastModified("file.txt");

// copy a file from one location to another
await hfs.copy("file.txt", "file-copy.txt");

// move a file from one location to another
await hfs.move("file.txt", "renamed.txt");

// delete a file
await hfs.delete("file.txt");

// 2. Directories

// create a directory
await hfs.createDirectory("dir");

// create a directory recursively
await hfs.createDirectory("dir/subdir");

// does the directory exist?
const dirFound = await hfs.isDirectory("dir");

// copy the entire directory
hfs.copyAll("from-dir", "to-dir");

// move the entire directory
hfs.moveAll("from-dir", "to-dir");

// delete a directory
await hfs.delete("dir");

// delete a non-empty directory
await hfs.deleteAll("dir");
```

If you'd like to create your own instance, import the `NodeHfs` constructor:

```js
import { NodeHfs } from "@humanfs/node";
import fsp from "fs/promises";

const hfs = new NodeHfs();

// optionally specify the fs/promises object to use
const hfs = new NodeHfs({ fsp });
```

If you'd like to use just the impl, import the `NodeHfsImpl` constructor:

```js
import { NodeHfsImpl } from "@humanfs/node";
import fsp from "fs/promises";

const hfs = new NodeHfsImpl();

// optionally specify the fs/promises object to use
const hfs = new NodeHfsImpl({ fsp });
```

## Errors Handled

* `ENOENT` - in most cases, these errors are handled silently (see the sketch after this list).
* `ENFILE` and `EMFILE` - calls that result in these errors are retried for up to 60 seconds before giving up for good.
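
As referenced in the list above, here is a minimal sketch of how that error handling surfaces to callers. The assumption that reading a missing file resolves to `undefined` instead of rejecting is inferred from the `ENOENT` note, not from verified API docs; the file names are placeholders.

```js
import { hfs } from "@humanfs/node";

// Assumption: with ENOENT handled silently, reading a missing file
// resolves to undefined rather than throwing.
const contents = await hfs.text("missing-file.txt");

if (contents === undefined) {
	console.log("File not found; continuing without it.");
}

// ENFILE/EMFILE retries happen inside hfs itself, so callers simply await.
await hfs.write("file.txt", "Hello world!");
```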

## License

Apache 2.0
@@ -0,0 +1,91 @@
/**
 * Generate secure URL-friendly unique ID.
 *
 * By default, the ID will have 21 symbols to have a collision probability
 * similar to UUID v4.
 *
 * ```js
 * import { nanoid } from 'nanoid'
 * model.id = nanoid() //=> "Uakgb_J5m9g-0JDMbcJqL"
 * ```
 *
 * @param size Size of the ID. The default size is 21.
 * @returns A random string.
 */
export function nanoid(size?: number): string

/**
 * Generate secure unique ID with custom alphabet.
 *
 * Alphabet must contain 256 symbols or less. Otherwise, the generator
 * will not be secure.
 *
 * @param alphabet Alphabet used to generate the ID.
 * @param defaultSize Size of the ID. The default size is 21.
 * @returns A random string generator.
 *
 * ```js
 * const { customAlphabet } = require('nanoid')
 * const nanoid = customAlphabet('0123456789абвгдеё', 5)
 * nanoid() //=> "8ё56а"
 * ```
 */
export function customAlphabet(
  alphabet: string,
  defaultSize?: number
): (size?: number) => string

/**
 * Generate unique ID with custom random generator and alphabet.
 *
 * Alphabet must contain 256 symbols or less. Otherwise, the generator
 * will not be secure.
 *
 * ```js
 * import { customRandom } from 'nanoid/format'
 *
 * const nanoid = customRandom('abcdef', 5, size => {
 *   const random = []
 *   for (let i = 0; i < size; i++) {
 *     random.push(randomByte())
 *   }
 *   return random
 * })
 *
 * nanoid() //=> "fbaef"
 * ```
 *
 * @param alphabet Alphabet used to generate a random string.
 * @param size Size of the random string.
 * @param random A random bytes generator.
 * @returns A random string generator.
 */
export function customRandom(
  alphabet: string,
  size: number,
  random: (bytes: number) => Uint8Array
): () => string

/**
 * URL safe symbols.
 *
 * ```js
 * import { urlAlphabet } from 'nanoid'
 * const nanoid = customAlphabet(urlAlphabet, 10)
 * nanoid() //=> "Uakgb_J5m9"
 * ```
 */
export const urlAlphabet: string

/**
 * Generate an array of random bytes collected from hardware noise.
 *
 * ```js
 * import { customRandom, random } from 'nanoid'
 * const nanoid = customRandom("abcdef", 5, random)
 * ```
 *
 * @param bytes Size of the array.
 * @returns An array of random bytes.
 */
export function random(bytes: number): Uint8Array
@@ -0,0 +1 @@
module.exports={A:{A:{"2":"K D E F A mC","132":"B"},B:{"1":"0 9 G N O P Q H R S T U V W X Y Z a b c d e f g h i j k l m n o p q r s t u v w x y z AB BB CB DB EB FB GB HB IB JB KB LB MB NB OB I","16":"C L M"},C:{"1":"0 1 2 3 4 5 6 7 8 9 QB RB SB TB UB VB WB XB YB ZB aB bB cB dB eB fB gB hB iB jB kB lB mB nB oB pB qB rB sB tB uB vB MC wB NC xB yB zB 0B 1B 2B 3B 4B 5B 6B 7B 8B 9B AC BC CC DC Q H R OC S T U V W X Y Z a b c d e f g h i j k l m n o p q r s t u v w x y z AB BB CB DB EB FB GB HB IB JB KB LB MB NB OB I PC EC QC RC oC pC","2":"nC LC J PB K D E F A B C L M G N O P qC rC"},D:{"1":"0 1 2 3 4 5 6 7 8 9 G N O P QB RB SB TB UB VB WB XB YB ZB aB bB cB dB eB fB gB hB iB jB kB lB mB nB oB pB qB rB sB tB uB vB MC wB NC xB yB zB 0B 1B 2B 3B 4B 5B 6B 7B 8B 9B AC BC CC DC Q H R S T U V W X Y Z a b c d e f g h i j k l m n o p q r s t u v w x y z AB BB CB DB EB FB GB HB IB JB KB LB MB NB OB I PC EC QC RC","16":"J PB K D E F A B C L M"},E:{"1":"J PB K D E F A B C L M G tC uC vC wC TC FC GC xC yC zC UC VC HC 0C IC WC XC YC ZC aC 1C JC bC cC dC eC fC 2C KC gC hC iC jC 3C","16":"sC SC"},F:{"1":"0 1 2 3 4 5 6 7 8 C G N O P QB RB SB TB UB VB WB XB YB ZB aB bB cB dB eB fB gB hB iB jB kB lB mB nB oB pB qB rB sB tB uB vB wB xB yB zB 0B 1B 2B 3B 4B 5B 6B 7B 8B 9B AC BC CC DC Q H R OC S T U V W X Y Z a b c d e f g h i j k l m n o p q r s t u v w x y z GC","2":"F B 4C 5C 6C 7C FC kC 8C"},G:{"1":"E SC 9C lC AD BD CD DD ED FD GD HD ID JD KD LD MD ND OD PD QD RD SD UC VC HC TD IC WC XC YC ZC aC UD JC bC cC dC eC fC VD KC gC hC iC jC"},H:{"2":"WD"},I:{"2":"LC J I XD YD ZD aD lC bD cD"},J:{"16":"D A"},K:{"2":"A B C H FC kC GC"},L:{"2":"I"},M:{"2":"EC"},N:{"16":"A B"},O:{"2":"HC"},P:{"2":"1 2 3 4 5 6 7 8 J dD eD fD gD hD TC iD jD kD lD mD IC JC KC nD"},Q:{"1":"oD"},R:{"2":"pD"},S:{"2":"qD rD"}},B:6,C:"Built-in PDF viewer",D:true};