update

@@ -0,0 +1,34 @@
base64-js
=========

`base64-js` does basic base64 encoding/decoding in pure JS.

[build status](http://travis-ci.org/beatgammit/base64-js)

Many browsers already have base64 encoding/decoding functionality, but it is for text data, not all-purpose binary data.

Sometimes encoding/decoding binary data in the browser is useful, and that is what this module does.

## install

With [npm](https://npmjs.org) do:

`npm install base64-js` and `var base64js = require('base64-js')`

For use in web browsers do:

`<script src="base64js.min.js"></script>`

[Get supported base64-js with the Tidelift Subscription](https://tidelift.com/subscription/pkg/npm-base64-js?utm_source=npm-base64-js&utm_medium=referral&utm_campaign=readme)

## methods

`base64js` has three exposed functions, `byteLength`, `toByteArray` and `fromByteArray`, each of which takes a single argument.

* `byteLength` - Takes a base64 string and returns the length of the resulting byte array
* `toByteArray` - Takes a base64 string and returns a byte array
* `fromByteArray` - Takes a byte array and returns a base64 string
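
A quick usage sketch (the `"Hello"` round-trip below is only illustrative):

```js
var base64js = require('base64-js')

// encode: byte array -> base64 string
var bytes = new Uint8Array([72, 101, 108, 108, 111]) // "Hello"
var b64 = base64js.fromByteArray(bytes)              // 'SGVsbG8='

// decode: base64 string -> byte array
base64js.byteLength(b64)  // 5, the length of the decoded byte array
base64js.toByteArray(b64) // Uint8Array [72, 101, 108, 108, 111]
```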

## license

MIT
@@ -0,0 +1,3 @@
<svg width="12" height="13" viewBox="0 0 12 13" fill="none" xmlns="http://www.w3.org/2000/svg">
<path d="M5.375 7.625V11.875C5.375 12.0408 5.44085 12.1997 5.55806 12.3169C5.67527 12.4342 5.83424 12.5 6 12.5C6.16576 12.5 6.32473 12.4342 6.44194 12.3169C6.55915 12.1997 6.625 12.0408 6.625 11.875V7.625L7.125 7.125H11.375C11.5408 7.125 11.6997 7.05915 11.8169 6.94194C11.9342 6.82473 12 6.66576 12 6.5C12 6.33424 11.9342 6.17527 11.8169 6.05806C11.6997 5.94085 11.5408 5.875 11.375 5.875H7.125L6.625 5.375V1.125C6.625 0.95924 6.55915 0.800269 6.44194 0.683058C6.32473 0.565848 6.16576 0.5 6 0.5C5.83424 0.5 5.67527 0.565848 5.55806 0.683058C5.44085 0.800269 5.375 0.95924 5.375 1.125V5.375L4.875 5.875H0.625C0.45924 5.875 0.300269 5.94085 0.183058 6.05806C0.065848 6.17527 0 6.33424 0 6.5C0 6.66576 0.065848 6.82473 0.183058 6.94194C0.300269 7.05915 0.45924 7.125 0.625 7.125H4.762L5.375 7.625Z" fill="black"/>
</svg>
After Width: | Height: | Size: 920 B |
@@ -0,0 +1,62 @@
|
||||
/**
|
||||
* @fileoverview Rule to disallow returning value from constructor.
|
||||
* @author Pig Fang <https://github.com/g-plane>
|
||||
*/
|
||||
|
||||
"use strict";
|
||||
|
||||
//------------------------------------------------------------------------------
|
||||
// Rule Definition
|
||||
//------------------------------------------------------------------------------
|
||||
|
||||
/** @type {import('../shared/types').Rule} */
|
||||
module.exports = {
|
||||
meta: {
|
||||
type: "problem",
|
||||
|
||||
docs: {
|
||||
description: "Disallow returning value from constructor",
|
||||
recommended: false,
|
||||
url: "https://eslint.org/docs/latest/rules/no-constructor-return",
|
||||
},
|
||||
|
||||
schema: [],
|
||||
|
||||
fixable: null,
|
||||
|
||||
messages: {
|
||||
unexpected: "Unexpected return statement in constructor.",
|
||||
},
|
||||
},
|
||||
|
||||
create(context) {
|
||||
const stack = [];
|
||||
|
||||
return {
|
||||
onCodePathStart(_, node) {
|
||||
stack.push(node);
|
||||
},
|
||||
onCodePathEnd() {
|
||||
stack.pop();
|
||||
},
|
||||
ReturnStatement(node) {
|
||||
const last = stack.at(-1);
|
||||
|
||||
if (!last.parent) {
|
||||
return;
|
||||
}
|
||||
|
||||
if (
|
||||
last.parent.type === "MethodDefinition" &&
|
||||
last.parent.kind === "constructor" &&
|
||||
node.argument
|
||||
) {
|
||||
context.report({
|
||||
node,
|
||||
messageId: "unexpected",
|
||||
});
|
||||
}
|
||||
},
|
||||
};
|
||||
},
|
||||
};
|
||||
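For reference (illustrative, not from this changeset): a minimal sketch of what the rule above reports, based on its `MethodDefinition`/`node.argument` check.

```js
/* eslint no-constructor-return: "error" */

class Bad {
    constructor(value) {
        return { value }; // reported: the constructor returns a value
    }
}

class Fine {
    constructor(value) {
        if (value == null) {
            return; // not reported: a bare `return` has no argument
        }
        this.value = value;
    }
}
```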
@@ -0,0 +1,84 @@
|
||||
import type {
|
||||
AnyRouter,
|
||||
Constrain,
|
||||
InferFrom,
|
||||
InferMaskFrom,
|
||||
InferMaskTo,
|
||||
InferSelected,
|
||||
InferShouldThrow,
|
||||
InferStrict,
|
||||
InferTo,
|
||||
RegisteredRouter,
|
||||
} from '@tanstack/router-core'
|
||||
import type { LinkComponentProps } from './link'
|
||||
import type { UseParamsOptions } from './useParams'
|
||||
import type { UseSearchOptions } from './useSearch'
|
||||
|
||||
export type ValidateLinkOptions<
|
||||
TRouter extends AnyRouter = RegisteredRouter,
|
||||
TOptions = unknown,
|
||||
TDefaultFrom extends string = string,
|
||||
TComp = 'a',
|
||||
> = Constrain<
|
||||
TOptions,
|
||||
LinkComponentProps<
|
||||
TComp,
|
||||
TRouter,
|
||||
InferFrom<TOptions, TDefaultFrom>,
|
||||
InferTo<TOptions>,
|
||||
InferMaskFrom<TOptions>,
|
||||
InferMaskTo<TOptions>
|
||||
>
|
||||
>
|
||||
|
||||
/**
|
||||
* @internal
|
||||
*/
|
||||
export type InferStructuralSharing<TOptions> = TOptions extends {
|
||||
structuralSharing: infer TStructuralSharing
|
||||
}
|
||||
? TStructuralSharing
|
||||
: unknown
|
||||
|
||||
export type ValidateUseSearchOptions<
|
||||
TOptions,
|
||||
TRouter extends AnyRouter = RegisteredRouter,
|
||||
> = Constrain<
|
||||
TOptions,
|
||||
UseSearchOptions<
|
||||
TRouter,
|
||||
InferFrom<TOptions>,
|
||||
InferStrict<TOptions>,
|
||||
InferShouldThrow<TOptions>,
|
||||
InferSelected<TOptions>,
|
||||
InferStructuralSharing<TOptions>
|
||||
>
|
||||
>
|
||||
|
||||
export type ValidateUseParamsOptions<
|
||||
TOptions,
|
||||
TRouter extends AnyRouter = RegisteredRouter,
|
||||
> = Constrain<
|
||||
TOptions,
|
||||
UseParamsOptions<
|
||||
TRouter,
|
||||
InferFrom<TOptions>,
|
||||
InferStrict<TOptions>,
|
||||
InferShouldThrow<TOptions>,
|
||||
InferSelected<TOptions>,
|
||||
InferSelected<TOptions>
|
||||
>
|
||||
>
|
||||
export type ValidateLinkOptionsArray<
|
||||
TRouter extends AnyRouter = RegisteredRouter,
|
||||
TOptions extends ReadonlyArray<any> = ReadonlyArray<unknown>,
|
||||
TDefaultFrom extends string = string,
|
||||
TComp = 'a',
|
||||
> = {
|
||||
[K in keyof TOptions]: ValidateLinkOptions<
|
||||
TRouter,
|
||||
TOptions[K],
|
||||
TDefaultFrom,
|
||||
TComp
|
||||
>
|
||||
}
|
||||
File diff suppressed because one or more lines are too long
@@ -0,0 +1,15 @@
"use strict";

Object.defineProperty(exports, "__esModule", {
  value: true
});
exports.default = _toConsumableArray;
var _arrayWithoutHoles = require("./arrayWithoutHoles.js");
var _iterableToArray = require("./iterableToArray.js");
var _unsupportedIterableToArray = require("./unsupportedIterableToArray.js");
var _nonIterableSpread = require("./nonIterableSpread.js");
function _toConsumableArray(arr) {
  return (0, _arrayWithoutHoles.default)(arr) || (0, _iterableToArray.default)(arr) || (0, _unsupportedIterableToArray.default)(arr) || (0, _nonIterableSpread.default)();
}

//# sourceMappingURL=toConsumableArray.js.map
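For reference (illustrative, not from this changeset): this Babel helper backs spread of iterables; roughly, `[...value]` in source code compiles to a call like the sketch below (the require path is only an assumption for the example).

```js
var _toConsumableArray = require("./toConsumableArray.js").default;

function copyAll(value) {
  // equivalent of `[...value]` in the original source
  return _toConsumableArray(value);
}

copyAll([1, 2, 3]);       // [1, 2, 3]
copyAll(new Set([1, 2])); // [1, 2]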
@@ -0,0 +1,168 @@
/**
 * @fileoverview Rule to disallow `\8` and `\9` escape sequences in string literals.
 * @author Milos Djermanovic
 */

"use strict";

//------------------------------------------------------------------------------
// Helpers
//------------------------------------------------------------------------------

const QUICK_TEST_REGEX = /\\[89]/u;

/**
 * Returns unicode escape sequence that represents the given character.
 * @param {string} character A single code unit.
 * @returns {string} "\uXXXX" sequence.
 */
function getUnicodeEscape(character) {
	return `\\u${character.charCodeAt(0).toString(16).padStart(4, "0")}`;
}

//------------------------------------------------------------------------------
// Rule Definition
//------------------------------------------------------------------------------

/** @type {import('../shared/types').Rule} */
module.exports = {
	meta: {
		type: "suggestion",

		docs: {
			description:
				"Disallow `\\8` and `\\9` escape sequences in string literals",
			recommended: true,
			url: "https://eslint.org/docs/latest/rules/no-nonoctal-decimal-escape",
		},

		hasSuggestions: true,

		schema: [],

		messages: {
			decimalEscape: "Don't use '{{decimalEscape}}' escape sequence.",

			// suggestions
			refactor:
				"Replace '{{original}}' with '{{replacement}}'. This maintains the current functionality.",
			escapeBackslash:
				"Replace '{{original}}' with '{{replacement}}' to include the actual backslash character.",
		},
	},

	create(context) {
		const sourceCode = context.sourceCode;

		/**
		 * Creates a new Suggestion object.
		 * @param {string} messageId "refactor" or "escapeBackslash".
		 * @param {int[]} range The range to replace.
		 * @param {string} replacement New text for the range.
		 * @returns {Object} Suggestion
		 */
		function createSuggestion(messageId, range, replacement) {
			return {
				messageId,
				data: {
					original: sourceCode.getText().slice(...range),
					replacement,
				},
				fix(fixer) {
					return fixer.replaceTextRange(range, replacement);
				},
			};
		}

		return {
			Literal(node) {
				if (typeof node.value !== "string") {
					return;
				}

				if (!QUICK_TEST_REGEX.test(node.raw)) {
					return;
				}

				const regex =
					/(?:[^\\]|(?<previousEscape>\\.))*?(?<decimalEscape>\\[89])/suy;
				let match;

				while ((match = regex.exec(node.raw))) {
					const { previousEscape, decimalEscape } = match.groups;
					const decimalEscapeRangeEnd =
						node.range[0] + match.index + match[0].length;
					const decimalEscapeRangeStart =
						decimalEscapeRangeEnd - decimalEscape.length;
					const decimalEscapeRange = [
						decimalEscapeRangeStart,
						decimalEscapeRangeEnd,
					];
					const suggest = [];

					// When `regex` is matched, `previousEscape` can only capture characters adjacent to `decimalEscape`
					if (previousEscape === "\\0") {
						/*
						 * Now we have a NULL escape "\0" immediately followed by a decimal escape, e.g.: "\0\8".
						 * Fixing this to "\08" would turn "\0" into a legacy octal escape. To avoid producing
						 * an octal escape while fixing a decimal escape, we provide different suggestions.
						 */
						suggest.push(
							createSuggestion(
								// "\0\8" -> "\u00008"
								"refactor",
								[
									decimalEscapeRangeStart -
										previousEscape.length,
									decimalEscapeRangeEnd,
								],
								`${getUnicodeEscape("\0")}${decimalEscape[1]}`,
							),
							createSuggestion(
								// "\8" -> "\u0038"
								"refactor",
								decimalEscapeRange,
								getUnicodeEscape(decimalEscape[1]),
							),
						);
					} else {
						suggest.push(
							createSuggestion(
								// "\8" -> "8"
								"refactor",
								decimalEscapeRange,
								decimalEscape[1],
							),
						);
					}

					suggest.push(
						createSuggestion(
							// "\8" -> "\\8"
							"escapeBackslash",
							decimalEscapeRange,
							`\\${decimalEscape}`,
						),
					);

					context.report({
						node,
						loc: {
							start: sourceCode.getLocFromIndex(
								decimalEscapeRangeStart,
							),
							end: sourceCode.getLocFromIndex(
								decimalEscapeRangeEnd,
							),
						},
						messageId: "decimalEscape",
						data: {
							decimalEscape,
						},
						suggest,
					});
				}
			},
		};
	},
};
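For reference (illustrative, not from this changeset): a sketch of the escape sequences this rule flags and the suggestions it offers.

```js
/* eslint no-nonoctal-decimal-escape: "error" */

var a = "\8";   // reported; suggestions: "8" (refactor) or "\\8" (escape the backslash)
var b = "\0\8"; // reported; replacing just "\8" with "8" would create a legacy octal
                // escape ("\08"), so the rule suggests "\u00008" or "\u0038" instead

var ok1 = "8";   // fine
var ok2 = "\\9"; // fine: the backslash itself is escaped
```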
@@ -0,0 +1,129 @@
"use strict";

Object.defineProperty(exports, "__esModule", {
  value: true
});
exports.default = normalizeFile;
function _fs() {
  const data = require("fs");
  _fs = function () {
    return data;
  };
  return data;
}
function _path() {
  const data = require("path");
  _path = function () {
    return data;
  };
  return data;
}
function _debug() {
  const data = require("debug");
  _debug = function () {
    return data;
  };
  return data;
}
function _t() {
  const data = require("@babel/types");
  _t = function () {
    return data;
  };
  return data;
}
function _convertSourceMap() {
  const data = require("convert-source-map");
  _convertSourceMap = function () {
    return data;
  };
  return data;
}
var _file = require("./file/file.js");
var _index = require("../parser/index.js");
var _cloneDeep = require("./util/clone-deep.js");
const {
  file,
  traverseFast
} = _t();
const debug = _debug()("babel:transform:file");
const INLINE_SOURCEMAP_REGEX = /^[@#]\s+sourceMappingURL=data:(?:application|text)\/json;(?:charset[:=]\S+?;)?base64,.*$/;
const EXTERNAL_SOURCEMAP_REGEX = /^[@#][ \t]+sourceMappingURL=([^\s'"`]+)[ \t]*$/;
function* normalizeFile(pluginPasses, options, code, ast) {
  code = `${code || ""}`;
  if (ast) {
    if (ast.type === "Program") {
      ast = file(ast, [], []);
    } else if (ast.type !== "File") {
      throw new Error("AST root must be a Program or File node");
    }
    if (options.cloneInputAst) {
      ast = (0, _cloneDeep.default)(ast);
    }
  } else {
    ast = yield* (0, _index.default)(pluginPasses, options, code);
  }
  let inputMap = null;
  if (options.inputSourceMap !== false) {
    if (typeof options.inputSourceMap === "object") {
      inputMap = _convertSourceMap().fromObject(options.inputSourceMap);
    }
    if (!inputMap) {
      const lastComment = extractComments(INLINE_SOURCEMAP_REGEX, ast);
      if (lastComment) {
        try {
          inputMap = _convertSourceMap().fromComment("//" + lastComment);
        } catch (err) {
          {
            debug("discarding unknown inline input sourcemap");
          }
        }
      }
    }
    if (!inputMap) {
      const lastComment = extractComments(EXTERNAL_SOURCEMAP_REGEX, ast);
      if (typeof options.filename === "string" && lastComment) {
        try {
          const match = EXTERNAL_SOURCEMAP_REGEX.exec(lastComment);
          const inputMapContent = _fs().readFileSync(_path().resolve(_path().dirname(options.filename), match[1]), "utf8");
          inputMap = _convertSourceMap().fromJSON(inputMapContent);
        } catch (err) {
          debug("discarding unknown file input sourcemap", err);
        }
      } else if (lastComment) {
        debug("discarding un-loadable file input sourcemap");
      }
    }
  }
  return new _file.default(options, {
    code,
    ast: ast,
    inputMap
  });
}
function extractCommentsFromList(regex, comments, lastComment) {
  if (comments) {
    comments = comments.filter(({
      value
    }) => {
      if (regex.test(value)) {
        lastComment = value;
        return false;
      }
      return true;
    });
  }
  return [comments, lastComment];
}
function extractComments(regex, ast) {
  let lastComment = null;
  traverseFast(ast, node => {
    [node.leadingComments, lastComment] = extractCommentsFromList(regex, node.leadingComments, lastComment);
    [node.innerComments, lastComment] = extractCommentsFromList(regex, node.innerComments, lastComment);
    [node.trailingComments, lastComment] = extractCommentsFromList(regex, node.trailingComments, lastComment);
  });
  return lastComment;
}
0 && 0;

//# sourceMappingURL=normalize-file.js.map
@@ -0,0 +1,9 @@
import { Derived, Store } from '@tanstack/store';
export * from '@tanstack/store';
/**
 * @private
 */
export type NoInfer<T> = [T][T extends any ? 0 : never];
export declare function useStore<TState, TSelected = NoInfer<TState>>(store: Store<TState, any>, selector?: (state: NoInfer<TState>) => TSelected): TSelected;
export declare function useStore<TState, TSelected = NoInfer<TState>>(store: Derived<TState, any>, selector?: (state: NoInfer<TState>) => TSelected): TSelected;
export declare function shallow<T>(objA: T, objB: T): boolean;
@@ -0,0 +1 @@
module.exports = require("./data/plugins.json");
@@ -0,0 +1 @@
module.exports={A:{A:{"2436":"K D E F A B mC"},B:{"260":"O P","2436":"C L M G N","8196":"0 9 Q H R S T U V W X Y Z a b c d e f g h i j k l m n o p q r s t u v w x y z AB BB CB DB EB FB GB HB IB JB KB LB MB NB OB I"},C:{"2":"1 2 nC LC J PB K D E F A B C L M G N O P QB qC rC","772":"3 4 5 6 7 8 RB SB TB UB VB WB XB YB ZB aB bB cB dB","4100":"0 9 eB fB gB hB iB jB kB lB mB nB oB pB qB rB sB tB uB vB MC wB NC xB yB zB 0B 1B 2B 3B 4B 5B 6B 7B 8B 9B AC BC CC DC Q H R OC S T U V W X Y Z a b c d e f g h i j k l m n o p q r s t u v w x y z AB BB CB DB EB FB GB HB IB JB KB LB MB NB OB I PC EC QC RC oC pC"},D:{"2":"J PB K D E F A B C","2564":"1 2 3 4 5 6 7 8 L M G N O P QB RB SB TB UB VB WB XB YB ZB aB bB cB dB eB fB","8196":"0 9 vB MC wB NC xB yB zB 0B 1B 2B 3B 4B 5B 6B 7B 8B 9B AC BC CC DC Q H R S T U V W X Y Z a b c d e f g h i j k l m n o p q r s t u v w x y z AB BB CB DB EB FB GB HB IB JB KB LB MB NB OB I PC EC QC RC","10244":"gB hB iB jB kB lB mB nB oB pB qB rB sB tB uB"},E:{"1":"C L M G GC xC yC zC UC VC HC 0C IC WC XC YC ZC aC 1C JC bC cC dC eC fC 2C KC gC hC iC jC 3C","16":"sC SC","2308":"A B TC FC","2820":"J PB K D E F tC uC vC wC"},F:{"2":"F B 4C 5C 6C 7C FC kC 8C","16":"C","516":"GC","2564":"1 2 3 4 5 6 7 8 G N O P QB RB SB","8196":"0 iB jB kB lB mB nB oB pB qB rB sB tB uB vB wB xB yB zB 0B 1B 2B 3B 4B 5B 6B 7B 8B 9B AC BC CC DC Q H R OC S T U V W X Y Z a b c d e f g h i j k l m n o p q r s t u v w x y z","10244":"TB UB VB WB XB YB ZB aB bB cB dB eB fB gB hB"},G:{"1":"KD LD MD ND OD PD QD RD SD UC VC HC TD IC WC XC YC ZC aC UD JC bC cC dC eC fC VD KC gC hC iC jC","2":"SC 9C lC","2820":"E AD BD CD DD ED FD GD HD ID JD"},H:{"2":"WD"},I:{"2":"LC J XD YD ZD aD lC","260":"I","2308":"bD cD"},J:{"2":"D","2308":"A"},K:{"2":"A B C FC kC","16":"GC","8196":"H"},L:{"8196":"I"},M:{"1028":"EC"},N:{"2":"A B"},O:{"8196":"HC"},P:{"2052":"dD eD","2308":"J","8196":"1 2 3 4 5 6 7 8 fD gD hD TC iD jD kD lD mD IC JC KC nD"},Q:{"8196":"oD"},R:{"8196":"pD"},S:{"4100":"qD rD"}},B:5,C:"Synchronous Clipboard API",D:true};
@@ -0,0 +1,279 @@
/// <reference path="./types/importMeta.d.ts" />

// CSS modules
type CSSModuleClasses = { readonly [key: string]: string }

declare module '*.module.css' {
  const classes: CSSModuleClasses
  export default classes
}
declare module '*.module.scss' {
  const classes: CSSModuleClasses
  export default classes
}
declare module '*.module.sass' {
  const classes: CSSModuleClasses
  export default classes
}
declare module '*.module.less' {
  const classes: CSSModuleClasses
  export default classes
}
declare module '*.module.styl' {
  const classes: CSSModuleClasses
  export default classes
}
declare module '*.module.stylus' {
  const classes: CSSModuleClasses
  export default classes
}
declare module '*.module.pcss' {
  const classes: CSSModuleClasses
  export default classes
}
declare module '*.module.sss' {
  const classes: CSSModuleClasses
  export default classes
}

// CSS
declare module '*.css' {}
declare module '*.scss' {}
declare module '*.sass' {}
declare module '*.less' {}
declare module '*.styl' {}
declare module '*.stylus' {}
declare module '*.pcss' {}
declare module '*.sss' {}

// Built-in asset types
// see `src/node/constants.ts`

// images
declare module '*.apng' {
  const src: string
  export default src
}
declare module '*.bmp' {
  const src: string
  export default src
}
declare module '*.png' {
  const src: string
  export default src
}
declare module '*.jpg' {
  const src: string
  export default src
}
declare module '*.jpeg' {
  const src: string
  export default src
}
declare module '*.jfif' {
  const src: string
  export default src
}
declare module '*.pjpeg' {
  const src: string
  export default src
}
declare module '*.pjp' {
  const src: string
  export default src
}
declare module '*.gif' {
  const src: string
  export default src
}
declare module '*.svg' {
  const src: string
  export default src
}
declare module '*.ico' {
  const src: string
  export default src
}
declare module '*.webp' {
  const src: string
  export default src
}
declare module '*.avif' {
  const src: string
  export default src
}
declare module '*.cur' {
  const src: string
  export default src
}
declare module '*.jxl' {
  const src: string
  export default src
}

// media
declare module '*.mp4' {
  const src: string
  export default src
}
declare module '*.webm' {
  const src: string
  export default src
}
declare module '*.ogg' {
  const src: string
  export default src
}
declare module '*.mp3' {
  const src: string
  export default src
}
declare module '*.wav' {
  const src: string
  export default src
}
declare module '*.flac' {
  const src: string
  export default src
}
declare module '*.aac' {
  const src: string
  export default src
}
declare module '*.opus' {
  const src: string
  export default src
}
declare module '*.mov' {
  const src: string
  export default src
}
declare module '*.m4a' {
  const src: string
  export default src
}
declare module '*.vtt' {
  const src: string
  export default src
}

// fonts
declare module '*.woff' {
  const src: string
  export default src
}
declare module '*.woff2' {
  const src: string
  export default src
}
declare module '*.eot' {
  const src: string
  export default src
}
declare module '*.ttf' {
  const src: string
  export default src
}
declare module '*.otf' {
  const src: string
  export default src
}

// other
declare module '*.webmanifest' {
  const src: string
  export default src
}
declare module '*.pdf' {
  const src: string
  export default src
}
declare module '*.txt' {
  const src: string
  export default src
}

// wasm?init
declare module '*.wasm?init' {
  const initWasm: (
    options?: WebAssembly.Imports,
  ) => Promise<WebAssembly.Instance>
  export default initWasm
}

// web worker
declare module '*?worker' {
  const workerConstructor: {
    new (options?: { name?: string }): Worker
  }
  export default workerConstructor
}

declare module '*?worker&inline' {
  const workerConstructor: {
    new (options?: { name?: string }): Worker
  }
  export default workerConstructor
}

declare module '*?worker&url' {
  const src: string
  export default src
}

declare module '*?sharedworker' {
  const sharedWorkerConstructor: {
    new (options?: { name?: string }): SharedWorker
  }
  export default sharedWorkerConstructor
}

declare module '*?sharedworker&inline' {
  const sharedWorkerConstructor: {
    new (options?: { name?: string }): SharedWorker
  }
  export default sharedWorkerConstructor
}

declare module '*?sharedworker&url' {
  const src: string
  export default src
}

declare module '*?raw' {
  const src: string
  export default src
}

declare module '*?url' {
  const src: string
  export default src
}

declare module '*?inline' {
  const src: string
  export default src
}

declare module '*?no-inline' {
  const src: string
  export default src
}

declare module '*?url&inline' {
  const src: string
  export default src
}

declare module '*?url&no-inline' {
  const src: string
  export default src
}

declare interface VitePreloadErrorEvent extends Event {
  payload: Error
}

declare interface WindowEventMap {
  'vite:preloadError': VitePreloadErrorEvent
}
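For reference (illustrative, not from this changeset): with these ambient declarations in scope, asset and worker imports type-check along these lines (the file names are hypothetical).

```js
import logoUrl from './logo.png'          // string URL of the built asset
import readme from './README.md?raw'      // string: raw file contents
import initWasm from './lib.wasm?init'    // (imports?) => Promise<WebAssembly.Instance>
import MyWorker from './worker.js?worker' // Worker constructor

const worker = new MyWorker({ name: 'w1' })

initWasm().then((instance) => {
  console.log(instance.exports)
})

window.addEventListener('vite:preloadError', (event) => {
  console.error(event.payload) // the underlying Error
})
```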
File diff suppressed because it is too large
@@ -0,0 +1,51 @@
import type {
  AnyContext,
  AnyPathParams,
  AnyRoute,
  UpdatableRouteOptions,
} from './route'
import type { AnyValidator } from './validators'

export interface FileRouteTypes {
  fileRoutesByFullPath: any
  fullPaths: any
  to: any
  fileRoutesByTo: any
  id: any
  fileRoutesById: any
}

export type InferFileRouteTypes<TRouteTree extends AnyRoute> =
  unknown extends TRouteTree['types']['fileRouteTypes']
    ? never
    : TRouteTree['types']['fileRouteTypes'] extends FileRouteTypes
      ? TRouteTree['types']['fileRouteTypes']
      : never

export interface FileRoutesByPath {
  // '/': {
  //   parentRoute: typeof rootRoute
  // }
}

export type LazyRouteOptions = Pick<
  UpdatableRouteOptions<
    AnyRoute,
    string,
    string,
    AnyPathParams,
    AnyValidator,
    {},
    AnyContext,
    AnyContext,
    AnyContext,
    AnyContext
  >,
  'component' | 'errorComponent' | 'pendingComponent' | 'notFoundComponent'
>

export interface LazyRoute {
  options: {
    id: string
  } & LazyRouteOptions
}
Binary file not shown
@@ -0,0 +1,612 @@
/**
 * @fileoverview Rule to require or disallow newlines between statements
 * @author Toru Nagashima
 * @deprecated in ESLint v8.53.0
 */

"use strict";

//------------------------------------------------------------------------------
// Requirements
//------------------------------------------------------------------------------

const astUtils = require("./utils/ast-utils");

//------------------------------------------------------------------------------
// Helpers
//------------------------------------------------------------------------------

const LT = `[${Array.from(astUtils.LINEBREAKS).join("")}]`;
const PADDING_LINE_SEQUENCE = new RegExp(
	String.raw`^(\s*?${LT})\s*${LT}(\s*;?)$`,
	"u",
);
const CJS_EXPORT = /^(?:module\s*\.\s*)?exports(?:\s*\.|\s*\[|$)/u;
const CJS_IMPORT = /^require\(/u;

/**
 * Creates tester which check if a node starts with specific keyword.
 * @param {string} keyword The keyword to test.
 * @returns {Object} the created tester.
 * @private
 */
function newKeywordTester(keyword) {
	return {
		test: (node, sourceCode) =>
			sourceCode.getFirstToken(node).value === keyword,
	};
}

/**
 * Creates tester which check if a node starts with specific keyword and spans a single line.
 * @param {string} keyword The keyword to test.
 * @returns {Object} the created tester.
 * @private
 */
function newSinglelineKeywordTester(keyword) {
	return {
		test: (node, sourceCode) =>
			node.loc.start.line === node.loc.end.line &&
			sourceCode.getFirstToken(node).value === keyword,
	};
}

/**
 * Creates tester which check if a node starts with specific keyword and spans multiple lines.
 * @param {string} keyword The keyword to test.
 * @returns {Object} the created tester.
 * @private
 */
function newMultilineKeywordTester(keyword) {
	return {
		test: (node, sourceCode) =>
			node.loc.start.line !== node.loc.end.line &&
			sourceCode.getFirstToken(node).value === keyword,
	};
}

/**
 * Creates tester which check if a node is specific type.
 * @param {string} type The node type to test.
 * @returns {Object} the created tester.
 * @private
 */
function newNodeTypeTester(type) {
	return {
		test: node => node.type === type,
	};
}

/**
 * Checks the given node is an expression statement of IIFE.
 * @param {ASTNode} node The node to check.
 * @returns {boolean} `true` if the node is an expression statement of IIFE.
 * @private
 */
function isIIFEStatement(node) {
	if (node.type === "ExpressionStatement") {
		let call = astUtils.skipChainExpression(node.expression);

		if (call.type === "UnaryExpression") {
			call = astUtils.skipChainExpression(call.argument);
		}
		return (
			call.type === "CallExpression" && astUtils.isFunction(call.callee)
		);
	}
	return false;
}

/**
 * Checks whether the given node is a block-like statement.
 * This checks the last token of the node is the closing brace of a block.
 * @param {SourceCode} sourceCode The source code to get tokens.
 * @param {ASTNode} node The node to check.
 * @returns {boolean} `true` if the node is a block-like statement.
 * @private
 */
function isBlockLikeStatement(sourceCode, node) {
	// do-while with a block is a block-like statement.
	if (
		node.type === "DoWhileStatement" &&
		node.body.type === "BlockStatement"
	) {
		return true;
	}

	/*
	 * IIFE is a block-like statement specially from
	 * JSCS#disallowPaddingNewLinesAfterBlocks.
	 */
	if (isIIFEStatement(node)) {
		return true;
	}

	// Checks the last token is a closing brace of blocks.
	const lastToken = sourceCode.getLastToken(
		node,
		astUtils.isNotSemicolonToken,
	);
	const belongingNode =
		lastToken && astUtils.isClosingBraceToken(lastToken)
			? sourceCode.getNodeByRangeIndex(lastToken.range[0])
			: null;

	return (
		Boolean(belongingNode) &&
		(belongingNode.type === "BlockStatement" ||
			belongingNode.type === "SwitchStatement")
	);
}

/**
 * Gets the actual last token.
 *
 * If a semicolon is semicolon-less style's semicolon, this ignores it.
 * For example:
 *
 *     foo()
 *     ;[1, 2, 3].forEach(bar)
 * @param {SourceCode} sourceCode The source code to get tokens.
 * @param {ASTNode} node The node to get.
 * @returns {Token} The actual last token.
 * @private
 */
function getActualLastToken(sourceCode, node) {
	const semiToken = sourceCode.getLastToken(node);
	const prevToken = sourceCode.getTokenBefore(semiToken);
	const nextToken = sourceCode.getTokenAfter(semiToken);
	const isSemicolonLessStyle = Boolean(
		prevToken &&
			nextToken &&
			prevToken.range[0] >= node.range[0] &&
			astUtils.isSemicolonToken(semiToken) &&
			semiToken.loc.start.line !== prevToken.loc.end.line &&
			semiToken.loc.end.line === nextToken.loc.start.line,
	);

	return isSemicolonLessStyle ? prevToken : semiToken;
}

/**
 * This returns the concatenation of the first 2 captured strings.
 * @param {string} _ Unused. Whole matched string.
 * @param {string} trailingSpaces The trailing spaces of the first line.
 * @param {string} indentSpaces The indentation spaces of the last line.
 * @returns {string} The concatenation of trailingSpaces and indentSpaces.
 * @private
 */
function replacerToRemovePaddingLines(_, trailingSpaces, indentSpaces) {
	return trailingSpaces + indentSpaces;
}

/**
 * Check and report statements for `any` configuration.
 * It does nothing.
 * @returns {void}
 * @private
 */
function verifyForAny() {}

/**
 * Check and report statements for `never` configuration.
 * This autofix removes blank lines between the given 2 statements.
 * However, if comments exist between 2 blank lines, it does not remove those
 * blank lines automatically.
 * @param {RuleContext} context The rule context to report.
 * @param {ASTNode} _ Unused. The previous node to check.
 * @param {ASTNode} nextNode The next node to check.
 * @param {Array<Token[]>} paddingLines The array of token pairs that blank
 * lines exist between the pair.
 * @returns {void}
 * @private
 */
function verifyForNever(context, _, nextNode, paddingLines) {
	if (paddingLines.length === 0) {
		return;
	}

	context.report({
		node: nextNode,
		messageId: "unexpectedBlankLine",
		fix(fixer) {
			if (paddingLines.length >= 2) {
				return null;
			}

			const prevToken = paddingLines[0][0];
			const nextToken = paddingLines[0][1];
			const start = prevToken.range[1];
			const end = nextToken.range[0];
			const text = context.sourceCode.text
				.slice(start, end)
				.replace(PADDING_LINE_SEQUENCE, replacerToRemovePaddingLines);

			return fixer.replaceTextRange([start, end], text);
		},
	});
}

/**
 * Check and report statements for `always` configuration.
 * This autofix inserts a blank line between the given 2 statements.
 * If the `prevNode` has trailing comments, it inserts a blank line after the
 * trailing comments.
 * @param {RuleContext} context The rule context to report.
 * @param {ASTNode} prevNode The previous node to check.
 * @param {ASTNode} nextNode The next node to check.
 * @param {Array<Token[]>} paddingLines The array of token pairs that blank
 * lines exist between the pair.
 * @returns {void}
 * @private
 */
function verifyForAlways(context, prevNode, nextNode, paddingLines) {
	if (paddingLines.length > 0) {
		return;
	}

	context.report({
		node: nextNode,
		messageId: "expectedBlankLine",
		fix(fixer) {
			const sourceCode = context.sourceCode;
			let prevToken = getActualLastToken(sourceCode, prevNode);
			const nextToken =
				sourceCode.getFirstTokenBetween(prevToken, nextNode, {
					includeComments: true,

					/**
					 * Skip the trailing comments of the previous node.
					 * This inserts a blank line after the last trailing comment.
					 *
					 * For example:
					 *
					 *     foo(); // trailing comment.
					 *     // comment.
					 *     bar();
					 *
					 * Get fixed to:
					 *
					 *     foo(); // trailing comment.
					 *
					 *     // comment.
					 *     bar();
					 * @param {Token} token The token to check.
					 * @returns {boolean} `true` if the token is not a trailing comment.
					 * @private
					 */
					filter(token) {
						if (astUtils.isTokenOnSameLine(prevToken, token)) {
							prevToken = token;
							return false;
						}
						return true;
					},
				}) || nextNode;
			const insertText = astUtils.isTokenOnSameLine(prevToken, nextToken)
				? "\n\n"
				: "\n";

			return fixer.insertTextAfter(prevToken, insertText);
		},
	});
}

/**
 * Types of blank lines.
 * `any`, `never`, and `always` are defined.
 * Those have `verify` method to check and report statements.
 * @private
 */
const PaddingTypes = {
	any: { verify: verifyForAny },
	never: { verify: verifyForNever },
	always: { verify: verifyForAlways },
};

/**
 * Types of statements.
 * Those have `test` method to check it matches to the given statement.
 * @private
 */
const StatementTypes = {
	"*": { test: () => true },
	"block-like": {
		test: (node, sourceCode) => isBlockLikeStatement(sourceCode, node),
	},
	"cjs-export": {
		test: (node, sourceCode) =>
			node.type === "ExpressionStatement" &&
			node.expression.type === "AssignmentExpression" &&
			CJS_EXPORT.test(sourceCode.getText(node.expression.left)),
	},
	"cjs-import": {
		test: (node, sourceCode) =>
			node.type === "VariableDeclaration" &&
			node.declarations.length > 0 &&
			Boolean(node.declarations[0].init) &&
			CJS_IMPORT.test(sourceCode.getText(node.declarations[0].init)),
	},
	directive: {
		test: astUtils.isDirective,
	},
	expression: {
		test: node =>
			node.type === "ExpressionStatement" && !astUtils.isDirective(node),
	},
	iife: {
		test: isIIFEStatement,
	},
	"multiline-block-like": {
		test: (node, sourceCode) =>
			node.loc.start.line !== node.loc.end.line &&
			isBlockLikeStatement(sourceCode, node),
	},
	"multiline-expression": {
		test: node =>
			node.loc.start.line !== node.loc.end.line &&
			node.type === "ExpressionStatement" &&
			!astUtils.isDirective(node),
	},

	"multiline-const": newMultilineKeywordTester("const"),
	"multiline-let": newMultilineKeywordTester("let"),
	"multiline-var": newMultilineKeywordTester("var"),
	"singleline-const": newSinglelineKeywordTester("const"),
	"singleline-let": newSinglelineKeywordTester("let"),
	"singleline-var": newSinglelineKeywordTester("var"),

	block: newNodeTypeTester("BlockStatement"),
	empty: newNodeTypeTester("EmptyStatement"),
	function: newNodeTypeTester("FunctionDeclaration"),

	break: newKeywordTester("break"),
	case: newKeywordTester("case"),
	class: newKeywordTester("class"),
	const: newKeywordTester("const"),
	continue: newKeywordTester("continue"),
	debugger: newKeywordTester("debugger"),
	default: newKeywordTester("default"),
	do: newKeywordTester("do"),
	export: newKeywordTester("export"),
	for: newKeywordTester("for"),
	if: newKeywordTester("if"),
	import: newKeywordTester("import"),
	let: newKeywordTester("let"),
	return: newKeywordTester("return"),
	switch: newKeywordTester("switch"),
	throw: newKeywordTester("throw"),
	try: newKeywordTester("try"),
	var: newKeywordTester("var"),
	while: newKeywordTester("while"),
	with: newKeywordTester("with"),
};

//------------------------------------------------------------------------------
// Rule Definition
//------------------------------------------------------------------------------

/** @type {import('../shared/types').Rule} */
module.exports = {
	meta: {
		deprecated: {
			message: "Formatting rules are being moved out of ESLint core.",
			url: "https://eslint.org/blog/2023/10/deprecating-formatting-rules/",
			deprecatedSince: "8.53.0",
			availableUntil: "10.0.0",
			replacedBy: [
				{
					message:
						"ESLint Stylistic now maintains deprecated stylistic core rules.",
					url: "https://eslint.style/guide/migration",
					plugin: {
						name: "@stylistic/eslint-plugin-js",
						url: "https://eslint.style/packages/js",
					},
					rule: {
						name: "padding-line-between-statements",
						url: "https://eslint.style/rules/js/padding-line-between-statements",
					},
				},
			],
		},
		type: "layout",

		docs: {
			description: "Require or disallow padding lines between statements",
			recommended: false,
			url: "https://eslint.org/docs/latest/rules/padding-line-between-statements",
		},

		fixable: "whitespace",

		schema: {
			definitions: {
				paddingType: {
					enum: Object.keys(PaddingTypes),
				},
				statementType: {
					anyOf: [
						{ enum: Object.keys(StatementTypes) },
						{
							type: "array",
							items: { enum: Object.keys(StatementTypes) },
							minItems: 1,
							uniqueItems: true,
						},
					],
				},
			},
			type: "array",
			items: {
				type: "object",
				properties: {
					blankLine: { $ref: "#/definitions/paddingType" },
					prev: { $ref: "#/definitions/statementType" },
					next: { $ref: "#/definitions/statementType" },
				},
				additionalProperties: false,
				required: ["blankLine", "prev", "next"],
			},
		},

		messages: {
			unexpectedBlankLine: "Unexpected blank line before this statement.",
			expectedBlankLine: "Expected blank line before this statement.",
		},
	},

	create(context) {
		const sourceCode = context.sourceCode;
		const configureList = context.options || [];
		let scopeInfo = null;

		/**
		 * Processes to enter to new scope.
		 * This manages the current previous statement.
		 * @returns {void}
		 * @private
		 */
		function enterScope() {
			scopeInfo = {
				upper: scopeInfo,
				prevNode: null,
			};
		}

		/**
		 * Processes to exit from the current scope.
		 * @returns {void}
		 * @private
		 */
		function exitScope() {
			scopeInfo = scopeInfo.upper;
		}

		/**
		 * Checks whether the given node matches the given type.
		 * @param {ASTNode} node The statement node to check.
		 * @param {string|string[]} type The statement type to check.
		 * @returns {boolean} `true` if the statement node matched the type.
		 * @private
		 */
		function match(node, type) {
			let innerStatementNode = node;

			while (innerStatementNode.type === "LabeledStatement") {
				innerStatementNode = innerStatementNode.body;
			}
			if (Array.isArray(type)) {
				return type.some(match.bind(null, innerStatementNode));
			}
			return StatementTypes[type].test(innerStatementNode, sourceCode);
		}

		/**
		 * Finds the last matched configure from configureList.
		 * @param {ASTNode} prevNode The previous statement to match.
		 * @param {ASTNode} nextNode The current statement to match.
		 * @returns {Object} The tester of the last matched configure.
		 * @private
		 */
		function getPaddingType(prevNode, nextNode) {
			for (let i = configureList.length - 1; i >= 0; --i) {
				const configure = configureList[i];
				const matched =
					match(prevNode, configure.prev) &&
					match(nextNode, configure.next);

				if (matched) {
					return PaddingTypes[configure.blankLine];
				}
			}
			return PaddingTypes.any;
		}

		/**
		 * Gets padding line sequences between the given 2 statements.
		 * Comments are separators of the padding line sequences.
		 * @param {ASTNode} prevNode The previous statement to count.
		 * @param {ASTNode} nextNode The current statement to count.
		 * @returns {Array<Token[]>} The array of token pairs.
		 * @private
		 */
		function getPaddingLineSequences(prevNode, nextNode) {
			const pairs = [];
			let prevToken = getActualLastToken(sourceCode, prevNode);

			if (nextNode.loc.start.line - prevToken.loc.end.line >= 2) {
				do {
					const token = sourceCode.getTokenAfter(prevToken, {
						includeComments: true,
					});

					if (token.loc.start.line - prevToken.loc.end.line >= 2) {
						pairs.push([prevToken, token]);
					}
					prevToken = token;
				} while (prevToken.range[0] < nextNode.range[0]);
			}

			return pairs;
		}

		/**
		 * Verify padding lines between the given node and the previous node.
		 * @param {ASTNode} node The node to verify.
		 * @returns {void}
		 * @private
		 */
		function verify(node) {
			const parentType = node.parent.type;
			const validParent =
				astUtils.STATEMENT_LIST_PARENTS.has(parentType) ||
				parentType === "SwitchStatement";

			if (!validParent) {
				return;
			}

			// Save this node as the current previous statement.
			const prevNode = scopeInfo.prevNode;

			// Verify.
			if (prevNode) {
				const type = getPaddingType(prevNode, node);
				const paddingLines = getPaddingLineSequences(prevNode, node);

				type.verify(context, prevNode, node, paddingLines);
			}

			scopeInfo.prevNode = node;
		}

		/**
		 * Verify padding lines between the given node and the previous node.
		 * Then process to enter to new scope.
		 * @param {ASTNode} node The node to verify.
		 * @returns {void}
		 * @private
		 */
		function verifyThenEnterScope(node) {
			verify(node);
			enterScope();
		}

		return {
			Program: enterScope,
			BlockStatement: enterScope,
			SwitchStatement: enterScope,
			StaticBlock: enterScope,
			"Program:exit": exitScope,
			"BlockStatement:exit": exitScope,
			"SwitchStatement:exit": exitScope,
			"StaticBlock:exit": exitScope,

			":statement": verify,

			SwitchCase: verifyThenEnterScope,
			"SwitchCase:exit": exitScope,
		};
	},
};
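For reference (illustrative, not from this changeset): the schema above accepts an array of `{ blankLine, prev, next }` objects, and `getPaddingType` walks it from the end, so the last matching entry wins. A typical configuration looks like this.

```js
// eslint.config.js (flat config), values are only an example
module.exports = [
    {
        rules: {
            "padding-line-between-statements": [
                "error",
                { blankLine: "always", prev: "*", next: "return" },
                { blankLine: "always", prev: ["const", "let", "var"], next: "*" },
                { blankLine: "any", prev: ["const", "let", "var"], next: ["const", "let", "var"] }
            ]
        }
    }
];
```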
@@ -0,0 +1,506 @@
|
||||
'use strict';
|
||||
|
||||
var compileSchema = require('./compile')
|
||||
, resolve = require('./compile/resolve')
|
||||
, Cache = require('./cache')
|
||||
, SchemaObject = require('./compile/schema_obj')
|
||||
, stableStringify = require('fast-json-stable-stringify')
|
||||
, formats = require('./compile/formats')
|
||||
, rules = require('./compile/rules')
|
||||
, $dataMetaSchema = require('./data')
|
||||
, util = require('./compile/util');
|
||||
|
||||
module.exports = Ajv;
|
||||
|
||||
Ajv.prototype.validate = validate;
|
||||
Ajv.prototype.compile = compile;
|
||||
Ajv.prototype.addSchema = addSchema;
|
||||
Ajv.prototype.addMetaSchema = addMetaSchema;
|
||||
Ajv.prototype.validateSchema = validateSchema;
|
||||
Ajv.prototype.getSchema = getSchema;
|
||||
Ajv.prototype.removeSchema = removeSchema;
|
||||
Ajv.prototype.addFormat = addFormat;
|
||||
Ajv.prototype.errorsText = errorsText;
|
||||
|
||||
Ajv.prototype._addSchema = _addSchema;
|
||||
Ajv.prototype._compile = _compile;
|
||||
|
||||
Ajv.prototype.compileAsync = require('./compile/async');
|
||||
var customKeyword = require('./keyword');
|
||||
Ajv.prototype.addKeyword = customKeyword.add;
|
||||
Ajv.prototype.getKeyword = customKeyword.get;
|
||||
Ajv.prototype.removeKeyword = customKeyword.remove;
|
||||
Ajv.prototype.validateKeyword = customKeyword.validate;
|
||||
|
||||
var errorClasses = require('./compile/error_classes');
|
||||
Ajv.ValidationError = errorClasses.Validation;
|
||||
Ajv.MissingRefError = errorClasses.MissingRef;
|
||||
Ajv.$dataMetaSchema = $dataMetaSchema;
|
||||
|
||||
var META_SCHEMA_ID = 'http://json-schema.org/draft-07/schema';
|
||||
|
||||
var META_IGNORE_OPTIONS = [ 'removeAdditional', 'useDefaults', 'coerceTypes', 'strictDefaults' ];
|
||||
var META_SUPPORT_DATA = ['/properties'];
|
||||
|
||||
/**
|
||||
* Creates validator instance.
|
||||
* Usage: `Ajv(opts)`
|
||||
* @param {Object} opts optional options
|
||||
* @return {Object} ajv instance
|
||||
*/
|
||||
function Ajv(opts) {
|
||||
if (!(this instanceof Ajv)) return new Ajv(opts);
|
||||
opts = this._opts = util.copy(opts) || {};
|
||||
setLogger(this);
|
||||
this._schemas = {};
|
||||
this._refs = {};
|
||||
this._fragments = {};
|
||||
this._formats = formats(opts.format);
|
||||
|
||||
this._cache = opts.cache || new Cache;
|
||||
this._loadingSchemas = {};
|
||||
this._compilations = [];
|
||||
this.RULES = rules();
|
||||
this._getId = chooseGetId(opts);
|
||||
|
||||
opts.loopRequired = opts.loopRequired || Infinity;
|
||||
if (opts.errorDataPath == 'property') opts._errorDataPathProperty = true;
|
||||
if (opts.serialize === undefined) opts.serialize = stableStringify;
|
||||
this._metaOpts = getMetaSchemaOptions(this);
|
||||
|
||||
if (opts.formats) addInitialFormats(this);
|
||||
if (opts.keywords) addInitialKeywords(this);
|
||||
addDefaultMetaSchema(this);
|
||||
if (typeof opts.meta == 'object') this.addMetaSchema(opts.meta);
|
||||
if (opts.nullable) this.addKeyword('nullable', {metaSchema: {type: 'boolean'}});
|
||||
addInitialSchemas(this);
|
||||
}
|
||||
|
||||
|
||||
|
||||
/**
|
||||
* Validate data using schema
|
||||
* Schema will be compiled and cached (using serialized JSON as key. [fast-json-stable-stringify](https://github.com/epoberezkin/fast-json-stable-stringify) is used to serialize.
|
||||
* @this Ajv
|
||||
* @param {String|Object} schemaKeyRef key, ref or schema object
|
||||
* @param {Any} data to be validated
|
||||
* @return {Boolean} validation result. Errors from the last validation will be available in `ajv.errors` (and also in compiled schema: `schema.errors`).
|
||||
*/
|
||||
function validate(schemaKeyRef, data) {
|
||||
var v;
|
||||
if (typeof schemaKeyRef == 'string') {
|
||||
v = this.getSchema(schemaKeyRef);
|
||||
if (!v) throw new Error('no schema with key or ref "' + schemaKeyRef + '"');
|
||||
} else {
|
||||
var schemaObj = this._addSchema(schemaKeyRef);
|
||||
v = schemaObj.validate || this._compile(schemaObj);
|
||||
}
|
||||
|
||||
var valid = v(data);
|
||||
if (v.$async !== true) this.errors = v.errors;
|
||||
return valid;
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Create validating function for passed schema.
|
||||
* @this Ajv
|
||||
* @param {Object} schema schema object
|
||||
* @param {Boolean} _meta true if schema is a meta-schema. Used internally to compile meta schemas of custom keywords.
|
||||
* @return {Function} validating function
|
||||
*/
|
||||
function compile(schema, _meta) {
|
||||
var schemaObj = this._addSchema(schema, undefined, _meta);
|
||||
return schemaObj.validate || this._compile(schemaObj);
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Adds schema to the instance.
|
||||
* @this Ajv
|
||||
* @param {Object|Array} schema schema or array of schemas. If array is passed, `key` and other parameters will be ignored.
|
||||
* @param {String} key Optional schema key. Can be passed to `validate` method instead of schema object or id/ref. One schema per instance can have empty `id` and `key`.
|
||||
* @param {Boolean} _skipValidation true to skip schema validation. Used internally, option validateSchema should be used instead.
|
||||
* @param {Boolean} _meta true if schema is a meta-schema. Used internally, addMetaSchema should be used instead.
|
||||
* @return {Ajv} this for method chaining
|
||||
*/
|
||||
function addSchema(schema, key, _skipValidation, _meta) {
|
||||
if (Array.isArray(schema)){
|
||||
for (var i=0; i<schema.length; i++) this.addSchema(schema[i], undefined, _skipValidation, _meta);
|
||||
return this;
|
||||
}
|
||||
var id = this._getId(schema);
|
||||
if (id !== undefined && typeof id != 'string')
|
||||
throw new Error('schema id must be string');
|
||||
key = resolve.normalizeId(key || id);
|
||||
checkUnique(this, key);
|
||||
this._schemas[key] = this._addSchema(schema, _skipValidation, _meta, true);
|
||||
return this;
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Add schema that will be used to validate other schemas
|
||||
* options in META_IGNORE_OPTIONS are alway set to false
|
||||
* @this Ajv
|
||||
* @param {Object} schema schema object
|
||||
* @param {String} key optional schema key
|
||||
* @param {Boolean} skipValidation true to skip schema validation, can be used to override validateSchema option for meta-schema
|
||||
* @return {Ajv} this for method chaining
|
||||
*/
|
||||
function addMetaSchema(schema, key, skipValidation) {
|
||||
this.addSchema(schema, key, skipValidation, true);
|
||||
return this;
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Validate schema
|
||||
* @this Ajv
|
||||
* @param {Object} schema schema to validate
|
||||
* @param {Boolean} throwOrLogError pass true to throw (or log) an error if invalid
|
||||
* @return {Boolean} true if schema is valid
|
||||
*/
|
||||
function validateSchema(schema, throwOrLogError) {
|
||||
var $schema = schema.$schema;
|
||||
if ($schema !== undefined && typeof $schema != 'string')
|
||||
throw new Error('$schema must be a string');
|
||||
$schema = $schema || this._opts.defaultMeta || defaultMeta(this);
|
||||
if (!$schema) {
|
||||
this.logger.warn('meta-schema not available');
|
||||
this.errors = null;
|
||||
return true;
|
||||
}
|
||||
var valid = this.validate($schema, schema);
|
||||
if (!valid && throwOrLogError) {
|
||||
var message = 'schema is invalid: ' + this.errorsText();
|
||||
if (this._opts.validateSchema == 'log') this.logger.error(message);
|
||||
else throw new Error(message);
|
||||
}
|
||||
return valid;
|
||||
}
|
||||
|
||||
|
||||
function defaultMeta(self) {
|
||||
var meta = self._opts.meta;
|
||||
self._opts.defaultMeta = typeof meta == 'object'
|
||||
? self._getId(meta) || meta
|
||||
: self.getSchema(META_SCHEMA_ID)
|
||||
? META_SCHEMA_ID
|
||||
: undefined;
|
||||
return self._opts.defaultMeta;
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Get compiled schema from the instance by `key` or `ref`.
|
||||
* @this Ajv
|
||||
* @param {String} keyRef `key` that was passed to `addSchema` or full schema reference (`schema.id` or resolved id).
|
||||
* @return {Function} schema validating function (with property `schema`).
|
||||
*/
|
||||
function getSchema(keyRef) {
|
||||
var schemaObj = _getSchemaObj(this, keyRef);
|
||||
switch (typeof schemaObj) {
|
||||
case 'object': return schemaObj.validate || this._compile(schemaObj);
|
||||
case 'string': return this.getSchema(schemaObj);
|
||||
case 'undefined': return _getSchemaFragment(this, keyRef);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
function _getSchemaFragment(self, ref) {
|
||||
var res = resolve.schema.call(self, { schema: {} }, ref);
|
||||
if (res) {
|
||||
var schema = res.schema
|
||||
, root = res.root
|
||||
, baseId = res.baseId;
|
||||
var v = compileSchema.call(self, schema, root, undefined, baseId);
|
||||
self._fragments[ref] = new SchemaObject({
|
||||
ref: ref,
|
||||
fragment: true,
|
||||
schema: schema,
|
||||
root: root,
|
||||
baseId: baseId,
|
||||
validate: v
|
||||
});
|
||||
return v;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
function _getSchemaObj(self, keyRef) {
|
||||
keyRef = resolve.normalizeId(keyRef);
|
||||
return self._schemas[keyRef] || self._refs[keyRef] || self._fragments[keyRef];
|
||||
}
|


/**
 * Remove cached schema(s).
 * If no parameter is passed, all schemas except meta-schemas are removed.
 * If a RegExp is passed, all schemas whose key/id matches the pattern (except meta-schemas) are removed.
 * A schema can be removed even when other schemas reference it, because those schemas keep local copies of their references.
 * @this Ajv
 * @param {String|Object|RegExp} schemaKeyRef key, ref, pattern to match key/ref or schema object
 * @return {Ajv} this for method chaining
 */
function removeSchema(schemaKeyRef) {
  if (schemaKeyRef instanceof RegExp) {
    _removeAllSchemas(this, this._schemas, schemaKeyRef);
    _removeAllSchemas(this, this._refs, schemaKeyRef);
    return this;
  }
  switch (typeof schemaKeyRef) {
    case 'undefined':
      _removeAllSchemas(this, this._schemas);
      _removeAllSchemas(this, this._refs);
      this._cache.clear();
      return this;
    case 'string':
      var schemaObj = _getSchemaObj(this, schemaKeyRef);
      if (schemaObj) this._cache.del(schemaObj.cacheKey);
      delete this._schemas[schemaKeyRef];
      delete this._refs[schemaKeyRef];
      return this;
    case 'object':
      var serialize = this._opts.serialize;
      var cacheKey = serialize ? serialize(schemaKeyRef) : schemaKeyRef;
      this._cache.del(cacheKey);
      var id = this._getId(schemaKeyRef);
      if (id) {
        id = resolve.normalizeId(id);
        delete this._schemas[id];
        delete this._refs[id];
      }
  }
  return this;
}
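
// Usage sketch (keys, ids and the pattern below are assumptions):
//
//   ajv.removeSchema('user');                        // by key or id
//   ajv.removeSchema(/^https:\/\/example\.com\//);   // every matching non-meta schema
//   ajv.removeSchema(schemaObject);                  // by schema object (clears its cache entry too)
//   ajv.removeSchema();                              // everything except meta-schemas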


function _removeAllSchemas(self, schemas, regex) {
  for (var keyRef in schemas) {
    var schemaObj = schemas[keyRef];
    if (!schemaObj.meta && (!regex || regex.test(keyRef))) {
      self._cache.del(schemaObj.cacheKey);
      delete schemas[keyRef];
    }
  }
}


/* @this Ajv */
function _addSchema(schema, skipValidation, meta, shouldAddSchema) {
  if (typeof schema != 'object' && typeof schema != 'boolean')
    throw new Error('schema should be object or boolean');
  var serialize = this._opts.serialize;
  var cacheKey = serialize ? serialize(schema) : schema;
  var cached = this._cache.get(cacheKey);
  if (cached) return cached;

  shouldAddSchema = shouldAddSchema || this._opts.addUsedSchema !== false;

  var id = resolve.normalizeId(this._getId(schema));
  if (id && shouldAddSchema) checkUnique(this, id);

  var willValidate = this._opts.validateSchema !== false && !skipValidation;
  var recursiveMeta;
  if (willValidate && !(recursiveMeta = id && id == resolve.normalizeId(schema.$schema)))
    this.validateSchema(schema, true);

  var localRefs = resolve.ids.call(this, schema);

  var schemaObj = new SchemaObject({
    id: id,
    schema: schema,
    localRefs: localRefs,
    cacheKey: cacheKey,
    meta: meta
  });

  if (id[0] != '#' && shouldAddSchema) this._refs[id] = schemaObj;
  this._cache.put(cacheKey, schemaObj);

  if (willValidate && recursiveMeta) this.validateSchema(schema, true);

  return schemaObj;
}
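
// Design note: the compiled result is cached under `cacheKey`, which is the
// schema object itself unless `_opts.serialize` is set, in which case that
// function's string output is used. Hedged sketch (the stringifier shown is an
// illustration, not a claim about the instance's defaults):
//
//   var ajv = new Ajv({serialize: require('fast-json-stable-stringify')});
//
// With `addUsedSchema: false`, schemas passed to compile()/validate() that
// carry an id are still compiled but not registered in `_refs`; schemas added
// explicitly through addSchema/addMetaSchema are registered regardless.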


/* @this Ajv */
function _compile(schemaObj, root) {
  if (schemaObj.compiling) {
    schemaObj.validate = callValidate;
    callValidate.schema = schemaObj.schema;
    callValidate.errors = null;
    callValidate.root = root ? root : callValidate;
    if (schemaObj.schema.$async === true)
      callValidate.$async = true;
    return callValidate;
  }
  schemaObj.compiling = true;

  var currentOpts;
  if (schemaObj.meta) {
    currentOpts = this._opts;
    this._opts = this._metaOpts;
  }

  var v;
  try { v = compileSchema.call(this, schemaObj.schema, root, schemaObj.localRefs); }
  catch(e) {
    delete schemaObj.validate;
    throw e;
  }
  finally {
    schemaObj.compiling = false;
    if (schemaObj.meta) this._opts = currentOpts;
  }

  schemaObj.validate = v;
  schemaObj.refs = v.refs;
  schemaObj.refVal = v.refVal;
  schemaObj.root = v.root;
  return v;


  /* @this {*} - custom context, see passContext option */
  function callValidate() {
    /* jshint validthis: true */
    var _validate = schemaObj.validate;
    var result = _validate.apply(this, arguments);
    callValidate.errors = _validate.errors;
    return result;
  }
}
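
// Note on the `compiling` guard in _compile: if compilation of a schema
// re-enters _compile for the same schemaObj (e.g. through a recursive
// reference), the second entry returns `callValidate`, a stub that looks up
// `schemaObj.validate` at call time. Once the outer compilation finishes and
// assigns the real validating function, the stub transparently delegates to
// it, so recursive compilation terminates without infinite recursion.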


function chooseGetId(opts) {
  switch (opts.schemaId) {
    case 'auto': return _get$IdOrId;
    case 'id': return _getId;
    default: return _get$Id;
  }
}

/* @this Ajv */
function _getId(schema) {
  if (schema.$id) this.logger.warn('schema $id ignored', schema.$id);
  return schema.id;
}

/* @this Ajv */
function _get$Id(schema) {
  if (schema.id) this.logger.warn('schema id ignored', schema.id);
  return schema.$id;
}


function _get$IdOrId(schema) {
  if (schema.$id && schema.id && schema.$id != schema.id)
    throw new Error('schema $id is different from id');
  return schema.$id || schema.id;
}
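
// The three getters above back the `schemaId` option. Illustrative sketch:
//
//   new Ajv()                    // default: `$id` is used, `id` is ignored with a warning
//   new Ajv({schemaId: 'id'})    // draft-04 style: `id` is used, `$id` is ignored with a warning
//   new Ajv({schemaId: 'auto'})  // accepts either, but throws if both are set and differ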


/**
 * Convert array of error message objects to string
 * @this Ajv
 * @param {Array<Object>} errors optional array of validation errors; if not passed, errors from the instance are used.
 * @param {Object} options optional options with properties `separator` and `dataVar`.
 * @return {String} human-readable string with all error descriptions
 */
function errorsText(errors, options) {
  errors = errors || this.errors;
  if (!errors) return 'No errors';
  options = options || {};
  var separator = options.separator === undefined ? ', ' : options.separator;
  var dataVar = options.dataVar === undefined ? 'data' : options.dataVar;

  var text = '';
  for (var i=0; i<errors.length; i++) {
    var e = errors[i];
    if (e) text += dataVar + e.dataPath + ' ' + e.message + separator;
  }
  return text.slice(0, -separator.length);
}
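
// Usage sketch (the schema, data and option values are illustrative):
//
//   if (!ajv.validate(schema, data)) {
//     console.log(ajv.errorsText());  // messages joined with ', ', paths prefixed with 'data'
//     console.log(ajv.errorsText(ajv.errors, {separator: '\n', dataVar: 'config'}));
//   }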


/**
 * Add custom format
 * @this Ajv
 * @param {String} name format name
 * @param {String|RegExp|Function} format a string is converted to a RegExp; a function should return a boolean (true when valid)
 * @return {Ajv} this for method chaining
 */
function addFormat(name, format) {
  if (typeof format == 'string') format = new RegExp(format);
  this._formats[name] = format;
  return this;
}
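
// Usage sketch (format names and patterns are assumptions):
//
//   ajv.addFormat('identifier', /^[a-z_$][a-z0-9_$]*$/i);               // RegExp
//   ajv.addFormat('even', function (str) { return +str % 2 === 0; });   // function returning a boolean
//   ajv.addFormat('digits', '^[0-9]+$');                                // string, compiled to a RegExp above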


function addDefaultMetaSchema(self) {
  var $dataSchema;
  if (self._opts.$data) {
    $dataSchema = require('./refs/data.json');
    self.addMetaSchema($dataSchema, $dataSchema.$id, true);
  }
  if (self._opts.meta === false) return;
  var metaSchema = require('./refs/json-schema-draft-07.json');
  if (self._opts.$data) metaSchema = $dataMetaSchema(metaSchema, META_SUPPORT_DATA);
  self.addMetaSchema(metaSchema, META_SCHEMA_ID, true);
  self._refs['http://json-schema.org/schema'] = META_SCHEMA_ID;
}


function addInitialSchemas(self) {
  var optsSchemas = self._opts.schemas;
  if (!optsSchemas) return;
  if (Array.isArray(optsSchemas)) self.addSchema(optsSchemas);
  else for (var key in optsSchemas) self.addSchema(optsSchemas[key], key);
}


function addInitialFormats(self) {
  for (var name in self._opts.formats) {
    var format = self._opts.formats[name];
    self.addFormat(name, format);
  }
}


function addInitialKeywords(self) {
  for (var name in self._opts.keywords) {
    var keyword = self._opts.keywords[name];
    self.addKeyword(name, keyword);
  }
}
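
// The three addInitial* helpers above consume constructor options. Hedged
// sketch (the schema, format and keyword definitions are placeholders):
//
//   var ajv = new Ajv({
//     schemas: {user: {type: 'object'}},               // or an array of schemas
//     formats: {identifier: /^[a-z_$][a-z0-9_$]*$/i},
//     keywords: {range: rangeKeywordDefinition}        // a keyword definition object (hypothetical)
//   });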


function checkUnique(self, id) {
  if (self._schemas[id] || self._refs[id])
    throw new Error('schema with key or id "' + id + '" already exists');
}


function getMetaSchemaOptions(self) {
  var metaOpts = util.copy(self._opts);
  for (var i=0; i<META_IGNORE_OPTIONS.length; i++)
    delete metaOpts[META_IGNORE_OPTIONS[i]];
  return metaOpts;
}


function setLogger(self) {
  var logger = self._opts.logger;
  if (logger === false) {
    self.logger = {log: noop, warn: noop, error: noop};
  } else {
    if (logger === undefined) logger = console;
    if (!(typeof logger == 'object' && logger.log && logger.warn && logger.error))
      throw new Error('logger must implement log, warn and error methods');
    self.logger = logger;
  }
}
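
// Hedged sketch of the `logger` option handled above:
//
//   new Ajv({logger: false});                               // silence all log/warn/error output
//   new Ajv({logger: {log: fn, warn: fn, error: fn}});      // any object with these three methods
//
// When the option is undefined the instance falls back to `console`; anything
// else that lacks one of the three methods throws at construction time.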


function noop() {}