update
This commit is contained in:
@@ -0,0 +1,17 @@
|
||||
/*
|
||||
@license
|
||||
Rollup.js v4.39.0
|
||||
Wed, 02 Apr 2025 04:49:00 GMT - commit 5c001245779063abac3899aa9d25294ab003581b
|
||||
|
||||
https://github.com/rollup/rollup
|
||||
|
||||
Released under the MIT License.
|
||||
*/
|
||||
export { version as VERSION, defineConfig, rollup, watch } from './shared/node-entry.js';
|
||||
import './shared/parseAst.js';
|
||||
import '../native.js';
|
||||
import 'node:path';
|
||||
import 'path';
|
||||
import 'node:process';
|
||||
import 'node:perf_hooks';
|
||||
import 'node:fs/promises';
|
||||
@@ -0,0 +1,17 @@
|
||||
"use strict";
|
||||
|
||||
const { stringifyValueForError } = require("./shared");
|
||||
|
||||
/**
 * Builds the error message shown when the configured value for a rule does
 * not contain a valid severity.
 *
 * @param {Object} params
 * @param {string} params.ruleId - name of the misconfigured rule.
 * @param {*} params.value - the invalid configuration value the user supplied.
 * @returns {string} a multi-line, human-readable explanation including a
 *   suggested corrected configuration.
 */
module.exports = function ({ ruleId, value }) {
    // The template starts with a newline for readability; trimStart() removes it.
    return `
Configuration for rule "${ruleId}" is invalid. Each rule must have a severity ("off", 0, "warn", 1, "error", or 2) and may be followed by additional options for the rule.

You passed '${stringifyValueForError(value, 4)}', which doesn't contain a valid severity.

If you're attempting to configure rule options, perhaps you meant:

    "${ruleId}": ["error", ${stringifyValueForError(value, 8)}]

See https://eslint.org/docs/latest/use/configure/rules#using-configuration-files for configuring rules.
`.trimStart();
};
|
||||
@@ -0,0 +1,2 @@
|
||||
/**
 * Compares two values for equality and returns the result as a boolean.
 * (Type declaration only — the actual comparison semantics live in the
 * accompanying JavaScript implementation; presumably a deep structural
 * comparison — confirm against the implementation.)
 */
declare const equal: (a: any, b: any) => boolean;
// CommonJS-style export assignment: the module's exported value IS the function.
export = equal;
|
||||
@@ -0,0 +1,92 @@
|
||||
// Copyright (C) 2014 Yusuke Suzuki <utatane.tea@gmail.com>
|
||||
//
|
||||
// Redistribution and use in source and binary forms, with or without
|
||||
// modification, are permitted provided that the following conditions are met:
|
||||
//
|
||||
// * Redistributions of source code must retain the above copyright
|
||||
// notice, this list of conditions and the following disclaimer.
|
||||
// * Redistributions in binary form must reproduce the above copyright
|
||||
// notice, this list of conditions and the following disclaimer in the
|
||||
// documentation and/or other materials provided with the distribution.
|
||||
//
|
||||
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
|
||||
// AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
|
||||
// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
|
||||
// ARE DISCLAIMED. IN NO EVENT SHALL <COPYRIGHT HOLDER> BE LIABLE FOR ANY
|
||||
// DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
|
||||
// (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
|
||||
// LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
|
||||
// ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
|
||||
// THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
import gulp from 'gulp';
|
||||
import mocha from 'gulp-mocha';
|
||||
import eslint from 'gulp-eslint';
|
||||
import minimist from 'minimist';
|
||||
import git from 'gulp-git';
|
||||
import bump from 'gulp-bump';
|
||||
import filter from 'gulp-filter';
|
||||
import tagVersion from 'gulp-tag-version';
|
||||
import 'babel-register';
|
||||
|
||||
// Glob patterns of the files the lint task checks.
const SOURCE = [
    '*.js'
];

// Shared ESLint configuration used by the lint task.
// `const` (was `let`): this module-level setting is never reassigned, so a
// constant binding prevents accidental reassignment elsewhere in the file.
const ESLINT_OPTION = {
    parser: 'babel-eslint',
    parserOptions: {
        'sourceType': 'module'
    },
    // Rules relaxed (0 = off) relative to the default configuration.
    rules: {
        'quotes': 0,
        'eqeqeq': 0,
        'no-use-before-define': 0,
        'no-shadow': 0,
        'no-new': 0,
        'no-underscore-dangle': 0,
        'no-multi-spaces': 0,
        'no-native-reassign': 0,
        'no-loop-func': 0
    },
    env: {
        'node': true
    }
};
|
||||
|
||||
// Run the mocha suite. The file set can be overridden from the command line,
// e.g. `gulp test --test test/foo.js`; it defaults to every file in test/.
gulp.task('test', function () {
    const options = minimist(process.argv.slice(2), {
        string: 'test',
        default: { test: 'test/*.js' }
    });
    return gulp.src(options.test).pipe(mocha({ reporter: 'spec' }));
});
|
||||
|
||||
// Lint every source file with the shared ESLint options; report each file's
// problems to stderr and fail the stream on the first error.
gulp.task('lint', function () {
    return gulp.src(SOURCE)
        .pipe(eslint(ESLINT_OPTION))
        .pipe(eslint.formatEach('stylish', process.stderr))
        .pipe(eslint.failOnError());
});
|
||||
|
||||
/**
 * Bumps the package version, commits the change, and tags the commit.
 * `const` (was `let`): the binding is never reassigned.
 *
 * @param {string} importance - semver release type: 'patch', 'minor' or 'major'.
 * @returns the gulp stream, so tasks can signal completion.
 */
const inc = importance =>
    gulp.src(['./package.json'])
        .pipe(bump({ type: importance }))
        .pipe(gulp.dest('./'))
        .pipe(git.commit('Bumps package version'))
        // Tag exactly once, keyed off the updated package.json.
        .pipe(filter('package.json'))
        .pipe(tagVersion({
            prefix: ''
        }));
|
||||
|
||||
// Aggregate tasks (gulp 3 array-dependency syntax): CI runs lint then test.
gulp.task('travis', [ 'lint', 'test' ]);
gulp.task('default', [ 'travis' ]);

// Release helpers: bump the corresponding semver component, commit, and tag.
gulp.task('patch', [ ], () => inc('patch'));
gulp.task('minor', [ ], () => inc('minor'));
gulp.task('major', [ ], () => inc('major'));
|
||||
@@ -0,0 +1 @@
|
||||
{"version":3,"names":["_checkInRHS","value","Object","TypeError"],"sources":["../../src/helpers/checkInRHS.ts"],"sourcesContent":["/* @minVersion 7.20.5 */\n\nexport default function _checkInRHS(value: unknown) {\n if (Object(value) !== value) {\n throw TypeError(\n \"right-hand side of 'in' should be an object, got \" +\n (value !== null ? typeof value : \"null\"),\n );\n }\n return value;\n}\n"],"mappings":";;;;;;AAEe,SAASA,WAAWA,CAACC,KAAc,EAAE;EAClD,IAAIC,MAAM,CAACD,KAAK,CAAC,KAAKA,KAAK,EAAE;IAC3B,MAAME,SAAS,CACb,mDAAmD,IAChDF,KAAK,KAAK,IAAI,GAAG,OAAOA,KAAK,GAAG,MAAM,CAC3C,CAAC;EACH;EACA,OAAOA,KAAK;AACd","ignoreList":[]}
|
||||
@@ -0,0 +1,459 @@
|
||||
"use strict";
|
||||
|
||||
Object.defineProperty(exports, "__esModule", {
|
||||
value: true
|
||||
});
|
||||
exports.default = applyDecs;
|
||||
var _setFunctionName = require("setFunctionName");
|
||||
var _toPropertyKey = require("toPropertyKey");
|
||||
/**
 * Creates the legacy `getMetadata`/`setMetadata` accessors handed to a
 * decorator context for one decorated element.
 *
 * @param {object} metadataMap symbol-keyed metadata for the class/prototype.
 * @param {0|1|2} kind 0 = constructor-level, 1 = public member, 2 = private member.
 * @param {string|symbol} property property key (a fresh Symbol for private members).
 * @param {{v: boolean}} decoratorFinishedRef flipped to true once the decorator
 *   returns; both accessors throw after that.
 */
function old_createMetadataMethodsForProperty(metadataMap, kind, property, decoratorFinishedRef) {
  return {
    getMetadata: function (key) {
      old_assertNotFinished(decoratorFinishedRef, "getMetadata");
      old_assertMetadataKey(key);
      var metadataForKey = metadataMap[key];
      if (metadataForKey === void 0) return void 0;
      if (kind === 1) {
        // Public members: plain object keyed by property name.
        var pub = metadataForKey.public;
        if (pub !== void 0) {
          return pub[property];
        }
      } else if (kind === 2) {
        // Private members: Map keyed by the per-member Symbol.
        var priv = metadataForKey.private;
        if (priv !== void 0) {
          return priv.get(property);
        }
      } else if (Object.hasOwnProperty.call(metadataForKey, "constructor")) {
        return metadataForKey.constructor;
      }
    },
    setMetadata: function (key, value) {
      old_assertNotFinished(decoratorFinishedRef, "setMetadata");
      old_assertMetadataKey(key);
      var metadataForKey = metadataMap[key];
      if (metadataForKey === void 0) {
        metadataForKey = metadataMap[key] = {};
      }
      if (kind === 1) {
        var pub = metadataForKey.public;
        if (pub === void 0) {
          pub = metadataForKey.public = {};
        }
        pub[property] = value;
      } else if (kind === 2) {
        // BUG FIX: read the existing `.private` map — the original read the
        // non-existent `.priv`, so every setMetadata call saw undefined and
        // replaced the Map, discarding all previously stored private metadata.
        var priv = metadataForKey.private;
        if (priv === void 0) {
          priv = metadataForKey.private = new Map();
        }
        priv.set(property, value);
      } else {
        metadataForKey.constructor = value;
      }
    }
  };
}
|
||||
/**
 * Finalizes the metadata gathered while decorating: links each entry to the
 * corresponding entry inherited from the parent class and installs the map
 * on `obj` under Symbol.metadata (or the "Symbol.metadata" registry fallback).
 */
function old_convertMetadataMapToFinal(obj, metadataMap) {
  // Metadata already installed by a (decorated) parent class, if any.
  var parentMetadataMap = obj[Symbol.metadata || Symbol.for("Symbol.metadata")];
  var metadataKeys = Object.getOwnPropertySymbols(metadataMap);
  if (metadataKeys.length === 0) return;
  for (var i = 0; i < metadataKeys.length; i++) {
    var key = metadataKeys[i];
    var metaForKey = metadataMap[key];
    var parentMetaForKey = parentMetadataMap ? parentMetadataMap[key] : null;
    var pub = metaForKey.public;
    var parentPub = parentMetaForKey ? parentMetaForKey.public : null;
    if (pub && parentPub) {
      // Public metadata inherits lookups through the prototype chain.
      Object.setPrototypeOf(pub, parentPub);
    }
    var priv = metaForKey.private;
    if (priv) {
      // Private metadata keys are per-class Symbols and cannot collide, so
      // the Map is flattened to an array and concatenated with the parent's.
      var privArr = Array.from(priv.values());
      var parentPriv = parentMetaForKey ? parentMetaForKey.private : null;
      if (parentPriv) {
        privArr = privArr.concat(parentPriv);
      }
      metaForKey.private = privArr;
    }
    if (parentMetaForKey) {
      Object.setPrototypeOf(metaForKey, parentMetaForKey);
    }
  }
  if (parentMetadataMap) {
    Object.setPrototypeOf(metadataMap, parentMetadataMap);
  }
  obj[Symbol.metadata || Symbol.for("Symbol.metadata")] = metadataMap;
}
|
||||
/**
 * Builds the `addInitializer` callback exposed on a decorator context.
 * Initializers are validated and queued on the shared `initializers` list;
 * calling it after the decorator has returned throws.
 */
function old_createAddInitializerMethod(initializers, decoratorFinishedRef) {
  return function addInitializer(initializer) {
    old_assertNotFinished(decoratorFinishedRef, "addInitializer");
    old_assertCallable(initializer, "An initializer");
    initializers.push(initializer);
  };
}
|
||||
/**
 * Invokes a single member decorator with a fully-populated context object:
 * kind string, display name, static/private flags, `addInitializer` (for
 * non-fields), an `access` bridge for private members, and the legacy
 * metadata get/set methods. The context is frozen for further metadata
 * writes as soon as the decorator returns (via decoratorFinishedRef).
 *
 * @returns whatever the decorator returns (validated by the caller).
 */
function old_memberDec(dec, name, desc, metadataMap, initializers, kind, isStatic, isPrivate, value) {
  // Map the numeric member kind to the spec's context.kind string.
  var kindStr;
  switch (kind) {
    case 1:
      kindStr = "accessor";
      break;
    case 2:
      kindStr = "method";
      break;
    case 3:
      kindStr = "getter";
      break;
    case 4:
      kindStr = "setter";
      break;
    default:
      kindStr = "field";
  }
  var ctx = {
    kind: kindStr,
    // Private names are reported with a leading "#".
    name: isPrivate ? "#" + name : _toPropertyKey(name),
    isStatic: isStatic,
    isPrivate: isPrivate
  };
  // Shared flag: set to true in the finally below, after which the context's
  // addInitializer / metadata methods refuse to run.
  var decoratorFinishedRef = {
    v: false
  };
  if (kind !== 0) {
    ctx.addInitializer = old_createAddInitializerMethod(initializers, decoratorFinishedRef);
  }
  var metadataKind, metadataName;
  if (isPrivate) {
    metadataKind = 2;
    // Private metadata is keyed by a fresh Symbol, not the textual name.
    metadataName = Symbol(name);
    // `access` lets the decorator reach the private member it cannot see.
    var access = {};
    if (kind === 0) {
      access.get = desc.get;
      access.set = desc.set;
    } else if (kind === 2) {
      access.get = function () {
        return desc.value;
      };
    } else {
      // accessor (1), getter (3) and/or setter (4)
      if (kind === 1 || kind === 3) {
        access.get = function () {
          return desc.get.call(this);
        };
      }
      if (kind === 1 || kind === 4) {
        access.set = function (v) {
          desc.set.call(this, v);
        };
      }
    }
    ctx.access = access;
  } else {
    metadataKind = 1;
    metadataName = name;
  }
  try {
    return dec(value, Object.assign(ctx, old_createMetadataMethodsForProperty(metadataMap, metadataKind, metadataName, decoratorFinishedRef)));
  } finally {
    // Invalidate the context even if the decorator throws.
    decoratorFinishedRef.v = true;
  }
}
|
||||
/**
 * Guards context methods against use after their decorator has returned.
 * Throws an Error naming the offending method once the shared flag is set.
 */
function old_assertNotFinished(decoratorFinishedRef, fnName) {
  if (!decoratorFinishedRef.v) return;
  throw new Error("attempted to call " + fnName + " after decoration was finished");
}
|
||||
/**
 * Validates a metadata key: only symbols are accepted; anything else raises
 * a TypeError that echoes the rejected value.
 */
function old_assertMetadataKey(key) {
  if (typeof key === "symbol") return;
  throw new TypeError("Metadata keys must be symbols, received: " + key);
}
|
||||
/**
 * Ensures `fn` is callable; otherwise throws a TypeError whose message is
 * prefixed with `hint` (e.g. "accessor.get must be a function").
 */
function old_assertCallable(fn, hint) {
  if (typeof fn === "function") return;
  throw new TypeError(hint + " must be a function");
}
|
||||
/**
 * Validates a decorator's (non-undefined) return value for the given member
 * kind: accessor decorators (kind 1) must return a descriptor-like object
 * whose optional get/set/init/initializer entries are callable; every other
 * kind must return a function. kind 10 denotes a class decorator.
 */
function old_assertValidReturnValue(kind, value) {
  var type = typeof value;
  if (kind === 1) {
    if (type !== "object" || value === null) {
      throw new TypeError("accessor decorators must return an object with get, set, or init properties or void 0");
    }
    if (value.get !== undefined) old_assertCallable(value.get, "accessor.get");
    if (value.set !== undefined) old_assertCallable(value.set, "accessor.set");
    if (value.init !== undefined) old_assertCallable(value.init, "accessor.init");
    if (value.initializer !== undefined) {
      old_assertCallable(value.initializer, "accessor.initializer");
    }
    return;
  }
  if (type === "function") return;
  var hint = kind === 0 ? "field" : kind === 10 ? "class" : "method";
  throw new TypeError(hint + " decorators must return a function or void 0");
}
|
||||
/**
 * Extracts the initializer from an accessor-decorator result, preferring the
 * modern `.init` and falling back to the deprecated `.initializer` (with a
 * one-off console warning when the legacy name is actually used).
 */
function old_getInit(desc) {
  var initializer = desc.init;
  if (initializer == null) {
    initializer = desc.initializer;
    if (initializer && typeof console !== "undefined") {
      console.warn(".initializer has been renamed to .init as of March 2022");
    }
  }
  return initializer;
}
|
||||
/**
 * Applies the decorator(s) of one class member.
 *
 * For public members the (possibly replaced) implementation is written back
 * onto `base` via defineProperty; for private members callable wrappers are
 * pushed onto `ret` for the transformed class body to pick up. Field and
 * accessor initializers are normalized into a single (instance, init) =>
 * value function that is also pushed onto `ret`.
 *
 * decInfo layout: [decorator | decorator[], kind, name, (private impls...)].
 * kind: 0 field, 1 accessor, 2 method, 3 getter, 4 setter.
 */
function old_applyMemberDec(ret, base, decInfo, name, kind, isStatic, isPrivate, metadataMap, initializers) {
  var decs = decInfo[0];
  var desc, initializer, prefix, value;
  if (isPrivate) {
    // Private members have no property descriptor on the class; build one
    // from the compiler-provided implementations in decInfo[3]/decInfo[4].
    if (kind === 0 || kind === 1) {
      desc = {
        get: decInfo[3],
        set: decInfo[4]
      };
      prefix = "get";
    } else if (kind === 3) {
      desc = {
        get: decInfo[3]
      };
      prefix = "get";
    } else if (kind === 4) {
      desc = {
        set: decInfo[3]
      };
      prefix = "set";
    } else {
      desc = {
        value: decInfo[3]
      };
    }
    if (kind !== 0) {
      // Give the synthetic functions readable names such as "get #x".
      if (kind === 1) {
        _setFunctionName(decInfo[4], "#" + name, "set");
      }
      _setFunctionName(decInfo[3], "#" + name, prefix);
    }
  } else if (kind !== 0) {
    desc = Object.getOwnPropertyDescriptor(base, name);
  }
  // Select the value handed to the decorator, depending on the member kind.
  if (kind === 1) {
    value = {
      get: desc.get,
      set: desc.set
    };
  } else if (kind === 2) {
    value = desc.value;
  } else if (kind === 3) {
    value = desc.get;
  } else if (kind === 4) {
    value = desc.set;
  }
  var newValue, get, set;
  if (typeof decs === "function") {
    // Single decorator.
    newValue = old_memberDec(decs, name, desc, metadataMap, initializers, kind, isStatic, isPrivate, value);
    if (newValue !== void 0) {
      old_assertValidReturnValue(kind, newValue);
      if (kind === 0) {
        initializer = newValue;
      } else if (kind === 1) {
        initializer = old_getInit(newValue);
        get = newValue.get || value.get;
        set = newValue.set || value.set;
        value = {
          get: get,
          set: set
        };
      } else {
        value = newValue;
      }
    }
  } else {
    // Array of decorators: applied innermost (last) first.
    for (var i = decs.length - 1; i >= 0; i--) {
      var dec = decs[i];
      newValue = old_memberDec(dec, name, desc, metadataMap, initializers, kind, isStatic, isPrivate, value);
      if (newValue !== void 0) {
        old_assertValidReturnValue(kind, newValue);
        var newInit;
        if (kind === 0) {
          newInit = newValue;
        } else if (kind === 1) {
          newInit = old_getInit(newValue);
          get = newValue.get || value.get;
          set = newValue.set || value.set;
          value = {
            get: get,
            set: set
          };
        } else {
          value = newValue;
        }
        if (newInit !== void 0) {
          // Accumulate initializers: single function -> pair -> growing array.
          if (initializer === void 0) {
            initializer = newInit;
          } else if (typeof initializer === "function") {
            initializer = [initializer, newInit];
          } else {
            initializer.push(newInit);
          }
        }
      }
    }
  }
  if (kind === 0 || kind === 1) {
    // Fields and accessors always contribute exactly one initializer
    // function of shape (instance, init) => finalValue.
    if (initializer === void 0) {
      // No decorator supplied one: pass the initial value through unchanged.
      initializer = function (instance, init) {
        return init;
      };
    } else if (typeof initializer !== "function") {
      // Several initializers were accumulated: chain them in order.
      var ownInitializers = initializer;
      initializer = function (instance, init) {
        var value = init;
        for (var i = 0; i < ownInitializers.length; i++) {
          value = ownInitializers[i].call(instance, value);
        }
        return value;
      };
    } else {
      // Exactly one initializer: wrap so `this` is bound to the instance.
      var originalInitializer = initializer;
      initializer = function (instance, init) {
        return originalInitializer.call(instance, init);
      };
    }
    ret.push(initializer);
  }
  if (kind !== 0) {
    // Write the possibly-replaced implementation back into the descriptor.
    if (kind === 1) {
      desc.get = value.get;
      desc.set = value.set;
    } else if (kind === 2) {
      desc.value = value;
    } else if (kind === 3) {
      desc.get = value;
    } else if (kind === 4) {
      desc.set = value;
    }
    if (isPrivate) {
      // Private members cannot be defined on the class object; expose the
      // final implementations through `ret` instead.
      if (kind === 1) {
        ret.push(function (instance, args) {
          return value.get.call(instance, args);
        });
        ret.push(function (instance, args) {
          return value.set.call(instance, args);
        });
      } else if (kind === 2) {
        ret.push(value);
      } else {
        ret.push(function (instance, args) {
          return value.call(instance, args);
        });
      }
    } else {
      Object.defineProperty(base, name, desc);
    }
  }
}
|
||||
/**
 * Walks the compiler-generated `decInfos` list and applies every member
 * decorator, collecting per-member results on `ret` and queueing prototype-
 * and static-level extra initializers at the end.
 */
function old_applyMemberDecs(ret, Class, protoMetadataMap, staticMetadataMap, decInfos) {
  var protoInitializers;
  var staticInitializers;
  // Tracks already-decorated public methods/accessors so duplicate names can
  // be rejected (a getter may still pair with a setter, and vice versa).
  var existingProtoNonFields = new Map();
  var existingStaticNonFields = new Map();
  for (var i = 0; i < decInfos.length; i++) {
    var decInfo = decInfos[i];
    // Class decorators share this array; only array entries are member infos.
    if (!Array.isArray(decInfo)) continue;
    var kind = decInfo[1];
    var name = decInfo[2];
    // Private members carry their implementation(s) in extra slots.
    var isPrivate = decInfo.length > 3;
    // Static member kinds are encoded with an offset of 5.
    var isStatic = kind >= 5;
    var base;
    var metadataMap;
    var initializers;
    if (isStatic) {
      base = Class;
      metadataMap = staticMetadataMap;
      kind = kind - 5;
      if (kind !== 0) {
        staticInitializers = staticInitializers || [];
        initializers = staticInitializers;
      }
    } else {
      base = Class.prototype;
      metadataMap = protoMetadataMap;
      if (kind !== 0) {
        protoInitializers = protoInitializers || [];
        initializers = protoInitializers;
      }
    }
    if (kind !== 0 && !isPrivate) {
      var existingNonFields = isStatic ? existingStaticNonFields : existingProtoNonFields;
      var existingKind = existingNonFields.get(name) || 0;
      // existingKind: true = name fully used; 3 = getter seen; 4 = setter seen.
      if (existingKind === true || existingKind === 3 && kind !== 4 || existingKind === 4 && kind !== 3) {
        throw new Error("Attempted to decorate a public method/accessor that has the same name as a previously decorated public method/accessor. This is not currently supported by the decorators plugin. Property name was: " + name);
      } else if (!existingKind && kind > 2) {
        // Remember a lone getter/setter so its counterpart is still allowed.
        existingNonFields.set(name, kind);
      } else {
        existingNonFields.set(name, true);
      }
    }
    old_applyMemberDec(ret, base, decInfo, name, kind, isStatic, isPrivate, metadataMap, initializers);
  }
  old_pushInitializers(ret, protoInitializers);
  old_pushInitializers(ret, staticInitializers);
}
|
||||
/**
 * When any extra initializers were collected, appends a single function to
 * `ret` that runs them all against an instance and returns that instance.
 */
function old_pushInitializers(ret, initializers) {
  if (!initializers) return;
  ret.push(function (instance) {
    for (var index = 0; index < initializers.length; index++) {
      initializers[index].call(instance);
    }
    return instance;
  });
}
|
||||
/**
 * Applies class decorators (innermost first) and, if any exist, pushes the
 * final class plus a function that runs the collected class initializers
 * onto `ret`.
 */
function old_applyClassDecs(ret, targetClass, metadataMap, classDecs) {
  if (classDecs.length > 0) {
    var initializers = [];
    var newClass = targetClass;
    var name = targetClass.name;
    for (var i = classDecs.length - 1; i >= 0; i--) {
      // Fresh "finished" flag per decorator so late context calls throw.
      var decoratorFinishedRef = {
        v: false
      };
      try {
        var ctx = Object.assign({
          kind: "class",
          name: name,
          addInitializer: old_createAddInitializerMethod(initializers, decoratorFinishedRef)
        }, old_createMetadataMethodsForProperty(metadataMap, 0, name, decoratorFinishedRef));
        // `var` hoists nextNewClass to function scope, so it remains visible
        // after the try/finally below.
        var nextNewClass = classDecs[i](newClass, ctx);
      } finally {
        // Invalidate the context even if the decorator throws.
        decoratorFinishedRef.v = true;
      }
      if (nextNewClass !== undefined) {
        // 10 is the pseudo-kind for class decorators ("class" in errors).
        old_assertValidReturnValue(10, nextNewClass);
        newClass = nextNewClass;
      }
    }
    ret.push(newClass, function () {
      for (var i = 0; i < initializers.length; i++) {
        initializers[i].call(newClass);
      }
    });
  }
}
|
||||
/**
 * Entry point of this legacy decorators runtime helper: applies all member
 * decorators, then all class decorators, finalizing the prototype-level and
 * static-level metadata maps after each phase.
 *
 * @returns {Array} the list of initializers, private-member wrappers and
 *   (when class decorators exist) the final class, in the order the
 *   transformed class body expects to destructure them.
 */
function applyDecs(targetClass, memberDecs, classDecs) {
  var ret = [];
  var staticMetadataMap = {};
  var protoMetadataMap = {};
  old_applyMemberDecs(ret, targetClass, protoMetadataMap, staticMetadataMap, memberDecs);
  old_convertMetadataMapToFinal(targetClass.prototype, protoMetadataMap);
  old_applyClassDecs(ret, targetClass, staticMetadataMap, classDecs);
  old_convertMetadataMapToFinal(targetClass, staticMetadataMap);
  return ret;
}
|
||||
|
||||
//# sourceMappingURL=applyDecs.js.map
|
||||
@@ -0,0 +1,93 @@
|
||||
'use strict'
|
||||
|
||||
/*!
|
||||
* Canvas - Image
|
||||
* Copyright (c) 2010 LearnBoost <tj@learnboost.com>
|
||||
* MIT Licensed
|
||||
*/
|
||||
|
||||
/**
|
||||
* Module dependencies.
|
||||
*/
|
||||
|
||||
const bindings = require('./bindings')
|
||||
const Image = module.exports = bindings.Image
|
||||
const util = require('util')
|
||||
|
||||
const { GetSource, SetSource } = bindings
|
||||
|
||||
Object.defineProperty(Image.prototype, 'src', {
  /**
   * src setter. Valid values:
   *  * `data:` URI
   *  * Local file path
   *  * HTTP or HTTPS URL
   *  * Buffer containing image data (i.e. not a `data:` URI stored in a Buffer)
   *
   * @param {String|Buffer} val filename, buffer, data URI, URL
   * @api public
   */
  set (val) {
    if (typeof val === 'string') {
      if (/^\s*data:/.test(val)) { // data: URI
        const commaI = val.indexOf(',')
        // 'base64' must come before the comma
        const isBase64 = val.lastIndexOf('base64', commaI) !== -1
        const content = val.slice(commaI + 1)
        setSource(this, Buffer.from(content, isBase64 ? 'base64' : 'utf8'), val)
      } else if (/^\s*https?:\/\//.test(val)) { // remote URL
        // Route failures through onerror when the caller registered one.
        const onerror = err => {
          if (typeof this.onerror === 'function') {
            this.onerror(err)
          } else {
            throw err
          }
        }

        fetch(val, {
          method: 'GET',
          headers: { 'User-Agent': 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/102.0.0.0 Safari/537.36' }
        })
          .then(res => {
            if (!res.ok) {
              // BUG FIX: fetch Responses expose `status`, not the
              // http.IncomingMessage-style `statusCode`, which was undefined
              // and printed "Server responded with undefined".
              throw new Error(`Server responded with ${res.status}`)
            }
            return res.arrayBuffer()
          })
          .then(data => {
            setSource(this, Buffer.from(data))
          })
          .catch(onerror)
      } else { // local file path assumed
        setSource(this, val)
      }
    } else if (Buffer.isBuffer(val)) {
      setSource(this, val)
    }
    // Any other type is silently ignored (matches historical behavior).
  },

  get () {
    // TODO https://github.com/Automattic/node-canvas/issues/118
    return getSource(this)
  },

  configurable: true
})
|
||||
|
||||
// TODO || is for Node.js pre-v6.6.0
|
||||
// TODO || is for Node.js pre-v6.6.0
// Custom console/util.inspect rendering, e.g. "[Image:10x10 a.png complete]".
Image.prototype[util.inspect.custom || 'inspect'] = function () {
  const parts = ['[Image']
  if (this.complete) parts.push(':' + this.width + 'x' + this.height)
  if (this.src) parts.push(' ' + this.src)
  if (this.complete) parts.push(' complete')
  parts.push(']')
  return parts.join('')
}
|
||||
|
||||
/**
 * Returns the value originally assigned to `img.src` when one was recorded,
 * otherwise asks the native GetSource binding.
 */
function getSource (img) {
  if (img._originalSource) return img._originalSource
  return GetSource.call(img)
}
|
||||
|
||||
/**
 * Forwards `src` to the native SetSource binding and remembers the original
 * value (e.g. the full data: URI string) so the `src` getter can return it.
 *
 * @param {Image} img
 * @param {String|Buffer} src value passed to the native binding
 * @param {String} [origSrc] original string form to report from the getter
 */
function setSource (img, src, origSrc) {
  SetSource.call(img, src)
  img._originalSource = origSrc
}
|
||||
@@ -0,0 +1,559 @@
|
||||
/*
|
||||
@license
|
||||
Rollup.js v4.39.0
|
||||
Wed, 02 Apr 2025 04:49:00 GMT - commit 5c001245779063abac3899aa9d25294ab003581b
|
||||
|
||||
https://github.com/rollup/rollup
|
||||
|
||||
Released under the MIT License.
|
||||
*/
|
||||
'use strict';
|
||||
|
||||
const promises = require('node:fs/promises');
|
||||
const path = require('node:path');
|
||||
const process$1 = require('node:process');
|
||||
const node_url = require('node:url');
|
||||
const rollup = require('./rollup.js');
|
||||
const parseAst_js = require('./parseAst.js');
|
||||
const getLogFilter_js = require('../getLogFilter.js');
|
||||
|
||||
/**
 * Creates the CLI's warning collector. Warnings with a handler in
 * `deferredHandlers` are batched and printed grouped on flush(); warnings in
 * `immediateHandlers` (and unknown codes) are printed right away. All output
 * is suppressed when --silent was passed, though counts are still kept.
 */
function batchWarnings(command) {
    const silent = !!command.silent;
    const logFilter = generateLogFilter(command);
    let count = 0;
    // code -> warnings collected for a deferred (grouped) handler
    const deferredWarnings = new Map();
    let warningOccurred = false;
    const add = (warning) => {
        count += 1;
        warningOccurred = true;
        if (silent)
            return;
        if (warning.code in deferredHandlers) {
            rollup.getOrCreate(deferredWarnings, warning.code, rollup.getNewArray).push(warning);
        }
        else if (warning.code in immediateHandlers) {
            immediateHandlers[warning.code](warning);
        }
        else {
            // Unknown code: generic title + default details.
            title(warning.message);
            defaultBody(warning);
        }
    };
    return {
        add,
        get count() {
            return count;
        },
        // Prints all deferred groups, most frequent code first, then resets.
        flush() {
            if (count === 0 || silent)
                return;
            const codes = [...deferredWarnings.keys()].sort((a, b) => deferredWarnings.get(b).length - deferredWarnings.get(a).length);
            for (const code of codes) {
                deferredHandlers[code](deferredWarnings.get(code));
            }
            deferredWarnings.clear();
            count = 0;
        },
        // Generic log sink: warns are routed through add(); debug and info
        // logs are printed immediately with level-specific coloring.
        log(level, log) {
            if (!logFilter(log))
                return;
            switch (level) {
                case parseAst_js.LOGLEVEL_WARN: {
                    return add(log);
                }
                case parseAst_js.LOGLEVEL_DEBUG: {
                    if (!silent) {
                        rollup.stderr(rollup.bold(rollup.pc.blue(log.message)));
                        defaultBody(log);
                    }
                    return;
                }
                default: {
                    if (!silent) {
                        rollup.stderr(rollup.bold(rollup.pc.cyan(log.message)));
                        defaultBody(log);
                    }
                }
            }
        },
        get warningOccurred() {
            return warningOccurred;
        }
    };
}
|
||||
// Warning codes that are printed as soon as they occur (never batched).
const immediateHandlers = {
    MISSING_NODE_BUILTINS(warning) {
        title(`Missing shims for Node.js built-ins`);
        rollup.stderr(`Creating a browser bundle that depends on ${parseAst_js.printQuotedStringList(warning.ids)}. You might need to include https://github.com/FredKSchott/rollup-plugin-polyfill-node`);
    },
    UNKNOWN_OPTION(warning) {
        title(`You have passed an unrecognized option`);
        rollup.stderr(warning.message);
    }
};
|
||||
// Warning codes whose occurrences are collected by batchWarnings() and
// printed as one grouped report per code when flush() runs. Each handler
// receives the full array of warnings for its code.
const deferredHandlers = {
    CIRCULAR_DEPENDENCY(warnings) {
        title(`Circular dependenc${warnings.length > 1 ? 'ies' : 'y'}`);
        // Show at most 3 cycles when there are more than 5.
        const displayed = warnings.length > 5 ? warnings.slice(0, 3) : warnings;
        for (const warning of displayed) {
            rollup.stderr(warning.ids.map(parseAst_js.relativeId).join(' -> '));
        }
        if (warnings.length > displayed.length) {
            rollup.stderr(`...and ${warnings.length - displayed.length} more`);
        }
    },
    EMPTY_BUNDLE(warnings) {
        title(`Generated${warnings.length === 1 ? ' an' : ''} empty ${warnings.length > 1 ? 'chunks' : 'chunk'}`);
        rollup.stderr(parseAst_js.printQuotedStringList(warnings.map(warning => warning.names[0])));
    },
    EVAL(warnings) {
        title('Use of eval is strongly discouraged');
        info(parseAst_js.getRollupUrl(parseAst_js.URL_AVOIDING_EVAL));
        showTruncatedWarnings(warnings);
    },
    MISSING_EXPORT(warnings) {
        title('Missing exports');
        info(parseAst_js.getRollupUrl(parseAst_js.URL_NAME_IS_NOT_EXPORTED));
        for (const warning of warnings) {
            rollup.stderr(rollup.bold(parseAst_js.relativeId(warning.id)));
            rollup.stderr(`${warning.binding} is not exported by ${parseAst_js.relativeId(warning.exporter)}`);
            rollup.stderr(rollup.gray(warning.frame));
        }
    },
    MISSING_GLOBAL_NAME(warnings) {
        title(`Missing global variable ${warnings.length > 1 ? 'names' : 'name'}`);
        info(parseAst_js.getRollupUrl(parseAst_js.URL_OUTPUT_GLOBALS));
        rollup.stderr(`Use "output.globals" to specify browser global variable names corresponding to external modules:`);
        for (const warning of warnings) {
            rollup.stderr(`${rollup.bold(warning.id)} (guessing "${warning.names[0]}")`);
        }
    },
    MIXED_EXPORTS(warnings) {
        title('Mixing named and default exports');
        info(parseAst_js.getRollupUrl(parseAst_js.URL_OUTPUT_EXPORTS));
        rollup.stderr(rollup.bold('The following entry modules are using named and default exports together:'));
        warnings.sort((a, b) => (a.id < b.id ? -1 : 1));
        // Show at most 3 modules when there are more than 5.
        const displayedWarnings = warnings.length > 5 ? warnings.slice(0, 3) : warnings;
        for (const warning of displayedWarnings) {
            rollup.stderr(parseAst_js.relativeId(warning.id));
        }
        if (displayedWarnings.length < warnings.length) {
            rollup.stderr(`...and ${warnings.length - displayedWarnings.length} other entry modules`);
        }
        rollup.stderr(`\nConsumers of your bundle will have to use chunk.default to access their default export, which may not be what you want. Use \`output.exports: "named"\` to disable this warning.`);
    },
    NAMESPACE_CONFLICT(warnings) {
        title(`Conflicting re-exports`);
        for (const warning of warnings) {
            rollup.stderr(`"${rollup.bold(parseAst_js.relativeId(warning.reexporter))}" re-exports "${warning.binding}" from both "${parseAst_js.relativeId(warning.ids[0])}" and "${parseAst_js.relativeId(warning.ids[1])}" (will be ignored).`);
        }
    },
    PLUGIN_WARNING(warnings) {
        // Group by plugin, then by message, so repeated messages share a title.
        const nestedByPlugin = nest(warnings, 'plugin');
        for (const { items } of nestedByPlugin) {
            const nestedByMessage = nest(items, 'message');
            let lastUrl = '';
            for (const { key: message, items } of nestedByMessage) {
                title(message);
                for (const warning of items) {
                    // Only print a docs URL when it changes between warnings.
                    if (warning.url && warning.url !== lastUrl)
                        info((lastUrl = warning.url));
                    const loc = formatLocation(warning);
                    if (loc) {
                        rollup.stderr(rollup.bold(loc));
                    }
                    if (warning.frame)
                        info(warning.frame);
                }
            }
        }
    },
    SOURCEMAP_BROKEN(warnings) {
        title(`Broken sourcemap`);
        info(parseAst_js.getRollupUrl(parseAst_js.URL_SOURCEMAP_IS_LIKELY_TO_BE_INCORRECT));
        // Deduplicated list of plugins implicated in the broken map.
        const plugins = [...new Set(warnings.map(({ plugin }) => plugin).filter(Boolean))];
        rollup.stderr(`Plugins that transform code (such as ${parseAst_js.printQuotedStringList(plugins)}) should generate accompanying sourcemaps.`);
    },
    THIS_IS_UNDEFINED(warnings) {
        title('"this" has been rewritten to "undefined"');
        info(parseAst_js.getRollupUrl(parseAst_js.URL_THIS_IS_UNDEFINED));
        showTruncatedWarnings(warnings);
    },
    UNRESOLVED_IMPORT(warnings) {
        title('Unresolved dependencies');
        info(parseAst_js.getRollupUrl(parseAst_js.URL_TREATING_MODULE_AS_EXTERNAL_DEPENDENCY));
        // dependency -> list of importing modules
        const dependencies = new Map();
        for (const warning of warnings) {
            rollup.getOrCreate(dependencies, parseAst_js.relativeId(warning.exporter), rollup.getNewArray).push(parseAst_js.relativeId(warning.id));
        }
        for (const [dependency, importers] of dependencies) {
            rollup.stderr(`${rollup.bold(dependency)} (imported by ${parseAst_js.printQuotedStringList(importers)})`);
        }
    },
    UNUSED_EXTERNAL_IMPORT(warnings) {
        title('Unused external imports');
        for (const warning of warnings) {
            rollup.stderr(warning.names +
                ' imported from external module "' +
                warning.exporter +
                '" but never used in ' +
                parseAst_js.printQuotedStringList(warning.ids.map(parseAst_js.relativeId)) +
                '.');
        }
    }
};
|
||||
// Prints the shared tail of a log entry: its docs URL, a bold
// "file:line:column" location, and the highlighted source frame — each only
// when present on the log object.
function defaultBody(log) {
    if (log.url) {
        info(log.url);
    }
    const location = formatLocation(log);
    if (location) {
        rollup.stderr(rollup.bold(location));
    }
    if (log.frame) {
        info(log.frame);
    }
}
|
||||
// Prints a bold yellow "(!) <message>" heading to stderr.
function title(string_) {
    const heading = `(!) ${string_}`;
    rollup.stderr(rollup.bold(rollup.yellow(heading)));
}
|
||||
// Prints secondary information (usually a documentation URL) in gray.
function info(url) {
    rollup.stderr(rollup.gray(url));
}
|
||||
// Groups `array` items by `item[property]`, preserving first-seen key order.
// Returns an array of { key, items } groups.
function nest(array, property) {
    const groups = [];
    const lookup = new Map();
    for (const element of array) {
        const groupKey = element[property];
        const group = rollup.getOrCreate(lookup, groupKey, () => {
            // First time this key is seen: create the group and record it in
            // insertion order.
            const newGroup = { items: [], key: groupKey };
            groups.push(newGroup);
            return newGroup;
        });
        group.items.push(element);
    }
    return groups;
}
|
||||
// Groups warnings by module id and prints a truncated summary: when more than
// five modules are affected, only the first three are shown, each with the
// first warning's code frame and a count of its remaining occurrences.
function showTruncatedWarnings(warnings) {
    const nestedByModule = nest(warnings, 'id');
    // Truncate only when it actually saves output (6+ modules -> show 3).
    const displayedByModule = nestedByModule.length > 5 ? nestedByModule.slice(0, 3) : nestedByModule;
    for (const { key: id, items } of displayedByModule) {
        rollup.stderr(rollup.bold(parseAst_js.relativeId(id)));
        // Only the first warning's frame is printed per module.
        rollup.stderr(rollup.gray(items[0].frame));
        if (items.length > 1) {
            rollup.stderr(`...and ${items.length - 1} other ${items.length > 2 ? 'occurrences' : 'occurrence'}`);
        }
    }
    if (nestedByModule.length > displayedByModule.length) {
        rollup.stderr(`\n...and ${nestedByModule.length - displayedByModule.length} other files`);
    }
}
|
||||
// Builds the log-filter predicate from all `--filterLogs` CLI values plus the
// ROLLUP_FILTER_LOGS environment variable. Each value may itself be a
// comma-separated list of filter expressions.
function generateLogFilter(command) {
    const filters = rollup.ensureArray(command.filterLogs).flatMap(filter => String(filter).split(','));
    if (process.env.ROLLUP_FILTER_LOGS) {
        filters.push(...process.env.ROLLUP_FILTER_LOGS.split(','));
    }
    return getLogFilter_js.getLogFilter(filters);
}
|
||||
// Formats a log's source position as "file:line:column", falling back to the
// module id when no precise location exists. Returns null when neither the
// location file nor the module id is known.
function formatLocation(log) {
    const id = log.loc?.file || log.id;
    if (!id) {
        return null;
    }
    if (log.loc) {
        return `${id}:${log.loc.line}:${log.loc.column}`;
    }
    return id;
}
|
||||
|
||||
// Virtual module id representing standard input on the command line.
const stdinName = '-';
// Cached promise for the fully-read stdin contents; stdin is read at most
// once even when the virtual module is loaded multiple times.
let stdinResult = null;
|
||||
// Rollup plugin that serves standard input as the virtual module "-".
// `argument` may be a file extension (from `--stdin=ext`) that is appended to
// the resolved id so downstream plugins can detect the content type.
function stdinPlugin(argument) {
    const suffix = typeof argument == 'string' && argument.length > 0 ? '.' + argument : '';
    return {
        load(id) {
            if (id !== stdinName && !id.startsWith(stdinName + '.')) {
                return;
            }
            // Lazily read stdin once and reuse the cached promise afterwards.
            if (!stdinResult) {
                stdinResult = readStdin();
            }
            return stdinResult;
        },
        name: 'stdin',
        resolveId(id) {
            return id === stdinName ? id + suffix : undefined;
        }
    };
}
|
||||
// Reads the entire standard input as a UTF-8 string. Resolves when the
// stream ends; rejects on a stream error.
function readStdin() {
    return new Promise((resolve, reject) => {
        const chunks = [];
        process$1.stdin.setEncoding('utf8');
        process$1.stdin
            .on('data', chunk => chunks.push(chunk))
            .on('end', () => {
            const result = chunks.join('');
            resolve(result);
        })
            .on('error', error => {
            reject(error);
        });
    });
}
|
||||
|
||||
// Plugin backing `--waitForBundleInput`: blocks buildStart until every input
// specifier resolves, polling every 500ms. Useful when another process is
// expected to create the input files.
function waitForInputPlugin() {
    return {
        async buildStart(options) {
            const inputSpecifiers = Array.isArray(options.input)
                ? options.input
                : Object.keys(options.input);
            // Remember the last missing input so each file is announced only once.
            let lastAwaitedSpecifier = null;
            // After every wait, restart the scan from the first specifier in case
            // an earlier input disappeared again in the meantime.
            checkSpecifiers: while (true) {
                for (const specifier of inputSpecifiers) {
                    if ((await this.resolve(specifier)) === null) {
                        if (lastAwaitedSpecifier !== specifier) {
                            rollup.stderr(`waiting for input ${rollup.bold(specifier)}...`);
                            lastAwaitedSpecifier = specifier;
                        }
                        await new Promise(resolve => setTimeout(resolve, 500));
                        continue checkSpecifiers;
                    }
                }
                // All inputs resolved: let the build proceed.
                break;
            }
        },
        name: 'wait-for-input'
    };
}
|
||||
|
||||
// Injects the CLI-only plugins (stdin support, --waitForBundleInput polling)
// and any `--plugin` entries into the resolved input options.
async function addCommandPluginsToInputOptions(inputOptions, command) {
    const { plugins } = inputOptions;
    // stdin support is on unless explicitly disabled with --no-stdin.
    if (command.stdin !== false) {
        plugins.push(stdinPlugin(command.stdin));
    }
    if (command.waitForBundleInput === true) {
        plugins.push(waitForInputPlugin());
    }
    await addPluginsFromCommandOption(command.plugin, inputOptions);
}
|
||||
// Normalizes the `--plugin` CLI option and registers every requested plugin.
// Entries containing "=", "{" or "}" are treated as a single inline plugin
// expression; anything else may be a comma-separated list of plugin names.
async function addPluginsFromCommandOption(commandPlugin, inputOptions) {
    if (!commandPlugin) {
        return;
    }
    const plugins = await rollup.normalizePluginOption(commandPlugin);
    for (const plugin of plugins) {
        if (/[={}]/.test(plugin)) {
            // -p plugin=value  /  -p "{transform(c,i){...}}"
            await loadAndRegisterPlugin(inputOptions, plugin);
            continue;
        }
        // -p node-resolve,commonjs,buble
        for (const pluginName of plugin.split(',')) {
            await loadAndRegisterPlugin(inputOptions, pluginName);
        }
    }
}
|
||||
// Loads a single `--plugin` CLI entry and appends the instantiated plugin to
// `inputOptions.plugins`. Supported forms:
//   -p "{...}"          inline plugin object (evaluated as JavaScript)
//   -p name             package name, tried with @rollup/plugin-/rollup-plugin- prefixes
//   -p name=arg         package called with `arg` (evaluated as JavaScript)
//   -p ./path, C:\...   local file (Windows paths converted to file:// URLs)
// SECURITY NOTE: both the inline form and the "=arg" value are evaluated via
// `new Function`, i.e. arbitrary code execution. This is acceptable here only
// because the text comes from the user's own command line.
async function loadAndRegisterPlugin(inputOptions, pluginText) {
    let plugin = null;
    let pluginArgument = undefined;
    if (pluginText[0] === '{') {
        // -p "{transform(c,i){...}}"
        plugin = new Function('return ' + pluginText);
    }
    else {
        const match = pluginText.match(/^([\w./:@\\^{|}-]+)(=(.*))?$/);
        if (match) {
            // -p plugin
            // -p plugin=arg
            pluginText = match[1];
            // With no "=arg" part, match[3] is undefined and this evaluates to
            // `return undefined`, leaving pluginArgument undefined.
            pluginArgument = new Function('return ' + match[3])();
        }
        else {
            throw new Error(`Invalid --plugin argument format: ${JSON.stringify(pluginText)}`);
        }
        if (!/^\.|^rollup-plugin-|[/@\\]/.test(pluginText)) {
            // Try using plugin prefix variations first if applicable.
            // Prefix order is significant - left has higher precedence.
            for (const prefix of ['@rollup/plugin-', 'rollup-plugin-']) {
                try {
                    plugin = await requireOrImport(prefix + pluginText);
                    break;
                }
                catch {
                    // if this does not work, we try requiring the actual name below
                }
            }
        }
        if (!plugin) {
            try {
                if (pluginText[0] == '.')
                    pluginText = path.resolve(pluginText);
                // Windows absolute paths must be specified as file:// protocol URL
                // Note that we do not have coverage for Windows-only code paths
                else if (/^[A-Za-z]:\\/.test(pluginText)) {
                    pluginText = node_url.pathToFileURL(path.resolve(pluginText)).href;
                }
                plugin = await requireOrImport(pluginText);
            }
            catch (error) {
                throw new Error(`Cannot load plugin "${pluginText}": ${error.message}.`);
            }
        }
    }
    // some plugins do not use `module.exports` for their entry point,
    // in which case we try the named default export and the plugin name
    if (typeof plugin === 'object') {
        plugin = plugin.default || plugin[getCamelizedPluginBaseName(pluginText)];
    }
    if (!plugin) {
        throw new Error(`Cannot find entry for plugin "${pluginText}". The plugin needs to export a function either as "default" or "${getCamelizedPluginBaseName(pluginText)}" for Rollup to recognize it.`);
    }
    // Plugin factories are called with the parsed "=arg" value; pre-built
    // plugin objects are pushed as-is.
    inputOptions.plugins.push(typeof plugin === 'function' ? plugin.call(plugin, pluginArgument) : plugin);
}
|
||||
// Derives the camelCase export name for a plugin from its package name or
// file path, e.g. "@rollup/plugin-node-resolve" -> "nodeResolve".
function getCamelizedPluginBaseName(pluginText) {
    // Strip the conventional package prefix if present.
    const withoutPrefix = pluginText.match(/(@rollup\/plugin-|rollup-plugin-)(.+)$/)?.[2] || pluginText;
    // Keep only the last path segment and drop any file extension.
    const segments = withoutPrefix.split(/[/\\]/);
    const baseName = segments[segments.length - 1].split('.')[0];
    // Camelize dash-separated parts; empty parts (double dashes) pass through.
    return baseName
        .split('-')
        .map((part, index) => (index === 0 || !part ? part : part[0].toUpperCase() + part.slice(1)))
        .join('');
}
|
||||
// Loads a plugin module: tries synchronous `require` (CommonJS) first and
// falls back to dynamic `import()` for ES modules.
async function requireOrImport(pluginPath) {
    try {
        // eslint-disable-next-line @typescript-eslint/no-require-imports
        return require(pluginPath);
    }
    catch {
        return import(pluginPath);
    }
}
|
||||
|
||||
// Loads a rollup config file, merges the CLI options into every exported
// configuration, injects CLI plugins, and returns the normalized options
// together with the warning collector. Buffered warnings are flushed before
// rethrowing any failure so the user still sees them.
const loadConfigFile = async (fileName, commandOptions = {}, watchMode = false) => {
    const configs = await getConfigList(getDefaultFromCjs(await getConfigFileExport(fileName, commandOptions, watchMode)), commandOptions);
    const warnings = batchWarnings(commandOptions);
    try {
        const normalizedConfigs = [];
        for (const config of configs) {
            const options = await rollup.mergeOptions(config, watchMode, commandOptions, warnings.log);
            await addCommandPluginsToInputOptions(options, commandOptions);
            normalizedConfigs.push(options);
        }
        return { options: normalizedConfigs, warnings };
    }
    catch (error_) {
        warnings.flush();
        throw error_;
    }
};
|
||||
// Obtains the config file's export. Uses the transpile-and-bundle path when
// `--configPlugin`/`--bundleConfigAsCjs` is given; otherwise imports the file
// directly as an ES module, translating common Node interop failures into
// actionable Rollup diagnostics.
async function getConfigFileExport(fileName, commandOptions, watchMode) {
    if (commandOptions.configPlugin || commandOptions.bundleConfigAsCjs) {
        try {
            return await loadTranspiledConfigFile(fileName, commandOptions);
        }
        catch (error_) {
            if (error_.message.includes('not defined in ES module scope')) {
                return parseAst_js.error(parseAst_js.logCannotBundleConfigAsEsm(error_));
            }
            throw error_;
        }
    }
    // Node only emits a process warning (not a distinguishable error) when a
    // CommonJS config is loaded via import(); capture it to improve the error
    // reported in the catch block below.
    let cannotLoadEsm = false;
    const handleWarning = (warning) => {
        if (warning.message.includes('To load an ES module')) {
            cannotLoadEsm = true;
        }
    };
    process$1.on('warning', handleWarning);
    try {
        const fileUrl = node_url.pathToFileURL(fileName);
        if (watchMode) {
            // We are adding the current date to allow reloads in watch mode
            fileUrl.search = `?${Date.now()}`;
        }
        return (await import(fileUrl.href)).default;
    }
    catch (error_) {
        if (cannotLoadEsm) {
            return parseAst_js.error(parseAst_js.logCannotLoadConfigAsCjs(error_));
        }
        if (error_.message.includes('not defined in ES module scope')) {
            return parseAst_js.error(parseAst_js.logCannotLoadConfigAsEsm(error_));
        }
        throw error_;
    }
    finally {
        // Always detach the listener, even on the early returns above.
        process$1.off('warning', handleWarning);
    }
}
|
||||
// Unwraps an interop "default" export from a namespace object; returns the
// namespace itself when no truthy default export is present.
function getDefaultFromCjs(namespace) {
    if (namespace.default) {
        return namespace.default;
    }
    return namespace;
}
|
||||
// Restricts --configImportAttributesKey to its two valid values; any other
// input (including undefined) yields undefined so Rollup uses its default.
function getConfigImportAttributesKey(input) {
    return input === 'assert' || input === 'with' ? input : undefined;
}
|
||||
// Bundles the config file with Rollup itself (applying any --configPlugin
// plugins), rewriting `import.meta` references to absolute literal values so
// the transpiled copy behaves like the original file, then evaluates the
// bundle from a temporary sibling file.
async function loadTranspiledConfigFile(fileName, commandOptions) {
    const { bundleConfigAsCjs, configPlugin, configImportAttributesKey, silent } = commandOptions;
    const warnings = batchWarnings(commandOptions);
    const inputOptions = {
        // Bare module ids and .json files stay external so only the config's
        // own relative sources are bundled.
        external: (id) => (id[0] !== '.' && !path.isAbsolute(id)) || id.slice(-5) === '.json',
        input: fileName,
        onwarn: warnings.add,
        plugins: [],
        treeshake: false
    };
    await addPluginsFromCommandOption(configPlugin, inputOptions);
    const bundle = await rollup.rollup(inputOptions);
    const { output: [{ code }] } = await bundle.generate({
        exports: 'named',
        format: bundleConfigAsCjs ? 'cjs' : 'es',
        importAttributesKey: getConfigImportAttributesKey(configImportAttributesKey),
        plugins: [
            {
                name: 'transpile-import-meta',
                // Replace import.meta.url/filename/dirname (and bare
                // import.meta) with literals pointing at the ORIGINAL module,
                // not the temporary bundle written below.
                resolveImportMeta(property, { moduleId }) {
                    if (property === 'url') {
                        return `'${node_url.pathToFileURL(moduleId).href}'`;
                    }
                    if (property == 'filename') {
                        return `'${moduleId}'`;
                    }
                    if (property == 'dirname') {
                        return `'${path.dirname(moduleId)}'`;
                    }
                    if (property == null) {
                        return `{url:'${node_url.pathToFileURL(moduleId).href}', filename: '${moduleId}', dirname: '${path.dirname(moduleId)}'}`;
                    }
                }
            }
        ]
    });
    if (!silent && warnings.count > 0) {
        rollup.stderr(rollup.bold(`loaded ${parseAst_js.relativeId(fileName)} with warnings`));
        warnings.flush();
    }
    // The timestamp keeps concurrent invocations from clobbering each other's
    // temporary config bundle.
    return loadConfigFromWrittenFile(path.join(path.dirname(fileName), `rollup.config-${Date.now()}.${bundleConfigAsCjs ? 'cjs' : 'mjs'}`), code);
}
|
||||
// Writes the transpiled config to disk, imports it, and always removes the
// temporary file afterwards; deletion failures are only logged, not thrown.
async function loadConfigFromWrittenFile(bundledFileName, bundledCode) {
    await promises.writeFile(bundledFileName, bundledCode);
    try {
        return (await import(node_url.pathToFileURL(bundledFileName).href)).default;
    }
    finally {
        promises.unlink(bundledFileName).catch(error => console.warn(error?.message || error));
    }
}
|
||||
// Resolves the config file's export (which may be a function of the CLI
// options) and always returns an array of config objects. An export with no
// own keys is reported as a "missing config" error.
async function getConfigList(configFileExport, commandOptions) {
    const exported = typeof configFileExport === 'function'
        ? configFileExport(commandOptions)
        : configFileExport;
    const config = await exported;
    if (Object.keys(config).length === 0) {
        return parseAst_js.error(parseAst_js.logMissingConfig());
    }
    if (Array.isArray(config)) {
        return config;
    }
    return [config];
}
|
||||
|
||||
// Public interface consumed by the rollup CLI entry points.
exports.addCommandPluginsToInputOptions = addCommandPluginsToInputOptions;
exports.batchWarnings = batchWarnings;
exports.loadConfigFile = loadConfigFile;
exports.stdinName = stdinName;
|
||||
//# sourceMappingURL=loadConfigFile.js.map
|
||||
@@ -0,0 +1,261 @@
|
||||
"use strict";

Object.defineProperty(exports, "__esModule", {
  value: true
});
// Named exports bind to the hoisted function declarations defined below.
exports.assertSimpleType = assertSimpleType;
exports.makeStrongCache = makeStrongCache;
exports.makeStrongCacheSync = makeStrongCacheSync;
exports.makeWeakCache = makeWeakCache;
exports.makeWeakCacheSync = makeWeakCacheSync;
|
||||
// Lazily loads the "gensync" module on first call, then memoizes it by
// replacing this function with a constant accessor.
function _gensync() {
  const loaded = require("gensync");
  _gensync = () => loaded;
  return loaded;
}
|
||||
var _async = require("../gensync-utils/async.js");
|
||||
var _util = require("./util.js");
|
||||
// Converts a gensync generator into its synchronous runner.
const synchronize = gen => _gensync()(gen).sync;
|
||||
// Validator used for "forever" cache entries: a gensync routine that always
// reports the entry as valid.
function* genTrue() {
  return true;
}
|
||||
// Creates a gensync-cached version of `handler` keyed by object identity;
// WeakMap storage lets entries be garbage-collected with their keys.
function makeWeakCache(handler) {
  return makeCachedFunction(WeakMap, handler);
}
// Synchronous-only variant of makeWeakCache.
function makeWeakCacheSync(handler) {
  return synchronize(makeWeakCache(handler));
}
// Creates a gensync-cached version of `handler` with strongly-held keys.
function makeStrongCache(handler) {
  return makeCachedFunction(Map, handler);
}
// Synchronous-only variant of makeStrongCache.
function makeStrongCacheSync(handler) {
  return synchronize(makeStrongCache(handler));
}
|
||||
// Core cache implementation. Keeps three caches per function: separate result
// caches for sync and async execution contexts, plus `futureCache` for
// in-flight async computations so concurrent callers can await the same
// pending result instead of recomputing it.
function makeCachedFunction(CallCache, handler) {
  const callCacheSync = new CallCache();
  const callCacheAsync = new CallCache();
  const futureCache = new CallCache();
  return function* cachedFunction(arg, data) {
    const asyncContext = yield* (0, _async.isAsync)();
    const callCache = asyncContext ? callCacheAsync : callCacheSync;
    const cached = yield* getCachedValueOrWait(asyncContext, callCache, futureCache, arg, data);
    if (cached.valid) return cached.value;
    const cache = new CacheConfigurator(data);
    const handlerResult = handler(arg, cache);
    let finishLock;
    let value;
    if ((0, _util.isIterableIterator)(handlerResult)) {
      // Generator handler: when it first pauses (performs async work),
      // publish a lock in futureCache so other callers can wait on it.
      value = yield* (0, _async.onFirstPause)(handlerResult, () => {
        finishLock = setupAsyncLocks(cache, futureCache, arg);
      });
    } else {
      value = handlerResult;
    }
    updateFunctionCache(callCache, cache, arg, value);
    if (finishLock) {
      // Wake everyone who was waiting on the in-flight computation.
      futureCache.delete(arg);
      finishLock.release(value);
    }
    return value;
  };
}
|
||||
// Scans the entries stored for `arg` and returns the first whose validator
// accepts the current `data`. Delegates to the validator with yield* so
// asynchronous validators work in async contexts.
function* getCachedValue(cache, arg, data) {
  const entries = cache.get(arg);
  if (entries) {
    for (const entry of entries) {
      const isValid = yield* entry.valid(data);
      if (isValid) {
        return { valid: true, value: entry.value };
      }
    }
  }
  return { valid: false, value: null };
}
|
||||
// Looks up `arg` in the result cache. In async contexts it additionally
// checks `futureCache` for a pending computation of the same argument and
// awaits its promise rather than starting a duplicate computation. Sync
// callers never wait — blocking on an in-flight async result could deadlock.
function* getCachedValueOrWait(asyncContext, callCache, futureCache, arg, data) {
  const cached = yield* getCachedValue(callCache, arg, data);
  if (cached.valid) {
    return cached;
  }
  if (asyncContext) {
    // futureCache entries hold Lock objects; wait for the lock's promise.
    const cached = yield* getCachedValue(futureCache, arg, data);
    if (cached.valid) {
      const value = yield* (0, _async.waitFor)(cached.value.promise);
      return {
        valid: true,
        value
      };
    }
  }
  return {
    valid: false,
    value: null
  };
}
|
||||
// Registers a Lock in `futureCache` for an in-flight async computation so
// concurrent callers can await its promise; returns the lock so the owner
// can release it with the final value.
function setupAsyncLocks(config, futureCache, arg) {
  const finishLock = new Lock();
  updateFunctionCache(futureCache, config, arg, finishLock);
  return finishLock;
}
|
||||
// Stores `value` in `cache` according to the policy collected by the
// CacheConfigurator: "forever"/"invalidate" replace all entries for `arg`,
// "valid" appends to existing entries (older entries may still validate for
// other data), and "never" matches no case so nothing is stored.
function updateFunctionCache(cache, config, arg, value) {
  // Handlers that made no explicit choice default to permanent caching.
  if (!config.configured()) config.forever();
  let cachedValue = cache.get(arg);
  // Freeze the configurator: the handler may no longer change cache policy.
  config.deactivate();
  switch (config.mode()) {
    case "forever":
      cachedValue = [{
        value,
        valid: genTrue
      }];
      cache.set(arg, cachedValue);
      break;
    case "invalidate":
      cachedValue = [{
        value,
        valid: config.validator()
      }];
      cache.set(arg, cachedValue);
      break;
    case "valid":
      if (cachedValue) {
        cachedValue.push({
          value,
          valid: config.validator()
        });
      } else {
        cachedValue = [{
          value,
          valid: config.validator()
        }];
        cache.set(arg, cachedValue);
      }
  }
}
|
||||
// Fluent API handed to cache handlers for declaring how their result may be
// cached: forever, never, or keyed on values derived from the cache data
// (`using`) with optional replacement semantics (`invalidate`). Once the
// handler's synchronous part completes, the configurator is deactivated and
// further policy changes throw.
class CacheConfigurator {
  constructor(data) {
    // Set to false by deactivate(); mutating methods then throw.
    this._active = true;
    this._never = false;
    this._forever = false;
    this._invalidate = false;
    // Whether the handler made any explicit caching choice.
    this._configured = false;
    // [key, validator] pairs recorded by using()/invalidate().
    this._pairs = [];
    this._data = void 0;
    this._data = data;
  }
  // Returns the simplified function-style API (see makeSimpleConfigurator).
  simple() {
    return makeSimpleConfigurator(this);
  }
  // Maps the configured flags onto the storage policy consumed by
  // updateFunctionCache; precedence: never > forever > invalidate > valid.
  mode() {
    if (this._never) return "never";
    if (this._forever) return "forever";
    if (this._invalidate) return "invalidate";
    return "valid";
  }
  // Cache the result permanently. Mutually exclusive with never().
  forever() {
    if (!this._active) {
      throw new Error("Cannot change caching after evaluation has completed.");
    }
    if (this._never) {
      throw new Error("Caching has already been configured with .never()");
    }
    this._forever = true;
    this._configured = true;
  }
  // Never cache the result. Mutually exclusive with forever().
  never() {
    if (!this._active) {
      throw new Error("Cannot change caching after evaluation has completed.");
    }
    if (this._forever) {
      throw new Error("Caching has already been configured with .forever()");
    }
    this._never = true;
    this._configured = true;
  }
  // Records handler(data) as part of the cache key; the stored entry remains
  // valid while the handler keeps returning the same key. Thenable keys are
  // supported (async contexts only — maybeAsync enforces this at call time).
  using(handler) {
    if (!this._active) {
      throw new Error("Cannot change caching after evaluation has completed.");
    }
    if (this._never || this._forever) {
      throw new Error("Caching has already been configured with .never or .forever()");
    }
    this._configured = true;
    const key = handler(this._data);
    const fn = (0, _async.maybeAsync)(handler, `You appear to be using an async cache handler, but Babel has been called synchronously`);
    if ((0, _async.isThenable)(key)) {
      return key.then(key => {
        this._pairs.push([key, fn]);
        return key;
      });
    }
    this._pairs.push([key, fn]);
    return key;
  }
  // Like using(), but existing cache entries are replaced rather than
  // appended to (see the "invalidate" case in updateFunctionCache).
  invalidate(handler) {
    this._invalidate = true;
    return this.using(handler);
  }
  // Builds the gensync validator that re-checks every recorded key pair.
  validator() {
    const pairs = this._pairs;
    return function* (data) {
      for (const [key, fn] of pairs) {
        if (key !== (yield* fn(data))) return false;
      }
      return true;
    };
  }
  // Called once the handler finishes; freezes the configuration.
  deactivate() {
    this._active = false;
  }
  configured() {
    return this._configured;
  }
}
|
||||
// Wraps a CacheConfigurator in the simplified callable API exposed to plugin
// authors: cache(true/false) or cache(() => key), plus the forever/never/
// using/invalidate helpers. All keys are validated as simple types.
function makeSimpleConfigurator(cache) {
  function cacheFn(val) {
    if (typeof val === "boolean") {
      // cache(true) == cache forever, cache(false) == never cache.
      if (val) {
        cache.forever();
      } else {
        cache.never();
      }
      return;
    }
    return cache.using(() => assertSimpleType(val()));
  }
  cacheFn.forever = () => cache.forever();
  cacheFn.never = () => cache.never();
  cacheFn.using = cb => cache.using(() => assertSimpleType(cb()));
  cacheFn.invalidate = cb => cache.invalidate(() => assertSimpleType(cb()));
  return cacheFn;
}
|
||||
// Validates a cache key returned by a handler: only string, boolean, number,
// null, or undefined are allowed. Thenables are rejected explicitly because
// the surrounding call may be running synchronously.
function assertSimpleType(value) {
  if ((0, _async.isThenable)(value)) {
    throw new Error(`You appear to be using an async cache handler, ` + `which your current version of Babel does not support. ` + `We may add support for this in the future, ` + `but if you're on the most recent version of @babel/core and still ` + `seeing this error, then you'll need to synchronously handle your caching logic.`);
  }
  const type = typeof value;
  const isPrimitiveKey = type === "string" || type === "boolean" || type === "number";
  if (value != null && !isPrimitiveKey) {
    throw new Error("Cache keys must be either string, boolean, number, null, or undefined.");
  }
  return value;
}
|
||||
// Single-use latch pairing a promise with its resolver: release(value)
// settles `promise` with `value` and flips the `released` flag.
class Lock {
  constructor() {
    this.released = false;
    this.promise = void 0;
    this._resolve = void 0;
    this.promise = new Promise(resolvePromise => {
      // The executor runs synchronously, so _resolve is set before the
      // constructor returns.
      this._resolve = resolvePromise;
    });
  }
  release(value) {
    this.released = true;
    this._resolve(value);
  }
}
|
||||
0 && 0;
|
||||
|
||||
//# sourceMappingURL=caching.js.map
|
||||
@@ -0,0 +1,65 @@
|
||||
/**
|
||||
* @fileoverview Define the cursor which iterates tokens and comments.
|
||||
* @author Toru Nagashima
|
||||
*/
|
||||
"use strict";
|
||||
|
||||
//------------------------------------------------------------------------------
|
||||
// Requirements
|
||||
//------------------------------------------------------------------------------
|
||||
|
||||
const Cursor = require("./cursor");
|
||||
const { getFirstIndex, search } = require("./utils");
|
||||
|
||||
//------------------------------------------------------------------------------
|
||||
// Exports
|
||||
//------------------------------------------------------------------------------
|
||||
|
||||
/**
|
||||
* The cursor which iterates tokens and comments.
|
||||
*/
|
||||
module.exports = class ForwardTokenCommentCursor extends Cursor {
    /**
     * Initializes this cursor.
     * @param {Token[]} tokens The array of tokens.
     * @param {Comment[]} comments The array of comments.
     * @param {Object} indexMap The map from locations to indices in `tokens`.
     * @param {number} startLoc The start location of the iteration range.
     * @param {number} endLoc The end location of the iteration range.
     */
    constructor(tokens, comments, indexMap, startLoc, endLoc) {
        super();
        this.tokens = tokens;
        this.comments = comments;
        this.tokenIndex = getFirstIndex(tokens, indexMap, startLoc);
        this.commentIndex = search(comments, startLoc);
        this.border = endLoc;
    }

    /** @inheritdoc */
    moveNext() {
        // Peek the next candidate from each stream, or null when exhausted.
        const token =
            this.tokenIndex < this.tokens.length
                ? this.tokens[this.tokenIndex]
                : null;
        const comment =
            this.commentIndex < this.comments.length
                ? this.comments[this.commentIndex]
                : null;

        // Merge the two streams in source order: advance whichever starts
        // first in the file.
        if (token && (!comment || token.range[0] < comment.range[0])) {
            this.current = token;
            this.tokenIndex += 1;
        } else if (comment) {
            this.current = comment;
            this.commentIndex += 1;
        } else {
            this.current = null;
        }

        // A border of -1 means "no end bound"; otherwise stop once the
        // selected element would extend past the end of the range.
        return (
            Boolean(this.current) &&
            (this.border === -1 || this.current.range[1] <= this.border)
        );
    }
};
|
||||
@@ -0,0 +1,27 @@
|
||||
/**
 * Base factory for the canvases used during rendering. Concrete subclasses
 * supply the environment-specific `_createCanvas` implementation.
 */
export class BaseCanvasFactory {
    constructor({ enableHWA }: {
        enableHWA?: boolean | undefined;
    });
    /**
     * Creates a canvas/2d-context pair of the given pixel size.
     * NOTE(review): `canvas: void` looks like a declaration-generator
     * artifact — the runtime presumably returns a canvas object; confirm
     * against the implementation.
     */
    create(width: any, height: any): {
        canvas: void;
        context: any;
    };
    /** Resizes an existing canvas/context pair created by `create`. */
    reset(canvasAndContext: any, width: any, height: any): void;
    /** Disposes of a canvas/context pair previously returned by `create`. */
    destroy(canvasAndContext: any): void;
    /**
     * @ignore
     */
    _createCanvas(width: any, height: any): void;
    #private;
}
|
||||
/**
 * Canvas factory for browser/DOM environments: canvases are created via the
 * provided `ownerDocument` (defaulting per the constructor signature).
 */
export class DOMCanvasFactory extends BaseCanvasFactory {
    constructor({ ownerDocument, enableHWA }: {
        ownerDocument?: Document | undefined;
        enableHWA?: boolean | undefined;
    });
    _document: Document;
    /**
     * @ignore
     */
    _createCanvas(width: any, height: any): HTMLCanvasElement;
}
|
||||
File diff suppressed because one or more lines are too long
@@ -0,0 +1,5 @@
|
||||
import * as React from 'react'
|
||||
|
||||
// Renders its children inside a fragment. Presumably "Safe" because it uses
// the explicit React.Fragment form, which works even when the consumer's JSX
// transform does not support the <>...</> shorthand — TODO confirm intent.
export function SafeFragment(props: any) {
  return <React.Fragment>{props.children}</React.Fragment>
}
|
||||
@@ -0,0 +1 @@
|
||||
module.exports={A:{A:{"2":"K D E F A B mC"},B:{"1":"0 9 Q H R S T U V W X Y Z a b c d e f g h i j k l m n o p q r s t u v w x y z AB BB CB DB EB FB GB HB IB JB KB LB MB NB OB I","2":"C L M G N O P"},C:{"1":"0 9 ZB aB bB cB dB eB fB gB hB iB jB kB lB mB nB oB pB qB rB sB tB uB vB MC wB NC xB yB zB 0B 1B 2B 3B 4B 5B 6B 7B 8B 9B AC BC CC DC Q H R OC S T U V W X Y Z a b c d e f g h i j k l m n o p q r s t u v w x y z AB BB CB DB EB FB GB HB IB JB KB LB MB NB OB I PC EC QC RC oC pC","2":"1 2 3 4 5 6 7 8 nC LC J PB K D E F A B C L M G N O P QB RB SB TB UB VB WB XB YB qC rC"},D:{"1":"0 9 NC xB yB zB 0B 1B 2B 3B 4B 5B 6B 7B 8B 9B AC BC CC DC Q H R S T U V W X Y Z a b c d e f g h i j k l m n o p q r s t u v w x y z AB BB CB DB EB FB GB HB IB JB KB LB MB NB OB I PC EC QC RC","2":"1 2 3 4 5 6 7 8 J PB K D E F A B C L M G N O P QB RB SB TB UB VB WB XB YB ZB aB bB cB dB eB fB gB hB iB jB kB lB mB nB oB pB qB rB sB tB uB vB MC wB"},E:{"1":"M G yC zC UC VC HC 0C IC WC XC YC ZC aC 1C JC bC cC dC eC fC 2C KC gC hC iC jC 3C","2":"J PB K D E F sC SC tC uC vC wC","132":"A B C L TC FC GC xC"},F:{"1":"0 lB mB nB oB pB qB rB sB tB uB vB wB xB yB zB 0B 1B 2B 3B 4B 5B 6B 7B 8B 9B AC BC CC DC Q H R OC S T U V W X Y Z a b c d e f g h i j k l m n o p q r s t u v w x y z","2":"1 2 3 4 5 6 7 8 F B C G N O P QB RB SB TB UB VB WB XB YB ZB aB bB cB dB eB fB gB hB iB jB kB 4C 5C 6C 7C FC kC 8C GC"},G:{"1":"RD SD UC VC HC TD IC WC XC YC ZC aC UD JC bC cC dC eC fC VD KC gC hC iC jC","2":"E SC 9C lC AD BD CD DD ED FD","132":"GD HD ID JD KD LD MD ND OD PD QD"},H:{"2":"WD"},I:{"1":"I","2":"LC J XD YD ZD aD lC bD cD"},J:{"2":"D A"},K:{"1":"H","2":"A B C FC kC GC"},L:{"1":"I"},M:{"1":"EC"},N:{"2":"A B"},O:{"1":"HC"},P:{"1":"1 2 3 4 5 6 7 8 gD hD TC iD jD kD lD mD IC JC KC nD","2":"J dD eD fD"},Q:{"1":"oD"},R:{"1":"pD"},S:{"1":"qD rD"}},B:5,C:"Scroll methods on elements (scroll, scrollTo, scrollBy)",D:true};
|
||||
@@ -0,0 +1 @@
|
||||
{"version":3,"names":["getEnv","defaultValue","process","env","BABEL_ENV","NODE_ENV"],"sources":["../../../src/config/helpers/environment.ts"],"sourcesContent":["export function getEnv(defaultValue: string = \"development\"): string {\n return process.env.BABEL_ENV || process.env.NODE_ENV || defaultValue;\n}\n"],"mappings":";;;;;;AAAO,SAASA,MAAMA,CAACC,YAAoB,GAAG,aAAa,EAAU;EACnE,OAAOC,OAAO,CAACC,GAAG,CAACC,SAAS,IAAIF,OAAO,CAACC,GAAG,CAACE,QAAQ,IAAIJ,YAAY;AACtE;AAAC","ignoreList":[]}
|
||||
@@ -0,0 +1,429 @@
|
||||
<h1 align="center">
|
||||
<img width="250" src="https://jaredwray.com/images/keyv.svg" alt="keyv">
|
||||
<br>
|
||||
<br>
|
||||
</h1>
|
||||
|
||||
> Simple key-value storage with support for multiple backends
|
||||
|
||||
[](https://github.com/jaredwray/keyv/actions/workflows/tests.yaml)
|
||||
[](https://codecov.io/gh/jaredwray/keyv)
|
||||
[](https://www.npmjs.com/package/keyv)
|
||||
[](https://www.npmjs.com/package/keyv)
|
||||
|
||||
Keyv provides a consistent interface for key-value storage across multiple backends via storage adapters. It supports TTL based expiry, making it suitable as a cache or a persistent key-value store.
|
||||
|
||||
## Features
|
||||
|
||||
There are a few existing modules similar to Keyv, however Keyv is different because it:
|
||||
|
||||
- Isn't bloated
|
||||
- Has a simple Promise based API
|
||||
- Suitable as a TTL based cache or persistent key-value store
|
||||
- [Easily embeddable](#add-cache-support-to-your-module) inside another module
|
||||
- Works with any storage that implements the [`Map`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Map) API
|
||||
- Handles all JSON types plus `Buffer`
|
||||
- Supports namespaces
|
||||
- Wide range of [**efficient, well tested**](#official-storage-adapters) storage adapters
|
||||
- Connection errors are passed through (db failures won't kill your app)
|
||||
- Supports the current active LTS version of Node.js or higher
|
||||
|
||||
## Usage
|
||||
|
||||
Install Keyv.
|
||||
|
||||
```
|
||||
npm install --save keyv
|
||||
```
|
||||
|
||||
By default everything is stored in memory, you can optionally also install a storage adapter.
|
||||
|
||||
```
|
||||
npm install --save @keyv/redis
|
||||
npm install --save @keyv/mongo
|
||||
npm install --save @keyv/sqlite
|
||||
npm install --save @keyv/postgres
|
||||
npm install --save @keyv/mysql
|
||||
npm install --save @keyv/etcd
|
||||
```
|
||||
|
||||
Create a new Keyv instance, passing your connection string if applicable. Keyv will automatically load the correct storage adapter.
|
||||
|
||||
```js
|
||||
const Keyv = require('keyv');
|
||||
|
||||
// One of the following
|
||||
const keyv = new Keyv();
|
||||
const keyv = new Keyv('redis://user:pass@localhost:6379');
|
||||
const keyv = new Keyv('mongodb://user:pass@localhost:27017/dbname');
|
||||
const keyv = new Keyv('sqlite://path/to/database.sqlite');
|
||||
const keyv = new Keyv('postgresql://user:pass@localhost:5432/dbname');
|
||||
const keyv = new Keyv('mysql://user:pass@localhost:3306/dbname');
|
||||
const keyv = new Keyv('etcd://localhost:2379');
|
||||
|
||||
// Handle DB connection errors
|
||||
keyv.on('error', err => console.log('Connection Error', err));
|
||||
|
||||
await keyv.set('foo', 'expires in 1 second', 1000); // true
|
||||
await keyv.set('foo', 'never expires'); // true
|
||||
await keyv.get('foo'); // 'never expires'
|
||||
await keyv.delete('foo'); // true
|
||||
await keyv.clear(); // undefined
|
||||
```
|
||||
|
||||
### Namespaces
|
||||
|
||||
You can namespace your Keyv instance to avoid key collisions and allow you to clear only a certain namespace while using the same database.
|
||||
|
||||
```js
|
||||
const users = new Keyv('redis://user:pass@localhost:6379', { namespace: 'users' });
|
||||
const cache = new Keyv('redis://user:pass@localhost:6379', { namespace: 'cache' });
|
||||
|
||||
await users.set('foo', 'users'); // true
|
||||
await cache.set('foo', 'cache'); // true
|
||||
await users.get('foo'); // 'users'
|
||||
await cache.get('foo'); // 'cache'
|
||||
await users.clear(); // undefined
|
||||
await users.get('foo'); // undefined
|
||||
await cache.get('foo'); // 'cache'
|
||||
```
|
||||
|
||||
### Custom Serializers
|
||||
|
||||
Keyv uses [`json-buffer`](https://github.com/dominictarr/json-buffer) for data serialization to ensure consistency across different backends.
|
||||
|
||||
You can optionally provide your own serialization functions to support extra data types or to serialize to something other than JSON.
|
||||
|
||||
```js
|
||||
const keyv = new Keyv({ serialize: JSON.stringify, deserialize: JSON.parse });
|
||||
```
|
||||
|
||||
**Warning:** Using custom serializers means you lose any guarantee of data consistency. You should do extensive testing with your serialization functions and chosen storage engine.
|
||||
|
||||
## Official Storage Adapters
|
||||
|
||||
The official storage adapters are covered by [over 150 integration tests](https://github.com/jaredwray/keyv/actions/workflows/tests.yaml) to guarantee consistent behaviour. They are lightweight, efficient wrappers over the DB clients making use of indexes and native TTLs where available.
|
||||
|
||||
Database | Adapter | Native TTL
|
||||
---|---|---
|
||||
Redis | [@keyv/redis](https://github.com/jaredwray/keyv/tree/master/packages/redis) | Yes
|
||||
MongoDB | [@keyv/mongo](https://github.com/jaredwray/keyv/tree/master/packages/mongo) | Yes
|
||||
SQLite | [@keyv/sqlite](https://github.com/jaredwray/keyv/tree/master/packages/sqlite) | No
|
||||
PostgreSQL | [@keyv/postgres](https://github.com/jaredwray/keyv/tree/master/packages/postgres) | No
|
||||
MySQL | [@keyv/mysql](https://github.com/jaredwray/keyv/tree/master/packages/mysql) | No
|
||||
Etcd | [@keyv/etcd](https://github.com/jaredwray/keyv/tree/master/packages/etcd) | Yes
|
||||
Memcache | [@keyv/memcache](https://github.com/jaredwray/keyv/tree/master/packages/memcache) | Yes
|
||||
|
||||
## Third-party Storage Adapters
|
||||
|
||||
You can also use third-party storage adapters or build your own. Keyv will wrap these storage adapters in TTL functionality and handle complex types internally.
|
||||
|
||||
```js
|
||||
const Keyv = require('keyv');
|
||||
const myAdapter = require('./my-storage-adapter');
|
||||
|
||||
const keyv = new Keyv({ store: myAdapter });
|
||||
```
|
||||
|
||||
Any store that follows the [`Map`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Map) api will work.
|
||||
|
||||
```js
|
||||
new Keyv({ store: new Map() });
|
||||
```
|
||||
|
||||
For example, [`quick-lru`](https://github.com/sindresorhus/quick-lru) is a completely unrelated module that implements the Map API.
|
||||
|
||||
```js
|
||||
const Keyv = require('keyv');
|
||||
const QuickLRU = require('quick-lru');
|
||||
|
||||
const lru = new QuickLRU({ maxSize: 1000 });
|
||||
const keyv = new Keyv({ store: lru });
|
||||
```
|
||||
|
||||
The following are third-party storage adapters compatible with Keyv:
|
||||
|
||||
- [quick-lru](https://github.com/sindresorhus/quick-lru) - Simple "Least Recently Used" (LRU) cache
|
||||
- [keyv-file](https://github.com/zaaack/keyv-file) - File system storage adapter for Keyv
|
||||
- [keyv-dynamodb](https://www.npmjs.com/package/keyv-dynamodb) - DynamoDB storage adapter for Keyv
|
||||
- [keyv-lru](https://www.npmjs.com/package/keyv-lru) - LRU storage adapter for Keyv
|
||||
- [keyv-null](https://www.npmjs.com/package/keyv-null) - Null storage adapter for Keyv
|
||||
- [keyv-firestore](https://github.com/goto-bus-stop/keyv-firestore) – Firebase Cloud Firestore adapter for Keyv
|
||||
- [keyv-mssql](https://github.com/pmorgan3/keyv-mssql) - Microsoft Sql Server adapter for Keyv
|
||||
- [keyv-azuretable](https://github.com/howlowck/keyv-azuretable) - Azure Table Storage/API adapter for Keyv
|
||||
- [keyv-arango](https://github.com/TimMikeladze/keyv-arango) - ArangoDB storage adapter for Keyv
|
||||
- [keyv-momento](https://github.com/momentohq/node-keyv-adaptor/) - Momento storage adapter for Keyv
|
||||
|
||||
## Add Cache Support to your Module
|
||||
|
||||
Keyv is designed to be easily embedded into other modules to add cache support. The recommended pattern is to expose a `cache` option in your modules options which is passed through to Keyv. Caching will work in memory by default and users have the option to also install a Keyv storage adapter and pass in a connection string, or any other storage that implements the `Map` API.
|
||||
|
||||
You should also set a namespace for your module so you can safely call `.clear()` without clearing unrelated app data.
|
||||
|
||||
Inside your module:
|
||||
|
||||
```js
|
||||
class AwesomeModule {
|
||||
constructor(opts) {
|
||||
this.cache = new Keyv({
|
||||
uri: typeof opts.cache === 'string' && opts.cache,
|
||||
store: typeof opts.cache !== 'string' && opts.cache,
|
||||
namespace: 'awesome-module'
|
||||
});
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
Now it can be consumed like this:
|
||||
|
||||
```js
|
||||
const AwesomeModule = require('awesome-module');
|
||||
|
||||
// Caches stuff in memory by default
|
||||
const awesomeModule = new AwesomeModule();
|
||||
|
||||
// After npm install --save keyv-redis
|
||||
const awesomeModule = new AwesomeModule({ cache: 'redis://localhost' });
|
||||
|
||||
// Some third-party module that implements the Map API
|
||||
const awesomeModule = new AwesomeModule({ cache: some3rdPartyStore });
|
||||
```
|
||||
|
||||
## Compression
|
||||
|
||||
Keyv supports `gzip` and `brotli` compression. To enable compression, pass the `compress` option to the constructor.
|
||||
|
||||
```js
|
||||
const KeyvGzip = require('@keyv/compress-gzip');
|
||||
const Keyv = require('keyv');
|
||||
|
||||
const keyvGzip = new KeyvGzip();
|
||||
const keyv = new Keyv({ compression: keyvGzip });
|
||||
```
|
||||
|
||||
You can also pass a custom compression function to the `compression` option. Following the pattern of the official compression adapters.
|
||||
|
||||
### Want to build your own?
|
||||
|
||||
Great! Keyv is designed to be easily extended. You can build your own compression adapter by following the pattern of the official compression adapters based on this interface:
|
||||
|
||||
```typescript
|
||||
interface CompressionAdapter {
|
||||
async compress(value: any, options?: any);
|
||||
async decompress(value: any, options?: any);
|
||||
async serialize(value: any);
|
||||
async deserialize(value: any);
|
||||
}
|
||||
```
|
||||
|
||||
In addition to the interface, you can test it with our compression test suite using @keyv/test-suite:
|
||||
|
||||
```js
|
||||
const {keyvCompresstionTests} = require('@keyv/test-suite');
|
||||
const KeyvGzip = require('@keyv/compress-gzip');
|
||||
|
||||
keyvCompresstionTests(test, new KeyvGzip());
|
||||
```
|
||||
|
||||
## API
|
||||
|
||||
### new Keyv([uri], [options])
|
||||
|
||||
Returns a new Keyv instance.
|
||||
|
||||
The Keyv instance is also an `EventEmitter` that will emit an `'error'` event if the storage adapter connection fails.
|
||||
|
||||
### uri
|
||||
|
||||
Type: `String`<br>
|
||||
Default: `undefined`
|
||||
|
||||
The connection string URI.
|
||||
|
||||
Merged into the options object as `options.uri`.
|
||||
|
||||
### options
|
||||
|
||||
Type: `Object`
|
||||
|
||||
The options object is also passed through to the storage adapter. Check your storage adapter docs for any extra options.
|
||||
|
||||
#### options.namespace
|
||||
|
||||
Type: `String`<br>
|
||||
Default: `'keyv'`
|
||||
|
||||
Namespace for the current instance.
|
||||
|
||||
#### options.ttl
|
||||
|
||||
Type: `Number`<br>
|
||||
Default: `undefined`
|
||||
|
||||
Default TTL. Can be overridden by specifying a TTL on `.set()`.
|
||||
|
||||
#### options.compression
|
||||
|
||||
Type: `@keyv/compress-<compression_package_name>`<br>
|
||||
Default: `undefined`
|
||||
|
||||
Compression package to use. See [Compression](#compression) for more details.
|
||||
|
||||
#### options.serialize
|
||||
|
||||
Type: `Function`<br>
|
||||
Default: `JSONB.stringify`
|
||||
|
||||
A custom serialization function.
|
||||
|
||||
#### options.deserialize
|
||||
|
||||
Type: `Function`<br>
|
||||
Default: `JSONB.parse`
|
||||
|
||||
A custom deserialization function.
|
||||
|
||||
#### options.store
|
||||
|
||||
Type: `Storage adapter instance`<br>
|
||||
Default: `new Map()`
|
||||
|
||||
The storage adapter instance to be used by Keyv.
|
||||
|
||||
#### options.adapter
|
||||
|
||||
Type: `String`<br>
|
||||
Default: `undefined`
|
||||
|
||||
Specify an adapter to use, e.g. `'redis'` or `'mongodb'`.
|
||||
|
||||
### Instance
|
||||
|
||||
Keys must always be strings. Values can be of any type.
|
||||
|
||||
#### .set(key, value, [ttl])
|
||||
|
||||
Set a value.
|
||||
|
||||
By default keys are persistent. You can set an expiry TTL in milliseconds.
|
||||
|
||||
Returns a promise which resolves to `true`.
|
||||
|
||||
#### .get(key, [options])
|
||||
|
||||
Returns a promise which resolves to the retrieved value.
|
||||
|
||||
##### options.raw
|
||||
|
||||
Type: `Boolean`<br>
|
||||
Default: `false`
|
||||
|
||||
If set to true the raw DB object Keyv stores internally will be returned instead of just the value.
|
||||
|
||||
This contains the TTL timestamp.
|
||||
|
||||
#### .delete(key)
|
||||
|
||||
Deletes an entry.
|
||||
|
||||
Returns a promise which resolves to `true` if the key existed, `false` if not.
|
||||
|
||||
#### .clear()
|
||||
|
||||
Delete all entries in the current namespace.
|
||||
|
||||
Returns a promise which is resolved when the entries have been cleared.
|
||||
|
||||
#### .iterator()
|
||||
|
||||
Iterate over all entries of the current namespace.
|
||||
|
||||
Returns an iterable that can be iterated by for-of loops. For example:
|
||||
|
||||
```js
|
||||
// please note that the "await" keyword should be used here
|
||||
for await (const [key, value] of this.keyv.iterator()) {
|
||||
console.log(key, value);
|
||||
}
|
||||
```
|
||||
|
||||
# How to Contribute
|
||||
|
||||
In this section of the documentation we will cover:
|
||||
|
||||
1) How to set up this repository locally
|
||||
2) How to get started with running commands
|
||||
3) How to contribute changes using Pull Requests
|
||||
|
||||
## Dependencies
|
||||
|
||||
This package requires the following dependencies to run:
|
||||
|
||||
1) [Yarn V1](https://yarnpkg.com/getting-started/install)
|
||||
2) [Docker](https://docs.docker.com/get-docker/)
|
||||
|
||||
## Setting up your workspace
|
||||
|
||||
To contribute to this repository, start by setting up this project locally:
|
||||
|
||||
1) Fork this repository into your Git account
|
||||
2) Clone the forked repository to your local directory using `git clone`
|
||||
3) Install any of the above missing dependencies
|
||||
|
||||
## Launching the project
|
||||
|
||||
Once the project is installed locally, you are ready to start up its services:
|
||||
|
||||
1) Ensure that your Docker service is running.
|
||||
2) From the root directory of your project, run the `yarn` command in the command prompt to install the workspace's dependencies.
|
||||
3) Run the `yarn bootstrap` command to install any necessary dependencies.
|
||||
4) Run `yarn test:services:start` to start up this project's Docker container. The container will launch all services within your workspace.
|
||||
|
||||
## Available Commands
|
||||
|
||||
Once the project is running, you can execute a variety of commands. The root workspace and each subpackage contain a `package.json` file with a `scripts` field listing all the commands that can be executed from that directory. This project also supports native `yarn`, and `docker` commands.
|
||||
|
||||
Here, we'll cover the primary commands that can be executed from the root directory. Unless otherwise noted, these commands can also be executed from a subpackage. If executed from a subpackage, they will only affect that subpackage, rather than the entire workspace.
|
||||
|
||||
### `yarn`
|
||||
|
||||
The `yarn` command installs the workspace's dependencies.
|
||||
|
||||
### `yarn bootstrap`
|
||||
|
||||
The `yarn bootstrap` command installs all dependencies in the workspace.
|
||||
|
||||
### `yarn test:services:start`
|
||||
|
||||
The `yarn test:services:start` command starts up the project's Docker container, launching all services in the workspace. This command must be executed from the root directory.
|
||||
|
||||
### `yarn test:services:stop`
|
||||
|
||||
The `yarn test:services:stop` command brings down the project's Docker container, halting all services. This command must be executed from the root directory.
|
||||
|
||||
### `yarn test`
|
||||
|
||||
The `yarn test` command runs all tests in the workspace.
|
||||
|
||||
### `yarn clean`
|
||||
|
||||
The `yarn clean` command removes yarn and all dependencies installed by yarn. After executing this command, you must repeat the steps in *Setting up your workspace* to rebuild your workspace.
|
||||
|
||||
## Contributing Changes
|
||||
|
||||
Now that you've set up your workspace, you're ready to contribute changes to the `keyv` repository.
|
||||
|
||||
1) Make any changes that you would like to contribute in your local workspace.
|
||||
2) After making these changes, ensure that the project's tests still pass by executing the `yarn test` command in the root directory.
|
||||
3) Commit your changes and push them to your forked repository.
|
||||
4) Navigate to the original `keyv` repository and go to the *Pull Requests* tab.
|
||||
5) Click the *New pull request* button, and open a pull request for the branch in your repository that contains your changes.
|
||||
6) Once your pull request is created, ensure that all checks have passed and that your branch has no conflicts with the base branch. If there are any issues, resolve these changes in your local repository, and then commit and push them to git.
|
||||
7) Similarly, respond to any reviewer comments or requests for changes by making edits to your local repository and pushing them to Git.
|
||||
8) Once the pull request has been reviewed, those with write access to the branch will be able to merge your changes into the `keyv` repository.
|
||||
|
||||
If you need more information on the steps to create a pull request, you can find a detailed walkthrough in the [Github documentation](https://docs.github.com/en/pull-requests/collaborating-with-pull-requests/proposing-changes-to-your-work-with-pull-requests/creating-a-pull-request-from-a-fork)
|
||||
|
||||
## License
|
||||
|
||||
MIT © Jared Wray
|
||||
@@ -0,0 +1,176 @@
|
||||
/// <reference types="node" resolution-mode="require"/>
|
||||
/**
 * A class representing the Node.js implementation of Hfs, backed by
 * `node:fs/promises` (or a compatible module injected via `options.fsp`).
 * @implements {HfsImpl}
 */
export class NodeHfsImpl implements HfsImpl {
    /**
     * Creates a new instance.
     * @param {object} [options] The options for the instance.
     * @param {Fsp} [options.fsp] The file system module to use. Defaults to
     *      the native `node:fs/promises` module when omitted.
     */
    constructor({ fsp }?: {
        fsp?: Fsp;
    });
    /**
     * Reads a file and returns the contents as a Uint8Array.
     * @param {string|URL} filePath The path to the file to read.
     * @returns {Promise<Uint8Array|undefined>} A promise that resolves with the contents
     *      of the file or undefined if the file doesn't exist.
     * @throws {Error} If the file cannot be read.
     * @throws {TypeError} If the file path is not a string or URL.
     */
    bytes(filePath: string | URL): Promise<Uint8Array | undefined>;
    /**
     * Writes a value to a file. If the value is a string, UTF-8 encoding is used.
     * @param {string|URL} filePath The path to the file to write.
     * @param {Uint8Array} contents The contents to write to the
     *      file.
     * @returns {Promise<void>} A promise that resolves when the file is
     *      written.
     * @throws {TypeError} If the file path is not a string or URL.
     * @throws {Error} If the file cannot be written.
     */
    write(filePath: string | URL, contents: Uint8Array): Promise<void>;
    /**
     * Appends a value to a file. If the value is a string, UTF-8 encoding is used.
     * @param {string|URL} filePath The path to the file to append to.
     * @param {Uint8Array} contents The contents to append to the
     *      file.
     * @returns {Promise<void>} A promise that resolves when the file is
     *      written.
     * @throws {TypeError} If the file path is not a string or URL.
     * @throws {Error} If the file cannot be appended to.
     */
    append(filePath: string | URL, contents: Uint8Array): Promise<void>;
    /**
     * Checks if a file exists.
     * @param {string|URL} filePath The path to the file to check.
     * @returns {Promise<boolean>} A promise that resolves with true if the
     *      file exists or false if it does not.
     * @throws {Error} If the operation fails with a code other than ENOENT.
     */
    isFile(filePath: string | URL): Promise<boolean>;
    /**
     * Checks if a directory exists.
     * @param {string|URL} dirPath The path to the directory to check.
     * @returns {Promise<boolean>} A promise that resolves with true if the
     *      directory exists or false if it does not.
     * @throws {Error} If the operation fails with a code other than ENOENT.
     */
    isDirectory(dirPath: string | URL): Promise<boolean>;
    /**
     * Creates a directory recursively (intermediate directories are created
     * as needed).
     * @param {string|URL} dirPath The path to the directory to create.
     * @returns {Promise<void>} A promise that resolves when the directory is
     *      created.
     */
    createDirectory(dirPath: string | URL): Promise<void>;
    /**
     * Deletes a file or empty directory.
     * @param {string|URL} fileOrDirPath The path to the file or directory to
     *      delete.
     * @returns {Promise<boolean>} A promise that resolves when the file or
     *      directory is deleted, true if the file or directory is deleted, false
     *      if the file or directory does not exist.
     * @throws {TypeError} If the file or directory path is not a string or URL.
     * @throws {Error} If the file or directory cannot be deleted.
     */
    delete(fileOrDirPath: string | URL): Promise<boolean>;
    /**
     * Deletes a file or directory recursively (non-empty directories are
     * removed along with their contents).
     * @param {string|URL} fileOrDirPath The path to the file or directory to
     *      delete.
     * @returns {Promise<boolean>} A promise that resolves when the file or
     *      directory is deleted, true if the file or directory is deleted, false
     *      if the file or directory does not exist.
     * @throws {TypeError} If the file or directory path is not a string or URL.
     * @throws {Error} If the file or directory cannot be deleted.
     */
    deleteAll(fileOrDirPath: string | URL): Promise<boolean>;
    /**
     * Returns a list of directory entries for the given path.
     * @param {string|URL} dirPath The path to the directory to read.
     * @returns {AsyncIterable<HfsDirectoryEntry>} An async iterable that yields
     *      the directory entries.
     * @throws {TypeError} If the directory path is not a string or URL.
     * @throws {Error} If the directory cannot be read.
     */
    list(dirPath: string | URL): AsyncIterable<HfsDirectoryEntry>;
    /**
     * Returns the size of a file. This method handles ENOENT errors
     * and returns undefined in that case.
     * @param {string|URL} filePath The path to the file to read.
     * @returns {Promise<number|undefined>} A promise that resolves with the size of the
     *      file in bytes or undefined if the file doesn't exist.
     */
    size(filePath: string | URL): Promise<number | undefined>;
    /**
     * Returns the last modified date of a file or directory. This method handles ENOENT errors
     * and returns undefined in that case.
     * @param {string|URL} fileOrDirPath The path to the file or directory to read.
     * @returns {Promise<Date|undefined>} A promise that resolves with the last modified
     *      date of the file or directory, or undefined if the file doesn't exist.
     */
    lastModified(fileOrDirPath: string | URL): Promise<Date | undefined>;
    /**
     * Copies a file from one location to another.
     * @param {string|URL} source The path to the file to copy.
     * @param {string|URL} destination The path to copy the file to.
     * @returns {Promise<void>} A promise that resolves when the file is copied.
     * @throws {Error} If the source file does not exist.
     * @throws {Error} If the source file is a directory.
     * @throws {Error} If the destination file is a directory.
     */
    copy(source: string | URL, destination: string | URL): Promise<void>;
    /**
     * Copies a file or directory from one location to another.
     * @param {string|URL} source The path to the file or directory to copy.
     * @param {string|URL} destination The path to copy the file or directory to.
     * @returns {Promise<void>} A promise that resolves when the file or directory is
     *      copied.
     * @throws {Error} If the source file or directory does not exist.
     * @throws {Error} If the destination file or directory is a directory.
     */
    copyAll(source: string | URL, destination: string | URL): Promise<void>;
    /**
     * Moves a file from the source path to the destination path.
     * @param {string|URL} source The location of the file to move.
     * @param {string|URL} destination The destination of the file to move.
     * @returns {Promise<void>} A promise that resolves when the move is complete.
     * @throws {TypeError} If the file paths are not strings or URLs.
     * @throws {Error} If the file cannot be moved.
     */
    move(source: string | URL, destination: string | URL): Promise<void>;
    /**
     * Moves a file or directory from the source path to the destination path.
     * @param {string|URL} source The location of the file or directory to move.
     * @param {string|URL} destination The destination of the file or directory to move.
     * @returns {Promise<void>} A promise that resolves when the move is complete.
     * @throws {TypeError} If the file paths are not strings or URLs.
     * @throws {Error} If the file or directory cannot be moved.
     */
    moveAll(source: string | URL, destination: string | URL): Promise<void>;
    // Runtime-enforced private state of the implementation (erased details;
    // the declaration only records that private members exist).
    #private;
}
|
||||
/**
 * A class representing a file system utility library. Extends the core `Hfs`
 * class with the Node.js implementation (`NodeHfsImpl`) as its backend.
 * @implements {HfsImpl}
 */
export class NodeHfs extends Hfs implements HfsImpl {
    /**
     * Creates a new instance.
     * @param {object} [options] The options for the instance.
     * @param {Fsp} [options.fsp] The file system module to use. Defaults to
     *      the native `node:fs/promises` module when omitted.
     */
    constructor({ fsp }?: {
        fsp?: Fsp;
    });
}
|
||||
// Shared ready-to-use instance backed by the native `node:fs/promises` module.
export const hfs: NodeHfs;
// Re-exported contract types from @humanfs/types.
export type HfsImpl = import("@humanfs/types").HfsImpl;
export type HfsDirectoryEntry = import("@humanfs/types").HfsDirectoryEntry;
// The shape of the injectable file system module (the `fsp` constructor option).
export type Fsp = typeof nativeFsp;
export type Dirent = import("fs").Dirent;
import { Hfs } from "@humanfs/core";
import nativeFsp from "node:fs/promises";
|
||||
@@ -0,0 +1 @@
|
||||
// Machine-generated caniuse-lite feature data for "CSS Container Queries (Size)".
// Browser support is encoded as compressed agent/version codes ("1" = supported,
// "2" = unsupported, other numeric keys = partial/flagged support per the
// caniuse-lite packing scheme). Do not edit by hand; regenerate from caniuse data.
module.exports={A:{A:{"2":"K D E F A B mC"},B:{"1":"0 9 p q r s t u v w x y z AB BB CB DB EB FB GB HB IB JB KB LB MB NB OB I","2":"C L M G N O P Q H R S T U V W X Y Z a b c d e f g h i j k l m n","516":"o"},C:{"1":"0 9 t u v w x y z AB BB CB DB EB FB GB HB IB JB KB LB MB NB OB I PC EC QC RC oC pC","2":"1 2 3 4 5 6 7 8 nC LC J PB K D E F A B C L M G N O P QB RB SB TB UB VB WB XB YB ZB aB bB cB dB eB fB gB hB iB jB kB lB mB nB oB pB qB rB sB tB uB vB MC wB NC xB yB zB 0B 1B 2B 3B 4B 5B 6B 7B 8B 9B AC BC CC DC Q H R OC S T U V W X Y Z a b c d e f g h i j k l m n o p q r s qC rC"},D:{"1":"0 9 p q r s t u v w x y z AB BB CB DB EB FB GB HB IB JB KB LB MB NB OB I PC EC QC RC","2":"1 2 3 4 5 6 7 8 J PB K D E F A B C L M G N O P QB RB SB TB UB VB WB XB YB ZB aB bB cB dB eB fB gB hB iB jB kB lB mB nB oB pB qB rB sB tB uB vB MC wB NC xB yB zB 0B 1B 2B 3B 4B 5B 6B 7B 8B 9B AC BC CC DC Q H R S T U V W X Y Z a","194":"c d e f g h i j k l m n","450":"b","516":"o"},E:{"1":"IC WC XC YC ZC aC 1C JC bC cC dC eC fC 2C KC gC hC iC jC 3C","2":"J PB K D E F A B C L M G sC SC tC uC vC wC TC FC GC xC yC zC UC VC HC 0C"},F:{"1":"0 d e f g h i j k l m n o p q r s t u v w x y z","2":"1 2 3 4 5 6 7 8 F B C G N O P QB RB SB TB UB VB WB XB YB ZB aB bB cB dB eB fB gB hB iB jB kB lB mB nB oB pB qB rB sB tB uB vB wB xB yB zB 0B 1B 2B 3B 4B 5B 6B 7B 8B 9B AC BC CC DC 4C 5C 6C 7C FC kC 8C GC","194":"Q H R OC S T U V W X Y Z","516":"a b c"},G:{"1":"IC WC XC YC ZC aC UD JC bC cC dC eC fC VD KC gC hC iC jC","2":"E SC 9C lC AD BD CD DD ED FD GD HD ID JD KD LD MD ND OD PD QD RD SD UC VC HC TD"},H:{"2":"WD"},I:{"1":"I","2":"LC J XD YD ZD aD lC bD cD"},J:{"2":"D A"},K:{"1":"H","2":"A B C FC kC GC"},L:{"1":"I"},M:{"1":"EC"},N:{"2":"A B"},O:{"2":"HC"},P:{"1":"1 2 3 4 5 6 7 8","2":"J dD eD fD gD hD TC iD jD kD lD mD IC JC KC nD"},Q:{"2":"oD"},R:{"2":"pD"},S:{"2":"qD rD"}},B:5,C:"CSS Container Queries (Size)",D:true};
|
||||
@@ -0,0 +1,8 @@
|
||||
import { Parser } from "../index.js";
|
||||
|
||||
/**
 * Parser entries keyed by parser name — presumably Prettier parsers for
 * Angular expression syntaxes (the `__ng_` prefix suggests actions, bindings,
 * directives, and interpolations); confirm against the implementing module.
 */
export declare const parsers: {
    __ng_action: Parser;
    __ng_binding: Parser;
    __ng_directive: Parser;
    __ng_interpolation: Parser;
};
|
||||
Reference in New Issue
Block a user