update
@@ -0,0 +1,605 @@
// This is written to exactly parse the `font` shorthand in CSS2:
// https://www.w3.org/TR/CSS22/fonts.html#font-shorthand
// https://www.w3.org/TR/CSS22/syndata.html#tokenization
//
// We may want to update it for CSS 3 (e.g. font-stretch, or updated
// tokenization), but I've only seen one or two parsing-related issues filed
// against node-canvas in my 8 years on the project.
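//
// Illustrative summary (my reading of the spec linked above, not normative):
// the production handled here is
//
//   font:  [ <'font-style'> || <'font-variant'> || <'font-weight'> ]?
//          <'font-size'> [ '/' <'line-height'> ]? <'font-family'>
//
// The CSS2 system-font keywords (caption, icon, menu, ...) are not recognized.
// For example, "italic bold 12px 'Helvetica Neue', sans-serif" yields
// fontStyle=Italic, fontWeight=700, fontSize=12 and the family list
// {"Helvetica Neue", "sans-serif"}.
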
#include "FontParser.h"
|
||||
#include "CharData.h"
|
||||
#include <cctype>
|
||||
#include <unordered_map>
|
||||
|
||||
Token::Token(Type type, std::string value) : type_(type), value_(std::move(value)) {}
|
||||
|
||||
Token::Token(Type type, double value) : type_(type), value_(value) {}
|
||||
|
||||
Token::Token(Type type) : type_(type), value_(std::string{}) {}
|
||||
|
||||
const std::string&
|
||||
Token::getString() const {
|
||||
static const std::string empty;
|
||||
auto* str = std::get_if<std::string>(&value_);
|
||||
return str ? *str : empty;
|
||||
}
|
||||
|
||||
double
|
||||
Token::getNumber() const {
|
||||
auto* num = std::get_if<double>(&value_);
|
||||
return num ? *num : 0.0;
|
||||
}
|
||||
|
||||
Tokenizer::Tokenizer(std::string_view input) : input_(input) {}
|
||||
|
||||
std::string
|
||||
Tokenizer::utf8Encode(uint32_t codepoint) {
|
||||
std::string result;
|
||||
|
||||
if (codepoint < 0x80) {
|
||||
result += static_cast<char>(codepoint);
|
||||
} else if (codepoint < 0x800) {
|
||||
result += static_cast<char>((codepoint >> 6) | 0xc0);
|
||||
result += static_cast<char>((codepoint & 0x3f) | 0x80);
|
||||
} else if (codepoint < 0x10000) {
|
||||
result += static_cast<char>((codepoint >> 12) | 0xe0);
|
||||
result += static_cast<char>(((codepoint >> 6) & 0x3f) | 0x80);
|
||||
result += static_cast<char>((codepoint & 0x3f) | 0x80);
|
||||
} else {
|
||||
result += static_cast<char>((codepoint >> 18) | 0xf0);
|
||||
result += static_cast<char>(((codepoint >> 12) & 0x3f) | 0x80);
|
||||
result += static_cast<char>(((codepoint >> 6) & 0x3f) | 0x80);
|
||||
result += static_cast<char>((codepoint & 0x3f) | 0x80);
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
char
|
||||
Tokenizer::peek() const {
|
||||
return position_ < input_.length() ? input_[position_] : '\0';
|
||||
}
|
||||
|
||||
char
|
||||
Tokenizer::advance() {
|
||||
return position_ < input_.length() ? input_[position_++] : '\0';
|
||||
}
|
||||
|
||||
Token
|
||||
Tokenizer::parseNumber() {
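// Accepts an optional sign, an integer and/or fractional part, and an optional
// exponent, e.g. "12", "-1.5", "+.5", "2e3". If an 'e'/'E' is not followed by
// exponent digits, we rewind to it so that e.g. "2em" tokenizes as the number
// 2 followed by the identifier "em".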
|
||||
enum class State {
|
||||
Start,
|
||||
AfterSign,
|
||||
Digits,
|
||||
AfterDecimal,
|
||||
AfterE,
|
||||
AfterESign,
|
||||
ExponentDigits
|
||||
};
|
||||
|
||||
size_t start = position_;
|
||||
size_t ePosition = 0;
|
||||
State state = State::Start;
|
||||
bool valid = false;
|
||||
|
||||
while (position_ < input_.length()) {
|
||||
char c = peek();
|
||||
uint8_t flags = charData[static_cast<uint8_t>(c)];
|
||||
|
||||
switch (state) {
|
||||
case State::Start:
|
||||
if (flags & CharData::Sign) {
|
||||
position_++;
|
||||
state = State::AfterSign;
|
||||
} else if (flags & CharData::Digit) {
|
||||
position_++;
|
||||
state = State::Digits;
|
||||
valid = true;
|
||||
} else if (c == '.') {
|
||||
position_++;
|
||||
state = State::AfterDecimal;
|
||||
} else {
|
||||
goto done;
|
||||
}
|
||||
break;
|
||||
|
||||
case State::AfterSign:
|
||||
if (flags & CharData::Digit) {
|
||||
position_++;
|
||||
state = State::Digits;
|
||||
valid = true;
|
||||
} else if (c == '.') {
|
||||
position_++;
|
||||
state = State::AfterDecimal;
|
||||
} else {
|
||||
goto done;
|
||||
}
|
||||
break;
|
||||
|
||||
case State::Digits:
|
||||
if (flags & CharData::Digit) {
|
||||
position_++;
|
||||
} else if (c == '.') {
|
||||
position_++;
|
||||
state = State::AfterDecimal;
|
||||
} else if (c == 'e' || c == 'E') {
|
||||
ePosition = position_;
|
||||
position_++;
|
||||
state = State::AfterE;
|
||||
valid = false;
|
||||
} else {
|
||||
goto done;
|
||||
}
|
||||
break;
|
||||
|
||||
case State::AfterDecimal:
|
||||
if (flags & CharData::Digit) {
|
||||
position_++;
|
||||
valid = true;
|
||||
state = State::Digits;
|
||||
} else {
|
||||
goto done;
|
||||
}
|
||||
break;
|
||||
|
||||
case State::AfterE:
|
||||
if (flags & CharData::Sign) {
|
||||
position_++;
|
||||
state = State::AfterESign;
|
||||
} else if (flags & CharData::Digit) {
|
||||
position_++;
|
||||
valid = true;
|
||||
state = State::ExponentDigits;
|
||||
} else {
|
||||
position_ = ePosition;
|
||||
valid = true;
|
||||
goto done;
|
||||
}
|
||||
break;
|
||||
|
||||
case State::AfterESign:
|
||||
if (flags & CharData::Digit) {
|
||||
position_++;
|
||||
valid = true;
|
||||
state = State::ExponentDigits;
|
||||
} else {
|
||||
position_ = ePosition;
|
||||
valid = true;
|
||||
goto done;
|
||||
}
|
||||
break;
|
||||
|
||||
case State::ExponentDigits:
|
||||
if (flags & CharData::Digit) {
|
||||
position_++;
|
||||
} else {
|
||||
goto done;
|
||||
}
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
done:
|
||||
if (!valid) {
|
||||
position_ = start;
|
||||
return Token(Token::Type::Invalid);
|
||||
}
|
||||
|
||||
std::string number_str(input_.substr(start, position_ - start));
|
||||
double value = std::stod(number_str);
|
||||
return Token(Token::Type::Number, value);
|
||||
}
|
||||
|
||||
// Note that identifiers are always lower-cased. This makes comparisons easier
// and more efficient, but it means that font families specified as identifiers
// will be lower-cased as well. Since font selection isn't case-sensitive, this
// shouldn't ever be a problem.
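// e.g. the unquoted input "Arial" produces the identifier token "arial".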
|
||||
Token
|
||||
Tokenizer::parseIdentifier() {
|
||||
std::string identifier;
|
||||
auto flags = CharData::Nmstart;
|
||||
auto start = position_;
|
||||
|
||||
while (position_ < input_.length()) {
|
||||
char c = peek();
|
||||
|
||||
if (c == '\\') {
|
||||
advance();
|
||||
if (!parseEscape(identifier)) {
|
||||
position_ = start;
|
||||
return Token(Token::Type::Invalid);
|
||||
}
|
||||
flags = CharData::Nmchar;
|
||||
} else if (charData[static_cast<uint8_t>(c)] & flags) {
|
||||
identifier += static_cast<char>(advance() + (c >= 'A' && c <= 'Z' ? 32 : 0));
|
||||
flags = CharData::Nmchar;
|
||||
} else {
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
return Token(Token::Type::Identifier, identifier);
|
||||
}
|
||||
|
||||
uint32_t
|
||||
Tokenizer::parseUnicode() {
|
||||
uint32_t value = 0;
|
||||
size_t count = 0;
|
||||
|
||||
while (position_ < input_.length() && count < 6) {
|
||||
char c = peek();
|
||||
uint32_t digit;
|
||||
|
||||
if (c >= '0' && c <= '9') {
|
||||
digit = c - '0';
|
||||
} else if (c >= 'a' && c <= 'f') {
|
||||
digit = c - 'a' + 10;
|
||||
} else if (c >= 'A' && c <= 'F') {
|
||||
digit = c - 'A' + 10;
|
||||
} else {
|
||||
break;
|
||||
}
|
||||
|
||||
value = value * 16 + digit;
|
||||
advance();
|
||||
count++;
|
||||
}
|
||||
|
||||
// Optional whitespace after hex escape
|
||||
char c = peek();
|
||||
if (c == '\r') {
|
||||
advance();
|
||||
if (peek() == '\n') advance();
|
||||
} else if (isWhitespace(c)) {
|
||||
advance();
|
||||
}
|
||||
|
||||
return value;
|
||||
}
|
||||
|
||||
bool
|
||||
Tokenizer::parseEscape(std::string& str) {
|
||||
char c = peek();
|
||||
auto flags = charData[static_cast<uint8_t>(c)];
|
||||
|
||||
if (flags & CharData::Hex) {
|
||||
str += utf8Encode(parseUnicode());
|
||||
return true;
|
||||
} else if (!(flags & CharData::Newline) && !(flags & CharData::Hex)) {
|
||||
str += advance();
|
||||
return true;
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
Token
|
||||
Tokenizer::parseString(char quote) {
|
||||
advance();
|
||||
std::string value;
|
||||
auto start = position_;
|
||||
|
||||
while (position_ < input_.length()) {
|
||||
char c = peek();
|
||||
|
||||
if (c == quote) {
|
||||
advance();
|
||||
return Token(Token::Type::QuotedString, value);
|
||||
} else if (c == '\\') {
|
||||
advance();
|
||||
c = peek();
|
||||
if (c == '\r') {
|
||||
advance();
|
||||
if (peek() == '\n') advance();
|
||||
} else if (isNewline(c)) {
|
||||
advance();
|
||||
} else {
|
||||
if (!parseEscape(value)) {
|
||||
position_ = start;
|
||||
return Token(Token::Type::Invalid);
|
||||
}
|
||||
}
|
||||
} else {
|
||||
value += advance();
|
||||
}
|
||||
}
|
||||
|
||||
position_ = start;
|
||||
return Token(Token::Type::Invalid);
|
||||
}
|
||||
|
||||
Token
|
||||
Tokenizer::nextToken() {
|
||||
if (position_ >= input_.length()) {
|
||||
return Token(Token::Type::EndOfInput);
|
||||
}
|
||||
|
||||
char c = peek();
|
||||
auto flags = charData[static_cast<uint8_t>(c)];
|
||||
|
||||
if (isWhitespace(c)) {
|
||||
std::string whitespace;
|
||||
while (position_ < input_.length() && isWhitespace(peek())) {
|
||||
whitespace += advance();
|
||||
}
|
||||
return Token(Token::Type::Whitespace, whitespace);
|
||||
}
|
||||
|
||||
if (flags & CharData::NumStart) {
|
||||
Token token = parseNumber();
|
||||
if (token.type() != Token::Type::Invalid) return token;
|
||||
}
|
||||
|
||||
if (flags & CharData::Nmstart) {
|
||||
Token token = parseIdentifier();
|
||||
if (token.type() != Token::Type::Invalid) return token;
|
||||
}
|
||||
|
||||
if (c == '"') {
|
||||
Token token = parseString('"');
|
||||
if (token.type() != Token::Type::Invalid) return token;
|
||||
}
|
||||
|
||||
if (c == '\'') {
|
||||
Token token = parseString('\'');
|
||||
if (token.type() != Token::Type::Invalid) return token;
|
||||
}
|
||||
|
||||
switch (advance()) {
|
||||
case '/': return Token(Token::Type::Slash);
|
||||
case ',': return Token(Token::Type::Comma);
|
||||
case '%': return Token(Token::Type::Percent);
|
||||
default: return Token(Token::Type::Invalid);
|
||||
}
|
||||
}
|
||||
|
||||
FontParser::FontParser(std::string_view input)
|
||||
: tokenizer_(input)
|
||||
, currentToken_(tokenizer_.nextToken())
|
||||
, nextToken_(tokenizer_.nextToken()) {}
|
||||
|
||||
const std::unordered_map<std::string, uint16_t> FontParser::weightMap = {
|
||||
{"normal", 400},
|
||||
{"bold", 700},
|
||||
{"lighter", 100},
|
||||
{"bolder", 700}
|
||||
};
|
||||
|
||||
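// Conversion factors to CSS pixels (96 px per inch; 16 px assumed per em),
// e.g. "12pt" -> 12 * (96 / 72) = 16 px.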
const std::unordered_map<std::string, double> FontParser::unitMap = {
|
||||
{"cm", 37.8f},
|
||||
{"mm", 3.78f},
|
||||
{"in", 96.0f},
|
||||
{"pt", 96.0f / 72.0f},
|
||||
{"pc", 96.0f / 6.0f},
|
||||
{"em", 16.0f},
|
||||
{"px", 1.0f}
|
||||
};
|
||||
|
||||
void
|
||||
FontParser::advance() {
|
||||
currentToken_ = nextToken_;
|
||||
nextToken_ = tokenizer_.nextToken();
|
||||
}
|
||||
|
||||
void
|
||||
FontParser::skipWs() {
|
||||
while (currentToken_.type() == Token::Type::Whitespace) advance();
|
||||
}
|
||||
|
||||
bool
|
||||
FontParser::check(Token::Type type) const {
|
||||
return currentToken_.type() == type;
|
||||
}
|
||||
|
||||
bool
|
||||
FontParser::checkWs() const {
|
||||
return nextToken_.type() == Token::Type::Whitespace
|
||||
|| nextToken_.type() == Token::Type::EndOfInput;
|
||||
}
|
||||
|
||||
bool
|
||||
FontParser::parseFontStyle(FontProperties& props) {
|
||||
if (check(Token::Type::Identifier)) {
|
||||
const auto& value = currentToken_.getString();
|
||||
if (value == "italic") {
|
||||
props.fontStyle = FontStyle::Italic;
|
||||
advance();
|
||||
return true;
|
||||
} else if (value == "oblique") {
|
||||
props.fontStyle = FontStyle::Oblique;
|
||||
advance();
|
||||
return true;
|
||||
} else if (value == "normal") {
|
||||
props.fontStyle = FontStyle::Normal;
|
||||
advance();
|
||||
return true;
|
||||
}
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
bool
|
||||
FontParser::parseFontVariant(FontProperties& props) {
|
||||
if (check(Token::Type::Identifier)) {
|
||||
const auto& value = currentToken_.getString();
|
||||
if (value == "small-caps") {
|
||||
props.fontVariant = FontVariant::SmallCaps;
|
||||
advance();
|
||||
return true;
|
||||
} else if (value == "normal") {
|
||||
props.fontVariant = FontVariant::Normal;
|
||||
advance();
|
||||
return true;
|
||||
}
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
bool
|
||||
FontParser::parseFontWeight(FontProperties& props) {
|
||||
if (check(Token::Type::Number)) {
|
||||
double weightFloat = currentToken_.getNumber();
|
||||
int weight = static_cast<int>(weightFloat);
|
||||
if (weight < 1 || weight > 1000) return false;
|
||||
props.fontWeight = static_cast<uint16_t>(weight);
|
||||
advance();
|
||||
return true;
|
||||
} else if (check(Token::Type::Identifier)) {
|
||||
const auto& value = currentToken_.getString();
|
||||
|
||||
if (auto it = weightMap.find(value); it != weightMap.end()) {
|
||||
props.fontWeight = it->second;
|
||||
advance();
|
||||
return true;
|
||||
}
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
bool
|
||||
FontParser::parseFontSize(FontProperties& props) {
|
||||
if (!check(Token::Type::Number)) return false;
|
||||
|
||||
props.fontSize = currentToken_.getNumber();
|
||||
advance();
|
||||
|
||||
double multiplier = 1.0f;
|
||||
if (check(Token::Type::Identifier)) {
|
||||
const auto& unit = currentToken_.getString();
|
||||
|
||||
if (auto it = unitMap.find(unit); it != unitMap.end()) {
|
||||
multiplier = it->second;
|
||||
advance();
|
||||
} else {
|
||||
return false;
|
||||
}
|
||||
} else if (check(Token::Type::Percent)) {
|
||||
multiplier = 16.0f / 100.0f;
|
||||
advance();
|
||||
} else {
|
||||
return false;
|
||||
}
|
||||
|
||||
// Technically, if we consumed some tokens but couldn't parse the font-size,
// we should rewind the tokenizer, but the grammar doesn't allow any valid
// alternatives at this point anyway.
|
||||
|
||||
props.fontSize *= multiplier;
|
||||
return true;
|
||||
}
|
||||
|
||||
// line-height is never used by canvas, but it should still parse
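// e.g. "12px/14px serif", "12px/120% serif" and "12px/normal serif" all parse.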
bool
|
||||
FontParser::parseLineHeight(FontProperties& props) {
|
||||
if (check(Token::Type::Slash)) {
|
||||
advance();
|
||||
skipWs();
|
||||
if (check(Token::Type::Number)) {
|
||||
advance();
|
||||
if (check(Token::Type::Percent)) {
|
||||
advance();
|
||||
} else if (check(Token::Type::Identifier)) {
|
||||
auto identifier = currentToken_.getString();
|
||||
if (auto it = unitMap.find(identifier); it != unitMap.end()) {
|
||||
advance();
|
||||
} else {
|
||||
return false;
|
||||
}
|
||||
}
// A unitless <number> (e.g. "1.5") is also a valid CSS2 line-height, so any
// other token simply ends the line-height here.
|
||||
} else if (check(Token::Type::Identifier) && currentToken_.getString() == "normal") {
|
||||
advance();
|
||||
} else {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
bool
|
||||
FontParser::parseFontFamily(FontProperties& props) {
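// For example, the remaining input `Gill Sans Extrabold, "Times New Roman",
// serif` produces {"gill sans extrabold", "Times New Roman", "serif"}
// (unquoted identifiers were lower-cased by the tokenizer).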
|
||||
while (!check(Token::Type::EndOfInput)) {
|
||||
std::string family = "";
|
||||
std::string trailingWs = "";
|
||||
bool found = false;
|
||||
|
||||
while (
|
||||
check(Token::Type::QuotedString) ||
|
||||
check(Token::Type::Identifier) ||
|
||||
check(Token::Type::Whitespace)
|
||||
) {
|
||||
if (check(Token::Type::Whitespace)) {
|
||||
if (found) trailingWs += currentToken_.getString();
|
||||
} else { // Identifier, QuotedString
|
||||
if (found) {
|
||||
family += trailingWs;
|
||||
trailingWs.clear();
|
||||
}
|
||||
|
||||
family += currentToken_.getString();
|
||||
found = true;
|
||||
}
|
||||
|
||||
advance();
|
||||
}
|
||||
|
||||
if (!found) return false; // only whitespace or non-id/string found
|
||||
|
||||
props.fontFamily.push_back(family);
|
||||
|
||||
if (check(Token::Type::Comma)) advance();
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
FontProperties
|
||||
FontParser::parse(const std::string& fontString, bool* success) {
|
||||
FontParser parser(fontString);
|
||||
auto result = parser.parseFont();
|
||||
if (success) *success = !parser.hasError_;
|
||||
return result;
|
||||
}
|
||||
|
||||
FontProperties
|
||||
FontParser::parseFont() {
|
||||
FontProperties props;
|
||||
uint8_t state = 0b111;
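// state is a bitmask of the properties that may still appear before the
// font-size: 0b001 = font-style, 0b010 = font-variant, 0b100 = font-weight.
// Each may be given at most once, in any order.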
|
||||
|
||||
skipWs();
|
||||
|
||||
for (size_t i = 0; i < 3 && checkWs(); i++) {
|
||||
if ((state & 0b001) && parseFontStyle(props)) {
|
||||
state &= 0b110;
|
||||
goto match;
|
||||
}
|
||||
|
||||
if ((state & 0b010) && parseFontVariant(props)) {
|
||||
state &= 0b101;
|
||||
goto match;
|
||||
}
|
||||
|
||||
if ((state & 0b100) && parseFontWeight(props)) {
|
||||
state &= 0b011;
|
||||
goto match;
|
||||
}
|
||||
|
||||
break; // all attempts exhausted
|
||||
match: skipWs(); // success: move to the next non-ws token
|
||||
}
|
||||
|
||||
if (parseFontSize(props)) {
|
||||
skipWs();
|
||||
if (parseLineHeight(props) && parseFontFamily(props)) {
|
||||
return props;
|
||||
}
|
||||
}
|
||||
|
||||
hasError_ = true;
|
||||
return props;
|
||||
}
|
||||
@@ -0,0 +1,206 @@
|
||||
# is-glob
|
||||
|
||||
> Returns `true` if the given string looks like a glob pattern or an extglob pattern. This makes it easy to create code that only uses external modules like node-glob when necessary, resulting in much faster code execution and initialization time, and a better user experience.
|
||||
|
||||
Please consider following this project's author, [Jon Schlinkert](https://github.com/jonschlinkert), and consider starring the project to show your :heart: and support.
|
||||
|
||||
## Install
|
||||
|
||||
Install with [npm](https://www.npmjs.com/):
|
||||
|
||||
```sh
|
||||
$ npm install --save is-glob
|
||||
```
|
||||
|
||||
You might also be interested in [is-valid-glob](https://github.com/jonschlinkert/is-valid-glob) and [has-glob](https://github.com/jonschlinkert/has-glob).
|
||||
|
||||
## Usage
|
||||
|
||||
```js
|
||||
var isGlob = require('is-glob');
|
||||
```
|
||||
|
||||
### Default behavior
|
||||
|
||||
**True**
|
||||
|
||||
Patterns that have glob characters or regex patterns will return `true`:
|
||||
|
||||
```js
|
||||
isGlob('!foo.js');
|
||||
isGlob('*.js');
|
||||
isGlob('**/abc.js');
|
||||
isGlob('abc/*.js');
|
||||
isGlob('abc/(aaa|bbb).js');
|
||||
isGlob('abc/[a-z].js');
|
||||
isGlob('abc/{a,b}.js');
|
||||
//=> true
|
||||
```
|
||||
|
||||
Extglobs
|
||||
|
||||
```js
|
||||
isGlob('abc/@(a).js');
|
||||
isGlob('abc/!(a).js');
|
||||
isGlob('abc/+(a).js');
|
||||
isGlob('abc/*(a).js');
|
||||
isGlob('abc/?(a).js');
|
||||
//=> true
|
||||
```
|
||||
|
||||
**False**
|
||||
|
||||
Escaped globs or extglobs return `false`:
|
||||
|
||||
```js
|
||||
isGlob('abc/\\@(a).js');
|
||||
isGlob('abc/\\!(a).js');
|
||||
isGlob('abc/\\+(a).js');
|
||||
isGlob('abc/\\*(a).js');
|
||||
isGlob('abc/\\?(a).js');
|
||||
isGlob('\\!foo.js');
|
||||
isGlob('\\*.js');
|
||||
isGlob('\\*\\*/abc.js');
|
||||
isGlob('abc/\\*.js');
|
||||
isGlob('abc/\\(aaa|bbb).js');
|
||||
isGlob('abc/\\[a-z].js');
|
||||
isGlob('abc/\\{a,b}.js');
|
||||
//=> false
|
||||
```
|
||||
|
||||
Patterns that do not have glob patterns return `false`:
|
||||
|
||||
```js
|
||||
isGlob('abc.js');
|
||||
isGlob('abc/def/ghi.js');
|
||||
isGlob('foo.js');
|
||||
isGlob('abc/@.js');
|
||||
isGlob('abc/+.js');
|
||||
isGlob('abc/?.js');
|
||||
isGlob();
|
||||
isGlob(null);
|
||||
//=> false
|
||||
```
|
||||
|
||||
Arrays are also `false` (If you want to check if an array has a glob pattern, use [has-glob](https://github.com/jonschlinkert/has-glob)):
|
||||
|
||||
```js
|
||||
isGlob(['**/*.js']);
|
||||
isGlob(['foo.js']);
|
||||
//=> false
|
||||
```
|
||||
|
||||
### Option strict
|
||||
|
||||
When `options.strict === false`, the check is less strict about what counts as a glob, meaning that
some patterns that would otherwise return `false` may return `true`. This is done so that matching libraries like [micromatch](https://github.com/micromatch/micromatch) have a chance at determining whether the pattern is a glob or not.
|
||||
|
||||
**True**
|
||||
|
||||
Patterns that have glob characters or regex patterns will return `true`:
|
||||
|
||||
```js
|
||||
isGlob('!foo.js', {strict: false});
|
||||
isGlob('*.js', {strict: false});
|
||||
isGlob('**/abc.js', {strict: false});
|
||||
isGlob('abc/*.js', {strict: false});
|
||||
isGlob('abc/(aaa|bbb).js', {strict: false});
|
||||
isGlob('abc/[a-z].js', {strict: false});
|
||||
isGlob('abc/{a,b}.js', {strict: false});
|
||||
//=> true
|
||||
```
|
||||
|
||||
Extglobs
|
||||
|
||||
```js
|
||||
isGlob('abc/@(a).js', {strict: false});
|
||||
isGlob('abc/!(a).js', {strict: false});
|
||||
isGlob('abc/+(a).js', {strict: false});
|
||||
isGlob('abc/*(a).js', {strict: false});
|
||||
isGlob('abc/?(a).js', {strict: false});
|
||||
//=> true
|
||||
```
|
||||
|
||||
**False**
|
||||
|
||||
Escaped globs or extglobs return `false`:
|
||||
|
||||
```js
|
||||
isGlob('\\!foo.js', {strict: false});
|
||||
isGlob('\\*.js', {strict: false});
|
||||
isGlob('\\*\\*/abc.js', {strict: false});
|
||||
isGlob('abc/\\*.js', {strict: false});
|
||||
isGlob('abc/\\(aaa|bbb).js', {strict: false});
|
||||
isGlob('abc/\\[a-z].js', {strict: false});
|
||||
isGlob('abc/\\{a,b}.js', {strict: false});
|
||||
//=> false
|
||||
```
|
||||
|
||||
## About
|
||||
|
||||
<details>
|
||||
<summary><strong>Contributing</strong></summary>
|
||||
|
||||
Pull requests and stars are always welcome. For bugs and feature requests, [please create an issue](../../issues/new).
|
||||
|
||||
</details>
|
||||
|
||||
<details>
|
||||
<summary><strong>Running Tests</strong></summary>
|
||||
|
||||
Running and reviewing unit tests is a great way to get familiarized with a library and its API. You can install dependencies and run tests with the following command:
|
||||
|
||||
```sh
|
||||
$ npm install && npm test
|
||||
```
|
||||
|
||||
</details>
|
||||
|
||||
<details>
|
||||
<summary><strong>Building docs</strong></summary>
|
||||
|
||||
_(This project's readme.md is generated by [verb](https://github.com/verbose/verb-generate-readme), please don't edit the readme directly. Any changes to the readme must be made in the [.verb.md](.verb.md) readme template.)_
|
||||
|
||||
To generate the readme, run the following command:
|
||||
|
||||
```sh
|
||||
$ npm install -g verbose/verb#dev verb-generate-readme && verb
|
||||
```
|
||||
|
||||
</details>
|
||||
|
||||
### Related projects
|
||||
|
||||
You might also be interested in these projects:
|
||||
|
||||
* [assemble](https://www.npmjs.com/package/assemble): Get the rocks out of your socks! Assemble makes you fast at creating web projects… [more](https://github.com/assemble/assemble) | [homepage](https://github.com/assemble/assemble "Get the rocks out of your socks! Assemble makes you fast at creating web projects. Assemble is used by thousands of projects for rapid prototyping, creating themes, scaffolds, boilerplates, e-books, UI components, API documentation, blogs, building websit")
|
||||
* [base](https://www.npmjs.com/package/base): Framework for rapidly creating high quality, server-side node.js applications, using plugins like building blocks | [homepage](https://github.com/node-base/base "Framework for rapidly creating high quality, server-side node.js applications, using plugins like building blocks")
|
||||
* [update](https://www.npmjs.com/package/update): Be scalable! Update is a new, open source developer framework and CLI for automating updates… [more](https://github.com/update/update) | [homepage](https://github.com/update/update "Be scalable! Update is a new, open source developer framework and CLI for automating updates of any kind in code projects.")
|
||||
* [verb](https://www.npmjs.com/package/verb): Documentation generator for GitHub projects. Verb is extremely powerful, easy to use, and is used… [more](https://github.com/verbose/verb) | [homepage](https://github.com/verbose/verb "Documentation generator for GitHub projects. Verb is extremely powerful, easy to use, and is used on hundreds of projects of all sizes to generate everything from API docs to readmes.")
|
||||
|
||||
### Contributors
|
||||
|
||||
| **Commits** | **Contributor** |
|
||||
| --- | --- |
|
||||
| 47 | [jonschlinkert](https://github.com/jonschlinkert) |
|
||||
| 5 | [doowb](https://github.com/doowb) |
|
||||
| 1 | [phated](https://github.com/phated) |
|
||||
| 1 | [danhper](https://github.com/danhper) |
|
||||
| 1 | [paulmillr](https://github.com/paulmillr) |
|
||||
|
||||
### Author
|
||||
|
||||
**Jon Schlinkert**
|
||||
|
||||
* [GitHub Profile](https://github.com/jonschlinkert)
|
||||
* [Twitter Profile](https://twitter.com/jonschlinkert)
|
||||
* [LinkedIn Profile](https://linkedin.com/in/jonschlinkert)
|
||||
|
||||
### License
|
||||
|
||||
Copyright © 2019, [Jon Schlinkert](https://github.com/jonschlinkert).
|
||||
Released under the [MIT License](LICENSE).
|
||||
|
||||
***
|
||||
|
||||
_This file was generated by [verb-generate-readme](https://github.com/verbose/verb-generate-readme), v0.8.0, on March 27, 2019._
|
||||
@@ -0,0 +1,10 @@
|
||||
/**
|
||||
* @license React
|
||||
* react-refresh-runtime.production.min.js
|
||||
*
|
||||
* Copyright (c) Facebook, Inc. and its affiliates.
|
||||
*
|
||||
* This source code is licensed under the MIT license found in the
|
||||
* LICENSE file in the root directory of this source tree.
|
||||
*/
|
||||
'use strict';throw Error("React Refresh runtime should not be included in the production bundle.");
|
||||
@@ -0,0 +1,52 @@
|
||||
{
|
||||
"name": "fast-json-stable-stringify",
|
||||
"version": "2.1.0",
|
||||
"description": "deterministic `JSON.stringify()` - a faster version of substack's json-stable-strigify without jsonify",
|
||||
"main": "index.js",
|
||||
"types": "index.d.ts",
|
||||
"dependencies": {},
|
||||
"devDependencies": {
|
||||
"benchmark": "^2.1.4",
|
||||
"coveralls": "^3.0.0",
|
||||
"eslint": "^6.7.0",
|
||||
"fast-stable-stringify": "latest",
|
||||
"faster-stable-stringify": "latest",
|
||||
"json-stable-stringify": "latest",
|
||||
"nyc": "^14.1.0",
|
||||
"pre-commit": "^1.2.2",
|
||||
"tape": "^4.11.0"
|
||||
},
|
||||
"scripts": {
|
||||
"eslint": "eslint index.js test",
|
||||
"test-spec": "tape test/*.js",
|
||||
"test": "npm run eslint && nyc npm run test-spec"
|
||||
},
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "git://github.com/epoberezkin/fast-json-stable-stringify.git"
|
||||
},
|
||||
"homepage": "https://github.com/epoberezkin/fast-json-stable-stringify",
|
||||
"keywords": [
|
||||
"json",
|
||||
"stringify",
|
||||
"deterministic",
|
||||
"hash",
|
||||
"stable"
|
||||
],
|
||||
"author": {
|
||||
"name": "James Halliday",
|
||||
"email": "mail@substack.net",
|
||||
"url": "http://substack.net"
|
||||
},
|
||||
"license": "MIT",
|
||||
"nyc": {
|
||||
"exclude": [
|
||||
"test",
|
||||
"node_modules"
|
||||
],
|
||||
"reporter": [
|
||||
"lcov",
|
||||
"text-summary"
|
||||
]
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,41 @@
|
||||
function isSecure(wsComponents) {
|
||||
return typeof wsComponents.secure === 'boolean' ? wsComponents.secure : String(wsComponents.scheme).toLowerCase() === "wss";
|
||||
}
|
||||
//RFC 6455
|
||||
const handler = {
|
||||
scheme: "ws",
|
||||
domainHost: true,
|
||||
parse: function (components, options) {
|
||||
const wsComponents = components;
|
||||
//indicate if the secure flag is set
|
||||
wsComponents.secure = isSecure(wsComponents);
|
||||
//construct resource name
|
||||
wsComponents.resourceName = (wsComponents.path || '/') + (wsComponents.query ? '?' + wsComponents.query : '');
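//e.g. path "/chat" and query "token=abc" give resourceName "/chat?token=abc"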
|
||||
wsComponents.path = undefined;
|
||||
wsComponents.query = undefined;
|
||||
return wsComponents;
|
||||
},
|
||||
serialize: function (wsComponents, options) {
|
||||
//normalize the default port
|
||||
if (wsComponents.port === (isSecure(wsComponents) ? 443 : 80) || wsComponents.port === "") {
|
||||
wsComponents.port = undefined;
|
||||
}
|
||||
//ensure scheme matches secure flag
|
||||
if (typeof wsComponents.secure === 'boolean') {
|
||||
wsComponents.scheme = (wsComponents.secure ? 'wss' : 'ws');
|
||||
wsComponents.secure = undefined;
|
||||
}
|
||||
//reconstruct path from resource name
|
||||
if (wsComponents.resourceName) {
|
||||
const [path, query] = wsComponents.resourceName.split('?');
|
||||
wsComponents.path = (path && path !== '/' ? path : undefined);
|
||||
wsComponents.query = query;
|
||||
wsComponents.resourceName = undefined;
|
||||
}
|
||||
//forbid fragment component
|
||||
wsComponents.fragment = undefined;
|
||||
return wsComponents;
|
||||
}
|
||||
};
|
||||
export default handler;
|
||||
//# sourceMappingURL=ws.js.map
|
||||
File diff suppressed because it is too large
@@ -0,0 +1,41 @@
|
||||
"use strict";
|
||||
Object.defineProperty(exports, Symbol.toStringTag, { value: "Module" });
|
||||
const jsxRuntime = require("react/jsx-runtime");
|
||||
function Asset({ tag, attrs, children }) {
|
||||
switch (tag) {
|
||||
case "title":
|
||||
return /* @__PURE__ */ jsxRuntime.jsx("title", { ...attrs, suppressHydrationWarning: true, children });
|
||||
case "meta":
|
||||
return /* @__PURE__ */ jsxRuntime.jsx("meta", { ...attrs, suppressHydrationWarning: true });
|
||||
case "link":
|
||||
return /* @__PURE__ */ jsxRuntime.jsx("link", { ...attrs, suppressHydrationWarning: true });
|
||||
case "style":
|
||||
return /* @__PURE__ */ jsxRuntime.jsx(
|
||||
"style",
|
||||
{
|
||||
...attrs,
|
||||
dangerouslySetInnerHTML: { __html: children }
|
||||
}
|
||||
);
|
||||
case "script":
|
||||
if (attrs && attrs.src) {
|
||||
return /* @__PURE__ */ jsxRuntime.jsx("script", { ...attrs, suppressHydrationWarning: true });
|
||||
}
|
||||
if (typeof children === "string")
|
||||
return /* @__PURE__ */ jsxRuntime.jsx(
|
||||
"script",
|
||||
{
|
||||
...attrs,
|
||||
dangerouslySetInnerHTML: {
|
||||
__html: children
|
||||
},
|
||||
suppressHydrationWarning: true
|
||||
}
|
||||
);
|
||||
return null;
|
||||
default:
|
||||
return null;
|
||||
}
|
||||
}
|
||||
exports.Asset = Asset;
|
||||
//# sourceMappingURL=Asset.cjs.map
|
||||
Binary file not shown.
@@ -0,0 +1,24 @@
|
||||
(The MIT License)
|
||||
|
||||
Copyright (c) 2014 Nathan Rajlich <nathan@tootallnate.net>
|
||||
|
||||
Permission is hereby granted, free of charge, to any person
|
||||
obtaining a copy of this software and associated documentation
|
||||
files (the "Software"), to deal in the Software without
|
||||
restriction, including without limitation the rights to use,
|
||||
copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the
|
||||
Software is furnished to do so, subject to the following
|
||||
conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be
|
||||
included in all copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
|
||||
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
|
||||
OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
|
||||
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
|
||||
HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
|
||||
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
|
||||
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
|
||||
OTHER DEALINGS IN THE SOFTWARE.
|
||||
@@ -0,0 +1 @@
|
||||
module.exports={C:{"34":0.0041,"52":0.0041,"61":0.0082,"68":0.01231,"78":0.02051,"106":0.0082,"113":0.02871,"115":0.13126,"118":0.0082,"125":0.0041,"128":0.02461,"129":0.0082,"132":0.0041,"133":0.0041,"134":0.01231,"135":0.24612,"136":0.71785,"137":0.0041,_:"2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 53 54 55 56 57 58 59 60 62 63 64 65 66 67 69 70 71 72 73 74 75 76 77 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 107 108 109 110 111 112 114 116 117 119 120 121 122 123 124 126 127 130 131 138 139 140 3.5 3.6"},D:{"38":0.0041,"40":0.0041,"41":0.0041,"43":0.0041,"44":0.0041,"45":0.0041,"46":0.0041,"47":0.06563,"48":0.0041,"49":0.0082,"50":0.0041,"51":0.0041,"52":0.0041,"53":0.0041,"54":0.0041,"55":0.0041,"56":0.0041,"57":0.0041,"58":0.01231,"59":0.0041,"60":0.0041,"63":0.0041,"65":0.0041,"68":0.01231,"69":0.02051,"70":0.0082,"73":0.02871,"74":0.0041,"76":0.04512,"77":0.0041,"78":0.02051,"79":0.26663,"80":0.0082,"83":0.12306,"86":0.0041,"87":0.46763,"88":0.07794,"90":0.0041,"91":0.04512,"92":0.01231,"93":0.02461,"94":0.09845,"95":0.0041,"96":0.0041,"98":0.02871,"100":0.04512,"101":0.01231,"102":0.02051,"103":0.05743,"104":0.23381,"105":0.0041,"106":0.02871,"107":0.01641,"108":0.18049,"109":2.76885,"110":0.04102,"111":0.10255,"112":0.0082,"113":0.01231,"114":0.02461,"115":0.0082,"116":0.09845,"117":0.0041,"118":0.01231,"119":0.03282,"120":0.06153,"121":0.01641,"122":0.06563,"123":0.03282,"124":0.09024,"125":0.07384,"126":0.07384,"127":0.06973,"128":0.07794,"129":0.05743,"130":0.05743,"131":0.27073,"132":0.43891,"133":7.76509,"134":15.71886,"135":0.01641,"136":0.0041,_:"4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 39 42 61 62 64 66 67 71 72 75 81 84 85 89 97 99 137 138"},F:{"28":0.0082,"40":0.0041,"42":0.0041,"46":0.09024,"79":0.0082,"85":0.01641,"86":0.02871,"87":0.0082,"94":0.0082,"95":0.27894,"102":0.0041,"114":0.0082,"115":0.0041,"116":0.35277,"117":2.00588,_:"9 11 12 15 16 17 18 19 20 21 22 23 24 25 26 27 29 30 31 32 33 34 35 36 37 38 39 41 43 44 45 47 48 49 50 51 52 53 54 55 56 57 58 60 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 80 81 82 83 84 88 89 90 91 92 93 96 97 98 99 100 101 103 104 105 106 107 108 109 110 111 112 113 9.5-9.6 10.0-10.1 10.5 10.6 11.1 11.5 11.6 12.1"},B:{"13":0.0082,"14":0.06153,"16":0.0082,"18":0.01231,"92":0.01231,"107":0.0041,"109":0.02461,"110":0.02461,"114":0.01231,"118":0.0041,"119":0.0041,"120":0.0041,"121":0.0041,"122":0.0041,"124":0.0041,"126":0.0041,"127":0.0041,"128":0.01641,"129":0.02871,"130":0.02871,"131":0.04512,"132":0.09024,"133":0.86552,"134":1.87461,_:"12 15 17 79 80 81 83 84 85 86 87 88 89 90 91 93 94 95 96 97 98 99 100 101 102 103 104 105 106 108 111 112 113 115 116 117 123 125"},E:{"15":0.0041,_:"0 4 5 6 7 8 9 10 11 12 13 14 3.1 3.2 5.1 6.1 7.1 9.1 10.1 
15.1","11.1":0.0041,"12.1":0.0082,"13.1":0.0082,"14.1":0.01641,"15.2-15.3":0.0041,"15.4":0.0041,"15.5":0.0041,"15.6":0.04102,"16.0":0.0041,"16.1":0.02051,"16.2":0.0041,"16.3":0.02461,"16.4":0.0082,"16.5":0.01641,"16.6":0.06563,"17.0":0.0041,"17.1":0.05743,"17.2":0.01231,"17.3":0.01231,"17.4":0.02871,"17.5":0.03282,"17.6":0.10255,"18.0":0.03282,"18.1":0.09845,"18.2":0.04922,"18.3":0.66452,"18.4":0.0082},G:{"8":0,"3.2":0,"4.0-4.1":0,"4.2-4.3":0.00214,"5.0-5.1":0,"6.0-6.1":0.00641,"7.0-7.1":0.00427,"8.1-8.4":0,"9.0-9.2":0.0032,"9.3":0.01495,"10.0-10.2":0.00107,"10.3":0.02456,"11.0-11.2":0.11318,"11.3-11.4":0.00747,"12.0-12.1":0.00427,"12.2-12.5":0.1057,"13.0-13.1":0.00214,"13.2":0.0032,"13.3":0.00427,"13.4-13.7":0.01495,"14.0-14.4":0.03737,"14.5-14.8":0.04484,"15.0-15.1":0.02456,"15.2-15.3":0.02456,"15.4":0.0299,"15.5":0.03417,"15.6-15.8":0.42068,"16.0":0.05979,"16.1":0.12279,"16.2":0.06406,"16.3":0.11104,"16.4":0.02456,"16.5":0.04591,"16.6-16.7":0.49862,"17.0":0.0299,"17.1":0.05339,"17.2":0.04057,"17.3":0.05659,"17.4":0.11318,"17.5":0.25198,"17.6-17.7":0.73139,"18.0":0.205,"18.1":0.67053,"18.2":0.30003,"18.3":6.27071,"18.4":0.09289},P:{"4":0.53339,"20":0.01067,"21":0.01067,"22":0.04267,"23":0.02134,"24":0.06401,"25":0.032,"26":0.06401,"27":1.23747,"5.0-5.4":0.07467,"6.2-6.4":0.07467,"7.2-7.4":0.09601,_:"8.2 9.2 10.1 11.1-11.2 12.0 13.0 14.0 15.0 16.0 18.0","17.0":0.01067,"19.0":0.01067},I:{"0":0.10596,"3":0,"4":0,"2.1":0,"2.2":0,"2.3":0,"4.1":0,"4.2-4.3":0.00003,"4.4":0,"4.4.3-4.4.4":0.00012},K:{"0":0.32445,_:"10 11 12 11.1 11.5 12.1"},A:{"11":0.02051,_:"6 7 8 9 10 5.5"},S:{_:"2.5 3.0-3.1"},J:{_:"7 10"},N:{_:"10 11"},R:{_:"0"},M:{"0":0.11798},Q:{_:"14.9"},O:{"0":0.05899},H:{"0":0},L:{"0":46.13531}};
|
||||
@@ -0,0 +1,9 @@
|
||||
import { AllContext, RouteById } from './routeInfo.cjs';
|
||||
import { AnyRouter } from './router.cjs';
|
||||
import { Expand, StrictOrFrom } from './utils.cjs';
|
||||
export interface UseRouteContextBaseOptions<TRouter extends AnyRouter, TFrom, TStrict extends boolean, TSelected> {
|
||||
select?: (search: ResolveUseRouteContext<TRouter, TFrom, TStrict>) => TSelected;
|
||||
}
|
||||
export type UseRouteContextOptions<TRouter extends AnyRouter, TFrom extends string | undefined, TStrict extends boolean, TSelected> = StrictOrFrom<TRouter, TFrom, TStrict> & UseRouteContextBaseOptions<TRouter, TFrom, TStrict, TSelected>;
|
||||
export type ResolveUseRouteContext<TRouter extends AnyRouter, TFrom, TStrict extends boolean> = TStrict extends false ? AllContext<TRouter['routeTree']> : Expand<RouteById<TRouter['routeTree'], TFrom>['types']['allContext']>;
|
||||
export type UseRouteContextResult<TRouter extends AnyRouter, TFrom, TStrict extends boolean, TSelected> = unknown extends TSelected ? ResolveUseRouteContext<TRouter, TFrom, TStrict> : TSelected;
|
||||
@@ -0,0 +1,14 @@
|
||||
"use strict";
|
||||
|
||||
Object.defineProperty(exports, "__esModule", {
|
||||
value: true
|
||||
});
|
||||
exports.default = _getPrototypeOf;
|
||||
function _getPrototypeOf(o) {
|
||||
exports.default = _getPrototypeOf = Object.setPrototypeOf ? Object.getPrototypeOf.bind() : function _getPrototypeOf(o) {
|
||||
return o.__proto__ || Object.getPrototypeOf(o);
|
||||
};
|
||||
return _getPrototypeOf(o);
|
||||
}
|
||||
|
||||
//# sourceMappingURL=getPrototypeOf.js.map
|
||||
File diff suppressed because it is too large
File diff suppressed because one or more lines are too long
@@ -0,0 +1,57 @@
|
||||
"use strict";
|
||||
|
||||
Object.defineProperty(exports, "__esModule", {
|
||||
value: true
|
||||
});
|
||||
exports.default = isNodesEquivalent;
|
||||
var _index = require("../definitions/index.js");
|
||||
function isNodesEquivalent(a, b) {
|
||||
if (typeof a !== "object" || typeof b !== "object" || a == null || b == null) {
|
||||
return a === b;
|
||||
}
|
||||
if (a.type !== b.type) {
|
||||
return false;
|
||||
}
|
||||
const fields = Object.keys(_index.NODE_FIELDS[a.type] || a.type);
|
||||
const visitorKeys = _index.VISITOR_KEYS[a.type];
|
||||
for (const field of fields) {
|
||||
const val_a = a[field];
|
||||
const val_b = b[field];
|
||||
if (typeof val_a !== typeof val_b) {
|
||||
return false;
|
||||
}
|
||||
if (val_a == null && val_b == null) {
|
||||
continue;
|
||||
} else if (val_a == null || val_b == null) {
|
||||
return false;
|
||||
}
|
||||
if (Array.isArray(val_a)) {
|
||||
if (!Array.isArray(val_b)) {
|
||||
return false;
|
||||
}
|
||||
if (val_a.length !== val_b.length) {
|
||||
return false;
|
||||
}
|
||||
for (let i = 0; i < val_a.length; i++) {
|
||||
if (!isNodesEquivalent(val_a[i], val_b[i])) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
continue;
|
||||
}
|
||||
if (typeof val_a === "object" && !(visitorKeys != null && visitorKeys.includes(field))) {
|
||||
for (const key of Object.keys(val_a)) {
|
||||
if (val_a[key] !== val_b[key]) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
continue;
|
||||
}
|
||||
if (!isNodesEquivalent(val_a, val_b)) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
//# sourceMappingURL=isNodesEquivalent.js.map
|
||||
@@ -0,0 +1 @@
|
||||
module.exports={A:{A:{"1":"F A B","2":"K D mC","129":"E"},B:{"1":"0 9 C L M G N O P Q H R S T U V W X Y Z a b c d e f g h i j k l m n o p q r s t u v w x y z AB BB CB DB EB FB GB HB IB JB KB LB MB NB OB I"},C:{"1":"0 1 2 3 4 5 6 7 8 9 J PB K D E F A B C L M G N O P QB RB SB TB UB VB WB XB YB ZB aB bB cB dB eB fB gB hB iB jB kB lB mB nB oB pB qB rB sB tB uB vB MC wB NC xB yB zB 0B 1B 2B 3B 4B 5B 6B 7B 8B 9B AC BC CC DC Q H R OC S T U V W X Y Z a b c d e f g h i j k l m n o p q r s t u v w x y z AB BB CB DB EB FB GB HB IB JB KB LB MB NB OB I PC EC QC RC oC pC qC rC","2":"nC LC"},D:{"1":"0 1 2 3 4 5 6 7 8 9 J PB K D E F A B C L M G N O P QB RB SB TB UB VB WB XB YB ZB aB bB cB dB eB fB gB hB iB jB kB lB mB nB oB pB qB rB sB tB uB vB MC wB NC xB yB zB 0B 1B 2B 3B 4B 5B 6B 7B 8B 9B AC BC CC DC Q H R S T U V W X Y Z a b c d e f g h i j k l m n o p q r s t u v w x y z AB BB CB DB EB FB GB HB IB JB KB LB MB NB OB I PC EC QC RC"},E:{"1":"J PB K D E F A B C L M G tC uC vC wC TC FC GC xC yC zC UC VC HC 0C IC WC XC YC ZC aC 1C JC bC cC dC eC fC 2C KC gC hC iC jC 3C","2":"sC SC"},F:{"1":"0 1 2 3 4 5 6 7 8 B C G N O P QB RB SB TB UB VB WB XB YB ZB aB bB cB dB eB fB gB hB iB jB kB lB mB nB oB pB qB rB sB tB uB vB wB xB yB zB 0B 1B 2B 3B 4B 5B 6B 7B 8B 9B AC BC CC DC Q H R OC S T U V W X Y Z a b c d e f g h i j k l m n o p q r s t u v w x y z 6C 7C FC kC 8C GC","2":"F 4C 5C"},G:{"1":"E 9C lC AD BD CD DD ED FD GD HD ID JD KD LD MD ND OD PD QD RD SD UC VC HC TD IC WC XC YC ZC aC UD JC bC cC dC eC fC VD KC gC hC iC jC","2":"SC"},H:{"1":"WD"},I:{"1":"LC J I XD YD ZD aD lC bD cD"},J:{"1":"D A"},K:{"1":"A B C H FC kC GC"},L:{"1":"I"},M:{"1":"EC"},N:{"1":"A B"},O:{"1":"HC"},P:{"1":"1 2 3 4 5 6 7 8 J dD eD fD gD hD TC iD jD kD lD mD IC JC KC nD"},Q:{"1":"oD"},R:{"1":"pD"},S:{"1":"qD rD"}},B:6,C:"JSON parsing",D:true};
|
||||
File diff suppressed because it is too large
@@ -0,0 +1 @@
|
||||
{ "type": "module" }
|
||||
@@ -0,0 +1,728 @@
|
||||
/*
|
||||
MIT License http://www.opensource.org/licenses/mit-license.php
|
||||
Author Tobias Koppers @sokra
|
||||
*/
|
||||
|
||||
"use strict";
|
||||
|
||||
const versions = require("process").versions;
|
||||
const Resolver = require("./Resolver");
|
||||
const { getType, PathType } = require("./util/path");
|
||||
|
||||
const SyncAsyncFileSystemDecorator = require("./SyncAsyncFileSystemDecorator");
|
||||
|
||||
const AliasFieldPlugin = require("./AliasFieldPlugin");
|
||||
const AliasPlugin = require("./AliasPlugin");
|
||||
const AppendPlugin = require("./AppendPlugin");
|
||||
const ConditionalPlugin = require("./ConditionalPlugin");
|
||||
const DescriptionFilePlugin = require("./DescriptionFilePlugin");
|
||||
const DirectoryExistsPlugin = require("./DirectoryExistsPlugin");
|
||||
const ExportsFieldPlugin = require("./ExportsFieldPlugin");
|
||||
const ExtensionAliasPlugin = require("./ExtensionAliasPlugin");
|
||||
const FileExistsPlugin = require("./FileExistsPlugin");
|
||||
const ImportsFieldPlugin = require("./ImportsFieldPlugin");
|
||||
const JoinRequestPartPlugin = require("./JoinRequestPartPlugin");
|
||||
const JoinRequestPlugin = require("./JoinRequestPlugin");
|
||||
const MainFieldPlugin = require("./MainFieldPlugin");
|
||||
const ModulesInHierarchicalDirectoriesPlugin = require("./ModulesInHierarchicalDirectoriesPlugin");
|
||||
const ModulesInRootPlugin = require("./ModulesInRootPlugin");
|
||||
const NextPlugin = require("./NextPlugin");
|
||||
const ParsePlugin = require("./ParsePlugin");
|
||||
const PnpPlugin = require("./PnpPlugin");
|
||||
const RestrictionsPlugin = require("./RestrictionsPlugin");
|
||||
const ResultPlugin = require("./ResultPlugin");
|
||||
const RootsPlugin = require("./RootsPlugin");
|
||||
const SelfReferencePlugin = require("./SelfReferencePlugin");
|
||||
const SymlinkPlugin = require("./SymlinkPlugin");
|
||||
const TryNextPlugin = require("./TryNextPlugin");
|
||||
const UnsafeCachePlugin = require("./UnsafeCachePlugin");
|
||||
const UseFilePlugin = require("./UseFilePlugin");
|
||||
|
||||
/** @typedef {import("./AliasPlugin").AliasOption} AliasOptionEntry */
|
||||
/** @typedef {import("./ExtensionAliasPlugin").ExtensionAliasOption} ExtensionAliasOption */
|
||||
/** @typedef {import("./PnpPlugin").PnpApiImpl} PnpApi */
|
||||
/** @typedef {import("./Resolver").EnsuredHooks} EnsuredHooks */
|
||||
/** @typedef {import("./Resolver").FileSystem} FileSystem */
|
||||
/** @typedef {import("./Resolver").KnownHooks} KnownHooks */
|
||||
/** @typedef {import("./Resolver").ResolveRequest} ResolveRequest */
|
||||
/** @typedef {import("./Resolver").SyncFileSystem} SyncFileSystem */
|
||||
|
||||
/** @typedef {string|string[]|false} AliasOptionNewRequest */
|
||||
/** @typedef {{[k: string]: AliasOptionNewRequest}} AliasOptions */
|
||||
/** @typedef {{[k: string]: string|string[] }} ExtensionAliasOptions */
|
||||
/** @typedef {false | 0 | "" | null | undefined} Falsy */
|
||||
/** @typedef {{apply: function(Resolver): void} | (function(this: Resolver, Resolver): void) | Falsy} Plugin */
|
||||
|
||||
/**
|
||||
* @typedef {Object} UserResolveOptions
|
||||
* @property {(AliasOptions | AliasOptionEntry[])=} alias A list of module alias configurations or an object which maps key to value
|
||||
* @property {(AliasOptions | AliasOptionEntry[])=} fallback A list of module alias configurations or an object which maps key to value, applied only after modules option
|
||||
* @property {ExtensionAliasOptions=} extensionAlias An object which maps extension to extension aliases
|
||||
* @property {(string | string[])[]=} aliasFields A list of alias fields in description files
|
||||
* @property {(function(ResolveRequest): boolean)=} cachePredicate A function which decides whether a request should be cached or not. An object is passed with at least `path` and `request` properties.
|
||||
* @property {boolean=} cacheWithContext Whether or not the unsafeCache should include request context as part of the cache key.
|
||||
* @property {string[]=} descriptionFiles A list of description files to read from
|
||||
* @property {string[]=} conditionNames A list of exports field condition names.
|
||||
* @property {boolean=} enforceExtension Enforce that an extension from extensions must be used
|
||||
* @property {(string | string[])[]=} exportsFields A list of exports fields in description files
|
||||
* @property {(string | string[])[]=} importsFields A list of imports fields in description files
|
||||
* @property {string[]=} extensions A list of extensions which should be tried for files
|
||||
* @property {FileSystem} fileSystem The file system which should be used
|
||||
* @property {(object | boolean)=} unsafeCache Use this cache object to unsafely cache the successful requests
|
||||
* @property {boolean=} symlinks Resolve symlinks to their symlinked location
|
||||
* @property {Resolver=} resolver A prepared Resolver to which the plugins are attached
|
||||
* @property {string[] | string=} modules A list of directories to resolve modules from, can be absolute path or folder name
|
||||
* @property {(string | string[] | {name: string | string[], forceRelative: boolean})[]=} mainFields A list of main fields in description files
|
||||
* @property {string[]=} mainFiles A list of main files in directories
|
||||
* @property {Plugin[]=} plugins A list of additional resolve plugins which should be applied
|
||||
* @property {PnpApi | null=} pnpApi A PnP API that should be used - null is "never", undefined is "auto"
|
||||
* @property {string[]=} roots A list of root paths
|
||||
* @property {boolean=} fullySpecified The request is already fully specified and no extensions or directories are resolved for it
|
||||
* @property {boolean=} resolveToContext Resolve to a context instead of a file
|
||||
* @property {(string|RegExp)[]=} restrictions A list of resolve restrictions
|
||||
* @property {boolean=} useSyncFileSystemCalls Use only the sync variants of the file system calls
|
||||
* @property {boolean=} preferRelative Prefer to resolve module requests as relative requests before falling back to modules
|
||||
* @property {boolean=} preferAbsolute Prefer to resolve server-relative urls as absolute paths before falling back to resolve in roots
|
||||
*/
|
||||
|
||||
/**
|
||||
* @typedef {Object} ResolveOptions
|
||||
* @property {AliasOptionEntry[]} alias
|
||||
* @property {AliasOptionEntry[]} fallback
|
||||
* @property {Set<string | string[]>} aliasFields
|
||||
* @property {ExtensionAliasOption[]} extensionAlias
|
||||
* @property {(function(ResolveRequest): boolean)} cachePredicate
|
||||
* @property {boolean} cacheWithContext
|
||||
* @property {Set<string>} conditionNames A list of exports field condition names.
|
||||
* @property {string[]} descriptionFiles
|
||||
* @property {boolean} enforceExtension
|
||||
* @property {Set<string | string[]>} exportsFields
|
||||
* @property {Set<string | string[]>} importsFields
|
||||
* @property {Set<string>} extensions
|
||||
* @property {FileSystem} fileSystem
|
||||
* @property {object | false} unsafeCache
|
||||
* @property {boolean} symlinks
|
||||
* @property {Resolver=} resolver
|
||||
* @property {Array<string | string[]>} modules
|
||||
* @property {{name: string[], forceRelative: boolean}[]} mainFields
|
||||
* @property {Set<string>} mainFiles
|
||||
* @property {Plugin[]} plugins
|
||||
* @property {PnpApi | null} pnpApi
|
||||
* @property {Set<string>} roots
|
||||
* @property {boolean} fullySpecified
|
||||
* @property {boolean} resolveToContext
|
||||
* @property {Set<string|RegExp>} restrictions
|
||||
* @property {boolean} preferRelative
|
||||
* @property {boolean} preferAbsolute
|
||||
*/
|
||||
|
||||
/**
|
||||
* @param {PnpApi | null=} option option
|
||||
* @returns {PnpApi | null} processed option
|
||||
*/
|
||||
function processPnpApiOption(option) {
|
||||
if (
|
||||
option === undefined &&
|
||||
/** @type {NodeJS.ProcessVersions & {pnp: string}} */ versions.pnp
|
||||
) {
|
||||
const _findPnpApi =
|
||||
/** @type {function(string): PnpApi | null}} */
|
||||
(
|
||||
// @ts-ignore
|
||||
require("module").findPnpApi
|
||||
);
|
||||
|
||||
if (_findPnpApi) {
|
||||
return {
|
||||
resolveToUnqualified(request, issuer, opts) {
|
||||
const pnpapi = _findPnpApi(issuer);
|
||||
|
||||
if (!pnpapi) {
|
||||
// Issuer isn't managed by PnP
|
||||
return null;
|
||||
}
|
||||
|
||||
return pnpapi.resolveToUnqualified(request, issuer, opts);
|
||||
}
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
return option || null;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {AliasOptions | AliasOptionEntry[] | undefined} alias alias
|
||||
* @returns {AliasOptionEntry[]} normalized aliases
|
||||
*/
|
||||
function normalizeAlias(alias) {
|
||||
return typeof alias === "object" && !Array.isArray(alias) && alias !== null
|
||||
? Object.keys(alias).map(key => {
|
||||
/** @type {AliasOptionEntry} */
|
||||
const obj = { name: key, onlyModule: false, alias: alias[key] };
|
||||
|
||||
if (/\$$/.test(key)) {
|
||||
obj.onlyModule = true;
|
||||
obj.name = key.slice(0, -1);
|
||||
}
|
||||
|
||||
return obj;
|
||||
})
|
||||
: /** @type {Array<AliasOptionEntry>} */ (alias) || [];
|
||||
}
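// Illustrative example: normalizeAlias({ "react$": "./vendor/react.js" })
// returns [{ name: "react", onlyModule: true, alias: "./vendor/react.js" }]
// (the trailing "$" marks an exact-match alias).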
|
||||
|
||||
/**
|
||||
* @param {UserResolveOptions} options input options
|
||||
* @returns {ResolveOptions} output options
|
||||
*/
|
||||
function createOptions(options) {
|
||||
const mainFieldsSet = new Set(options.mainFields || ["main"]);
|
||||
/** @type {ResolveOptions["mainFields"]} */
|
||||
const mainFields = [];
|
||||
|
||||
for (const item of mainFieldsSet) {
|
||||
if (typeof item === "string") {
|
||||
mainFields.push({
|
||||
name: [item],
|
||||
forceRelative: true
|
||||
});
|
||||
} else if (Array.isArray(item)) {
|
||||
mainFields.push({
|
||||
name: item,
|
||||
forceRelative: true
|
||||
});
|
||||
} else {
|
||||
mainFields.push({
|
||||
name: Array.isArray(item.name) ? item.name : [item.name],
|
||||
forceRelative: item.forceRelative
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
alias: normalizeAlias(options.alias),
|
||||
fallback: normalizeAlias(options.fallback),
|
||||
aliasFields: new Set(options.aliasFields),
|
||||
cachePredicate:
|
||||
options.cachePredicate ||
|
||||
function () {
|
||||
return true;
|
||||
},
|
||||
cacheWithContext:
|
||||
typeof options.cacheWithContext !== "undefined"
|
||||
? options.cacheWithContext
|
||||
: true,
|
||||
exportsFields: new Set(options.exportsFields || ["exports"]),
|
||||
importsFields: new Set(options.importsFields || ["imports"]),
|
||||
conditionNames: new Set(options.conditionNames),
|
||||
descriptionFiles: Array.from(
|
||||
new Set(options.descriptionFiles || ["package.json"])
|
||||
),
|
||||
enforceExtension:
|
||||
options.enforceExtension === undefined
|
||||
? options.extensions && options.extensions.includes("")
|
||||
? true
|
||||
: false
|
||||
: options.enforceExtension,
|
||||
extensions: new Set(options.extensions || [".js", ".json", ".node"]),
|
||||
extensionAlias: options.extensionAlias
|
||||
? Object.keys(options.extensionAlias).map(k => ({
|
||||
extension: k,
|
||||
alias: /** @type {ExtensionAliasOptions} */ (options.extensionAlias)[
|
||||
k
|
||||
]
|
||||
}))
|
||||
: [],
|
||||
fileSystem: options.useSyncFileSystemCalls
|
||||
? new SyncAsyncFileSystemDecorator(
|
||||
/** @type {SyncFileSystem} */ (
|
||||
/** @type {unknown} */ (options.fileSystem)
|
||||
)
|
||||
)
|
||||
: options.fileSystem,
|
||||
unsafeCache:
|
||||
options.unsafeCache && typeof options.unsafeCache !== "object"
|
||||
? {}
|
||||
: options.unsafeCache || false,
|
||||
symlinks: typeof options.symlinks !== "undefined" ? options.symlinks : true,
|
||||
resolver: options.resolver,
|
||||
modules: mergeFilteredToArray(
|
||||
Array.isArray(options.modules)
|
||||
? options.modules
|
||||
: options.modules
|
||||
? [options.modules]
|
||||
: ["node_modules"],
|
||||
item => {
|
||||
const type = getType(item);
|
||||
return type === PathType.Normal || type === PathType.Relative;
|
||||
}
|
||||
),
|
||||
mainFields,
|
||||
mainFiles: new Set(options.mainFiles || ["index"]),
|
||||
plugins: options.plugins || [],
|
||||
pnpApi: processPnpApiOption(options.pnpApi),
|
||||
roots: new Set(options.roots || undefined),
|
||||
fullySpecified: options.fullySpecified || false,
|
||||
resolveToContext: options.resolveToContext || false,
|
||||
preferRelative: options.preferRelative || false,
|
||||
preferAbsolute: options.preferAbsolute || false,
|
||||
restrictions: new Set(options.restrictions)
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {UserResolveOptions} options resolve options
|
||||
* @returns {Resolver} created resolver
|
||||
*/
|
||||
exports.createResolver = function (options) {
|
||||
const normalizedOptions = createOptions(options);
|
||||
|
||||
const {
|
||||
alias,
|
||||
fallback,
|
||||
aliasFields,
|
||||
cachePredicate,
|
||||
cacheWithContext,
|
||||
conditionNames,
|
||||
descriptionFiles,
|
||||
enforceExtension,
|
||||
exportsFields,
|
||||
extensionAlias,
|
||||
importsFields,
|
||||
extensions,
|
||||
fileSystem,
|
||||
fullySpecified,
|
||||
mainFields,
|
||||
mainFiles,
|
||||
modules,
|
||||
plugins: userPlugins,
|
||||
pnpApi,
|
||||
resolveToContext,
|
||||
preferRelative,
|
||||
preferAbsolute,
|
||||
symlinks,
|
||||
unsafeCache,
|
||||
resolver: customResolver,
|
||||
restrictions,
|
||||
roots
|
||||
} = normalizedOptions;
|
||||
|
||||
const plugins = userPlugins.slice();
|
||||
|
||||
const resolver = customResolver
|
||||
? customResolver
|
||||
: new Resolver(fileSystem, normalizedOptions);
|
||||
|
||||
//// pipeline ////
|
||||
|
||||
resolver.ensureHook("resolve");
|
||||
resolver.ensureHook("internalResolve");
|
||||
resolver.ensureHook("newInternalResolve");
|
||||
resolver.ensureHook("parsedResolve");
|
||||
resolver.ensureHook("describedResolve");
|
||||
resolver.ensureHook("rawResolve");
|
||||
resolver.ensureHook("normalResolve");
|
||||
resolver.ensureHook("internal");
|
||||
resolver.ensureHook("rawModule");
|
||||
resolver.ensureHook("alternateRawModule");
|
||||
resolver.ensureHook("module");
|
||||
resolver.ensureHook("resolveAsModule");
|
||||
resolver.ensureHook("undescribedResolveInPackage");
|
||||
resolver.ensureHook("resolveInPackage");
|
||||
resolver.ensureHook("resolveInExistingDirectory");
|
||||
resolver.ensureHook("relative");
|
||||
resolver.ensureHook("describedRelative");
|
||||
resolver.ensureHook("directory");
|
||||
resolver.ensureHook("undescribedExistingDirectory");
|
||||
resolver.ensureHook("existingDirectory");
|
||||
resolver.ensureHook("undescribedRawFile");
|
||||
resolver.ensureHook("rawFile");
|
||||
resolver.ensureHook("file");
|
||||
resolver.ensureHook("finalFile");
|
||||
resolver.ensureHook("existingFile");
|
||||
resolver.ensureHook("resolved");
|
||||
|
||||
// TODO remove in next major
|
||||
// cspell:word Interal
|
||||
// Backward-compat
|
||||
// @ts-ignore
|
||||
resolver.hooks.newInteralResolve = resolver.hooks.newInternalResolve;
|
||||
|
||||
// resolve
|
||||
	for (const { source, resolveOptions } of [
		{ source: "resolve", resolveOptions: { fullySpecified } },
		{ source: "internal-resolve", resolveOptions: { fullySpecified: false } }
	]) {
		if (unsafeCache) {
			plugins.push(
				new UnsafeCachePlugin(
					source,
					cachePredicate,
					/** @type {import("./UnsafeCachePlugin").Cache} */ (unsafeCache),
					cacheWithContext,
					`new-${source}`
				)
			);
			plugins.push(
				new ParsePlugin(`new-${source}`, resolveOptions, "parsed-resolve")
			);
		} else {
			plugins.push(new ParsePlugin(source, resolveOptions, "parsed-resolve"));
		}
	}

	// parsed-resolve
	plugins.push(
		new DescriptionFilePlugin(
			"parsed-resolve",
			descriptionFiles,
			false,
			"described-resolve"
		)
	);
	plugins.push(new NextPlugin("after-parsed-resolve", "described-resolve"));

	// described-resolve
	plugins.push(new NextPlugin("described-resolve", "raw-resolve"));
	if (fallback.length > 0) {
		plugins.push(
			new AliasPlugin("described-resolve", fallback, "internal-resolve")
		);
	}

	// raw-resolve
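	// Aliases rewrite the request and re-enter the pipeline at internal-resolve.
	// extensionAlias entries (commonly used to map ".js" requests onto ".ts"
	// sources) instead continue at normal-resolve after rewriting.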
	if (alias.length > 0) {
		plugins.push(new AliasPlugin("raw-resolve", alias, "internal-resolve"));
	}
	aliasFields.forEach(item => {
		plugins.push(new AliasFieldPlugin("raw-resolve", item, "internal-resolve"));
	});
	extensionAlias.forEach(item =>
		plugins.push(
			new ExtensionAliasPlugin("raw-resolve", item, "normal-resolve")
		)
	);
	plugins.push(new NextPlugin("raw-resolve", "normal-resolve"));

	// normal-resolve
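	// after-normal-resolve fans out: module requests go to raw-module, requests
	// flagged as internal ("#..." imports) go to internal, and everything else
	// is joined with the directory and handled as a relative path. The
	// preferRelative / preferAbsolute / roots options decide which join
	// strategy runs here.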
	if (preferRelative) {
		plugins.push(new JoinRequestPlugin("after-normal-resolve", "relative"));
	}
	plugins.push(
		new ConditionalPlugin(
			"after-normal-resolve",
			{ module: true },
			"resolve as module",
			false,
			"raw-module"
		)
	);
	plugins.push(
		new ConditionalPlugin(
			"after-normal-resolve",
			{ internal: true },
			"resolve as internal import",
			false,
			"internal"
		)
	);
	if (preferAbsolute) {
		plugins.push(new JoinRequestPlugin("after-normal-resolve", "relative"));
	}
	if (roots.size > 0) {
		plugins.push(new RootsPlugin("after-normal-resolve", roots, "relative"));
	}
	if (!preferRelative && !preferAbsolute) {
		plugins.push(new JoinRequestPlugin("after-normal-resolve", "relative"));
	}

	// internal
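	// "#..." requests are mapped through each configured imports field using
	// the active condition names, then continue as relative requests or loop
	// back through internal-resolve.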
	importsFields.forEach(importsField => {
		plugins.push(
			new ImportsFieldPlugin(
				"internal",
				conditionNames,
				importsField,
				"relative",
				"internal-resolve"
			)
		);
	});

	// raw-module
	exportsFields.forEach(exportsField => {
		plugins.push(
			new SelfReferencePlugin("raw-module", exportsField, "resolve-as-module")
		);
	});
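	// Array entries use the hierarchical node_modules-style lookup; string
	// entries are treated as a single root directory. When a PnP API is
	// available, "node_modules" is routed through PnpPlugin first, with the
	// classic hierarchical lookup kept as the alternate fallback.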
	modules.forEach(item => {
		if (Array.isArray(item)) {
			if (item.includes("node_modules") && pnpApi) {
				plugins.push(
					new ModulesInHierarchicalDirectoriesPlugin(
						"raw-module",
						item.filter(i => i !== "node_modules"),
						"module"
					)
				);
				plugins.push(
					new PnpPlugin(
						"raw-module",
						pnpApi,
						"undescribed-resolve-in-package",
						"alternate-raw-module"
					)
				);

				plugins.push(
					new ModulesInHierarchicalDirectoriesPlugin(
						"alternate-raw-module",
						["node_modules"],
						"module"
					)
				);
			} else {
				plugins.push(
					new ModulesInHierarchicalDirectoriesPlugin(
						"raw-module",
						item,
						"module"
					)
				);
			}
		} else {
			plugins.push(new ModulesInRootPlugin("raw-module", item, "module"));
		}
	});

	// module
	plugins.push(new JoinRequestPartPlugin("module", "resolve-as-module"));

	// resolve-as-module
	if (!resolveToContext) {
		plugins.push(
			new ConditionalPlugin(
				"resolve-as-module",
				{ directory: false, request: "." },
				"single file module",
				true,
				"undescribed-raw-file"
			)
		);
	}
	plugins.push(
		new DirectoryExistsPlugin(
			"resolve-as-module",
			"undescribed-resolve-in-package"
		)
	);

	// undescribed-resolve-in-package
	plugins.push(
		new DescriptionFilePlugin(
			"undescribed-resolve-in-package",
			descriptionFiles,
			false,
			"resolve-in-package"
		)
	);
	plugins.push(
		new NextPlugin("after-undescribed-resolve-in-package", "resolve-in-package")
	);

	// resolve-in-package
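	// Package-internal requests are matched against each configured exports
	// field using the active condition names; requests the exports field does
	// not cover fall through to resolve-in-existing-directory.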
	exportsFields.forEach(exportsField => {
		plugins.push(
			new ExportsFieldPlugin(
				"resolve-in-package",
				conditionNames,
				exportsField,
				"relative"
			)
		);
	});
	plugins.push(
		new NextPlugin("resolve-in-package", "resolve-in-existing-directory")
	);

	// resolve-in-existing-directory
	plugins.push(
		new JoinRequestPlugin("resolve-in-existing-directory", "relative")
	);

	// relative
	plugins.push(
		new DescriptionFilePlugin(
			"relative",
			descriptionFiles,
			true,
			"described-relative"
		)
	);
	plugins.push(new NextPlugin("after-relative", "described-relative"));

	// described-relative
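	// When resolving to a context only the directory branch matters; otherwise
	// the request is tried as a file first and, unless it is fully specified,
	// also as a directory ("as directory").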
	if (resolveToContext) {
		plugins.push(new NextPlugin("described-relative", "directory"));
	} else {
		plugins.push(
			new ConditionalPlugin(
				"described-relative",
				{ directory: false },
				null,
				true,
				"raw-file"
			)
		);
		plugins.push(
			new ConditionalPlugin(
				"described-relative",
				{ fullySpecified: false },
				"as directory",
				true,
				"directory"
			)
		);
	}

	// directory
	plugins.push(
		new DirectoryExistsPlugin("directory", "undescribed-existing-directory")
	);

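	// In context mode an existing directory is already the result; otherwise
	// its description file (package.json), main fields and main files decide
	// how resolution continues before falling into the file/extension logic.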
	if (resolveToContext) {
		// undescribed-existing-directory
		plugins.push(new NextPlugin("undescribed-existing-directory", "resolved"));
	} else {
		// undescribed-existing-directory
		plugins.push(
			new DescriptionFilePlugin(
				"undescribed-existing-directory",
				descriptionFiles,
				false,
				"existing-directory"
			)
		);
		mainFiles.forEach(item => {
			plugins.push(
				new UseFilePlugin(
					"undescribed-existing-directory",
					item,
					"undescribed-raw-file"
				)
			);
		});

		// described-existing-directory
		mainFields.forEach(item => {
			plugins.push(
				new MainFieldPlugin(
					"existing-directory",
					item,
					"resolve-in-existing-directory"
				)
			);
		});
		mainFiles.forEach(item => {
			plugins.push(
				new UseFilePlugin("existing-directory", item, "undescribed-raw-file")
			);
		});

		// undescribed-raw-file
		plugins.push(
			new DescriptionFilePlugin(
				"undescribed-raw-file",
				descriptionFiles,
				true,
				"raw-file"
			)
		);
		plugins.push(new NextPlugin("after-undescribed-raw-file", "raw-file"));

		// raw-file
		plugins.push(
			new ConditionalPlugin(
				"raw-file",
				{ fullySpecified: true },
				null,
				false,
				"file"
			)
		);
		if (!enforceExtension) {
			plugins.push(new TryNextPlugin("raw-file", "no extension", "file"));
		}
		extensions.forEach(item => {
			plugins.push(new AppendPlugin("raw-file", item, "file"));
		});

		// file
		if (alias.length > 0)
			plugins.push(new AliasPlugin("file", alias, "internal-resolve"));
		aliasFields.forEach(item => {
			plugins.push(new AliasFieldPlugin("file", item, "internal-resolve"));
		});
		plugins.push(new NextPlugin("file", "final-file"));

		// final-file
		plugins.push(new FileExistsPlugin("final-file", "existing-file"));

		// existing-file
		if (symlinks)
			plugins.push(new SymlinkPlugin("existing-file", "existing-file"));
		plugins.push(new NextPlugin("existing-file", "resolved"));
	}

	const resolved =
		/** @type {KnownHooks & EnsuredHooks} */
		(resolver.hooks).resolved;

	// resolved
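	// Once a request reaches the resolved hook, the optional restriction checks
	// run and ResultPlugin emits the final result.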
	if (restrictions.size > 0) {
		plugins.push(new RestrictionsPlugin(resolved, restrictions));
	}

	plugins.push(new ResultPlugin(resolved));

	//// RESOLVER ////

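	// Apply everything that was collected: plugin objects expose
	// apply(resolver), plain functions are called with the resolver as both
	// `this` and the first argument.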
	for (const plugin of plugins) {
		if (typeof plugin === "function") {
			/** @type {function(this: Resolver, Resolver): void} */
			(plugin).call(resolver, resolver);
		} else if (plugin) {
			plugin.apply(resolver);
		}
	}

	return resolver;
};

/**
 * Merging filtered elements
 * @param {string[]} array source array
 * @param {function(string): boolean} filter predicate
 * @returns {Array<string | string[]>} merge result
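 *
 * Duplicates are dropped (via a Set) and consecutive elements accepted by the
 * filter are collapsed into one nested array, e.g.
 * mergeFilteredToArray(["a", "b", "/x"], s => !s.startsWith("/"))
 * yields [["a", "b"], "/x"].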
 */
function mergeFilteredToArray(array, filter) {
	/** @type {Array<string | string[]>} */
	const result = [];
	const set = new Set(array);

	for (const item of set) {
		if (filter(item)) {
			const lastElement =
				result.length > 0 ? result[result.length - 1] : undefined;
			if (Array.isArray(lastElement)) {
				lastElement.push(item);
			} else {
				result.push([item]);
			}
		} else {
			result.push(item);
		}
	}

	return result;
}