This commit is contained in:
CHEVALLIER Abel
2025-11-13 16:23:22 +01:00
parent de9c515a47
commit cb235644dc
34924 changed files with 3811102 additions and 0 deletions

144
node_modules/path-to-regexp/dist/index.d.ts generated vendored Normal file
View File

@@ -0,0 +1,144 @@
/**
 * Encode a string into another string.
 */
export type Encode = (value: string) => string;
/**
 * Decode a string into another string.
 */
export type Decode = (value: string) => string;
export interface ParseOptions {
    /**
     * A function for encoding input strings.
     */
    encodePath?: Encode;
}
export interface PathToRegexpOptions {
    /**
     * Matches the path completely without trailing characters. (default: `true`)
     */
    end?: boolean;
    /**
     * Allows optional trailing delimiter to match. (default: `true`)
     */
    trailing?: boolean;
    /**
     * Match will be case sensitive. (default: `false`)
     */
    sensitive?: boolean;
    /**
     * The default delimiter for segments. (default: `'/'`)
     */
    delimiter?: string;
}
export interface MatchOptions extends PathToRegexpOptions {
    /**
     * Function for decoding strings for params, or `false` to disable entirely. (default: `decodeURIComponent`)
     */
    decode?: Decode | false;
}
export interface CompileOptions {
    /**
     * Function for encoding input strings for output into the path, or `false` to disable entirely. (default: `encodeURIComponent`)
     */
    encode?: Encode | false;
    /**
     * The default delimiter for segments. (default: `'/'`)
     */
    delimiter?: string;
}
/**
 * Plain text.
 */
export interface Text {
    type: "text";
    value: string;
}
/**
 * A parameter designed to match arbitrary text within a segment.
 */
export interface Parameter {
    type: "param";
    name: string;
}
/**
 * A wildcard parameter designed to match multiple segments.
 */
export interface Wildcard {
    type: "wildcard";
    name: string;
}
/**
 * A set of possible tokens to expand when matching.
 */
export interface Group {
    type: "group";
    tokens: Token[];
}
/**
 * A token that corresponds with a regexp capture.
 */
export type Key = Parameter | Wildcard;
/**
 * A sequence of `path-to-regexp` keys that match capturing groups.
 */
export type Keys = Array<Key>;
/**
 * A sequence of path match characters.
 */
export type Token = Text | Parameter | Wildcard | Group;
/**
 * Tokenized path instance.
 */
export declare class TokenData {
    readonly tokens: Token[];
    readonly originalPath?: string | undefined;
    constructor(tokens: Token[], originalPath?: string | undefined);
}
/**
 * ParseError is thrown when there is an error processing the path.
 */
export declare class PathError extends TypeError {
    readonly originalPath: string | undefined;
    constructor(message: string, originalPath: string | undefined);
}
/**
 * Parse a string for the raw tokens.
 */
export declare function parse(str: string, options?: ParseOptions): TokenData;
/**
 * Compile a string to a template function for the path.
 */
export declare function compile<P extends ParamData = ParamData>(path: Path, options?: CompileOptions & ParseOptions): (params?: P) => string;
/**
 * Map of parameter name to a string value (param) or string array (wildcard).
 */
export type ParamData = Partial<Record<string, string | string[]>>;
/**
 * Function produced by `compile` that builds a path from parameter data.
 */
export type PathFunction<P extends ParamData> = (data?: P) => string;
/**
 * A match result contains data about the path match.
 */
export interface MatchResult<P extends ParamData> {
    /**
     * The portion of the input that was matched.
     */
    path: string;
    /**
     * Decoded parameter values keyed by parameter name.
     */
    params: P;
}
/**
 * A match is either `false` (no match) or a match result.
 */
export type Match<P extends ParamData> = false | MatchResult<P>;
/**
 * The match function takes a string and returns whether it matched the path.
 */
export type MatchFunction<P extends ParamData> = (path: string) => Match<P>;
/**
 * Supported path types.
 */
export type Path = string | TokenData;
/**
 * Transform a path into a match function.
 */
export declare function match<P extends ParamData>(path: Path | Path[], options?: MatchOptions & ParseOptions): MatchFunction<P>;
/**
 * Transform one or more paths into a regular expression plus the keys that
 * correspond, in order, to the expression's capture groups.
 */
export declare function pathToRegexp(path: Path | Path[], options?: PathToRegexpOptions & ParseOptions): {
    regexp: RegExp;
    keys: Keys;
};
/**
 * Stringify token data into a path string.
 */
export declare function stringify(data: TokenData): string;

409
node_modules/path-to-regexp/dist/index.js generated vendored Normal file
View File

@@ -0,0 +1,409 @@
"use strict";
// Compiled CommonJS output: mark as a transpiled ES module and wire up the
// public API. Function exports rely on hoisting; the classes are assigned
// after their declarations below.
Object.defineProperty(exports, "__esModule", { value: true });
exports.PathError = exports.TokenData = void 0;
exports.parse = parse;
exports.compile = compile;
exports.match = match;
exports.pathToRegexp = pathToRegexp;
exports.stringify = stringify;
// Default segment separator used by compile/match/pathToRegexp.
const DEFAULT_DELIMITER = "/";
// Identity function used when encoding/decoding is disabled.
const NOOP_VALUE = (value) => value;
// Single-character tests for the first / subsequent characters of an
// unquoted parameter name (identifier-like, plus `$` and ZWNJ/ZWJ).
const ID_START = /^[$_\p{ID_Start}]$/u;
const ID_CONTINUE = /^[$\u200c\u200d\p{ID_Continue}]$/u;
// Characters the lexer emits as standalone tokens of their own type.
const SIMPLE_TOKENS = {
    // Groups.
    "{": "{",
    "}": "}",
    // Reserved.
    "(": "(",
    ")": ")",
    "[": "[",
    "]": "]",
    "+": "+",
    "?": "?",
    "!": "!",
};
/**
 * Escape text for stringify to path.
 *
 * Backslash-prefixes every character that `parse` would otherwise treat as
 * special (group braces, reserved punctuation, `:`/`*` markers, backslash)
 * so the text survives a round trip through `parse`.
 */
function escapeText(str) {
    return str.replace(/[{}()\[\]+?!:*\\]/g, (char) => `\\${char}`);
}
/**
 * Escape a regular expression string.
 *
 * Backslash-prefixes every regexp metacharacter (plus `/`) so the result
 * can be embedded in a pattern as a literal.
 */
function escape(str) {
    return str.replace(/[.+*?^${}()[\]|/\\]/g, (char) => `\\${char}`);
}
/**
 * Tokenized path instance.
 *
 * Read-only holder for the parsed token tree, plus the original path string
 * which is threaded through for error reporting (see PathError).
 */
class TokenData {
    constructor(tokens, originalPath) {
        this.tokens = tokens;
        this.originalPath = originalPath;
    }
}
exports.TokenData = TokenData;
/**
 * ParseError is thrown when there is an error processing the path.
 *
 * The message is augmented with the offending path (when known) and a link
 * to the project's error documentation. The original path is also kept on
 * the instance for programmatic access.
 */
class PathError extends TypeError {
    constructor(message, originalPath) {
        const location = originalPath ? `: ${originalPath}` : "";
        super(`${message}${location}; visit https://git.new/pathToRegexpError for info`);
        this.originalPath = originalPath;
    }
}
exports.PathError = PathError;
/**
 * Parse a string for the raw tokens.
 *
 * Two phases over the input: a lexer that flattens the string into simple
 * tokens (char, escape, param, wildcard, `{`, `}`, …), then a small
 * recursive-descent pass (`consumeUntil`) that folds them into the Token
 * tree stored on the returned TokenData.
 *
 * Throws PathError for unterminated quotes, missing parameter names, or
 * unbalanced groups; the original string is attached for context.
 */
function parse(str, options = {}) {
    const { encodePath = NOOP_VALUE } = options;
    // Spread iterates by code point, so surrogate pairs stay intact.
    const chars = [...str];
    const tokens = [];
    let index = 0; // lexer position within `chars`
    let pos = 0; // parser position within `tokens`
    // Read a parameter name following `:` or `*`: either an identifier run
    // or a double-quoted string with backslash escapes.
    function name() {
        let value = "";
        if (ID_START.test(chars[index])) {
            do {
                value += chars[index++];
            } while (ID_CONTINUE.test(chars[index]));
        }
        else if (chars[index] === '"') {
            // Remember where the quote opened; reset to 0 once closed so a
            // truthy value after the loop means "unterminated".
            let quoteStart = index;
            while (index++ < chars.length) {
                if (chars[index] === '"') {
                    index++;
                    quoteStart = 0;
                    break;
                }
                // Increment over escape characters.
                if (chars[index] === "\\")
                    index++;
                value += chars[index];
            }
            if (quoteStart) {
                throw new PathError(`Unterminated quote at index ${quoteStart}`, str);
            }
        }
        if (!value) {
            throw new PathError(`Missing parameter name at index ${index}`, str);
        }
        return value;
    }
    // Lexer: single pass producing flat tokens with their source index.
    while (index < chars.length) {
        const value = chars[index];
        const type = SIMPLE_TOKENS[value];
        if (type) {
            tokens.push({ type, index: index++, value });
        }
        else if (value === "\\") {
            // Backslash escapes the next character verbatim.
            tokens.push({ type: "escape", index: index++, value: chars[index++] });
        }
        else if (value === ":") {
            tokens.push({ type: "param", index: index++, value: name() });
        }
        else if (value === "*") {
            tokens.push({ type: "wildcard", index: index++, value: name() });
        }
        else {
            tokens.push({ type: "char", index: index++, value });
        }
    }
    // Sentinel so the parser can detect end of input.
    tokens.push({ type: "end", index, value: "" });
    // Parser: consume tokens until `endType`, merging char/escape runs into
    // one text token (passed through `encodePath`) and recursing into groups.
    function consumeUntil(endType) {
        const output = [];
        while (true) {
            const token = tokens[pos++];
            if (token.type === endType)
                break;
            if (token.type === "char" || token.type === "escape") {
                let path = token.value;
                let cur = tokens[pos];
                while (cur.type === "char" || cur.type === "escape") {
                    path += cur.value;
                    cur = tokens[++pos];
                }
                output.push({
                    type: "text",
                    value: encodePath(path),
                });
                continue;
            }
            if (token.type === "param" || token.type === "wildcard") {
                output.push({
                    type: token.type,
                    name: token.value,
                });
                continue;
            }
            if (token.type === "{") {
                output.push({
                    type: "group",
                    tokens: consumeUntil("}"),
                });
                continue;
            }
            // Reserved character or stray `}` outside a group.
            throw new PathError(`Unexpected ${token.type} at index ${token.index}, expected ${endType}`, str);
        }
        return output;
    }
    return new TokenData(consumeUntil("end"), str);
}
/**
 * Compile a string to a template function for the path.
 *
 * The returned function substitutes `params` into the path, throwing a
 * TypeError that lists any required parameters that were not supplied.
 */
function compile(path, options = {}) {
    const { encode = encodeURIComponent, delimiter = DEFAULT_DELIMITER } = options;
    const data = typeof path === "object" ? path : parse(path, options);
    const build = tokensToFunction(data.tokens, delimiter, encode);
    return function path(params = {}) {
        // `build` returns the rendered path plus the names of missing params.
        const [result, ...missing] = build(params);
        if (missing.length > 0) {
            throw new TypeError(`Missing parameters: ${missing.join(", ")}`);
        }
        return result;
    };
}
/**
 * Combine per-token builders into one function over a token list.
 *
 * The returned function produces `[renderedPath, ...missingParamNames]`.
 */
function tokensToFunction(tokens, delimiter, encode) {
    const encoders = tokens.map((token) => tokenToFunction(token, delimiter, encode));
    return (data) => {
        let path = "";
        const missing = [];
        for (const encoder of encoders) {
            const [value, ...extras] = encoder(data);
            path += value;
            missing.push(...extras);
        }
        return [path, ...missing];
    };
}
/**
 * Convert a single token into a path building function.
 *
 * Every returned function maps params data to `[text, ...missingNames]`,
 * where `missingNames` lists params that were required but absent.
 */
function tokenToFunction(token, delimiter, encode) {
    // Static text renders to itself and can never be missing.
    if (token.type === "text") {
        const text = token.value;
        return () => [text];
    }
    // A group renders its contents only when every nested param is present;
    // otherwise it renders to nothing and reports no missing params.
    if (token.type === "group") {
        const fn = tokensToFunction(token.tokens, delimiter, encode);
        return (data) => {
            const [value, ...missing] = fn(data);
            return missing.length === 0 ? [value] : [""];
        };
    }
    const encodeValue = encode || NOOP_VALUE;
    // With encoding enabled, a wildcard takes a non-empty array of segment
    // strings that are encoded individually and joined with the delimiter.
    if (token.type === "wildcard" && encode !== false) {
        return (data) => {
            const value = data[token.name];
            if (value == null)
                return ["", token.name];
            if (!Array.isArray(value) || value.length === 0) {
                throw new TypeError(`Expected "${token.name}" to be a non-empty array`);
            }
            const segments = value.map((segment, index) => {
                if (typeof segment !== "string") {
                    throw new TypeError(`Expected "${token.name}/${index}" to be a string`);
                }
                return encodeValue(segment);
            });
            return [segments.join(delimiter)];
        };
    }
    // Plain param — or a wildcard with encoding disabled — takes one string.
    return (data) => {
        const value = data[token.name];
        if (value == null)
            return ["", token.name];
        if (typeof value !== "string") {
            throw new TypeError(`Expected "${token.name}" to be a string`);
        }
        return [encodeValue(value)];
    };
}
/**
 * Transform a path into a match function.
 *
 * The returned function runs the compiled regexp against the input and, on
 * success, returns the matched path plus decoded params; wildcard captures
 * are split on the delimiter and decoded per segment.
 */
function match(path, options = {}) {
    const { decode = decodeURIComponent, delimiter = DEFAULT_DELIMITER } = options;
    const { regexp, keys } = pathToRegexp(path, options);
    // Pre-compute one decoder per capture group.
    const decoders = keys.map((key) => {
        if (decode === false)
            return NOOP_VALUE;
        return key.type === "param"
            ? decode
            : (value) => value.split(delimiter).map(decode);
    });
    return function match(input) {
        const result = regexp.exec(input);
        if (!result)
            return false;
        const params = Object.create(null);
        keys.forEach((key, index) => {
            const captured = result[index + 1];
            // Unmatched optional groups leave their capture undefined.
            if (captured !== undefined) {
                params[key.name] = decoders[index](captured);
            }
        });
        return { path: result[0], params };
    };
}
/**
 * Transform one or more paths into `{ regexp, keys }`, where `keys` lists
 * the param/wildcard tokens matching the regexp's capture groups in order.
 *
 * Each path's token tree is expanded into every group-inclusion variant
 * (via `flatten`) and the variants are joined as regexp alternatives.
 */
function pathToRegexp(path, options = {}) {
    const { delimiter = DEFAULT_DELIMITER, end = true, sensitive = false, trailing = true, } = options;
    const keys = [];
    const sources = [];
    for (const input of pathsToArray(path, [])) {
        const data = typeof input === "object" ? input : parse(input, options);
        for (const tokens of flatten(data.tokens, 0, [])) {
            sources.push(toRegExpSource(tokens, delimiter, keys, data.originalPath));
        }
    }
    let pattern = `^(?:${sources.join("|")})`;
    // Optionally allow a single trailing delimiter.
    if (trailing)
        pattern += `(?:${escape(delimiter)}$)?`;
    // Either anchor at the end, or require a delimiter/end boundary.
    pattern += end ? "$" : `(?=${escape(delimiter)}|$)`;
    return { regexp: new RegExp(pattern, sensitive ? "" : "i"), keys };
}
/**
 * Convert a path or (arbitrarily nested) array of paths into a flat array,
 * appending into — and returning — the provided accumulator.
 */
function pathsToArray(paths, init) {
    if (!Array.isArray(paths)) {
        init.push(paths);
        return init;
    }
    for (const entry of paths) {
        pathsToArray(entry, init);
    }
    return init;
}
/**
 * Generate every flat token sequence for the given tokens.
 *
 * Each group yields two families of sequences: first with the group's
 * contents expanded in place, then with the group omitted entirely.
 * Non-group tokens appear in every sequence.
 */
function* flatten(tokens, index, prefix) {
    if (index === tokens.length) {
        yield prefix;
        return;
    }
    const token = tokens[index];
    if (token.type !== "group") {
        prefix.push(token);
        yield* flatten(tokens, index + 1, prefix);
        return;
    }
    // Variants that include the group: expand its contents on a copy of the
    // prefix so the "omitted" branch below is unaffected.
    for (const expanded of flatten(token.tokens, 0, prefix.slice())) {
        yield* flatten(tokens, index + 1, expanded);
    }
    // Variant that omits the group entirely.
    yield* flatten(tokens, index + 1, prefix);
}
/**
 * Transform a flat sequence of tokens into a regular expression source.
 *
 * Every param/wildcard token that becomes a capture group is appended to
 * `keys` so callers can map captures back to names. `originalPath` is only
 * used to contextualize thrown PathErrors.
 */
function toRegExpSource(tokens, delimiter, keys, originalPath) {
    let result = "";
    // Literal text seen since the last param; used to stop a param from
    // backtracking into the text that precedes it.
    let backtrack = "";
    // True when the next param sits at a delimiter boundary (start of the
    // path, or after text containing the delimiter).
    let isSafeSegmentParam = true;
    for (const token of tokens) {
        if (token.type === "text") {
            result += escape(token.value);
            backtrack += token.value;
            // Compiled form of `isSafeSegmentParam ||= ...`: once safe at a
            // boundary, stays safe.
            isSafeSegmentParam || (isSafeSegmentParam = token.value.includes(delimiter));
            continue;
        }
        if (token.type === "param" || token.type === "wildcard") {
            // Two adjacent params with no text between them would be ambiguous.
            if (!isSafeSegmentParam && !backtrack) {
                throw new PathError(`Missing text before "${token.name}" ${token.type}`, originalPath);
            }
            if (token.type === "param") {
                // One or more chars, excluding the delimiter and — at unsafe
                // positions — the preceding literal text (see `negate`).
                result += `(${negate(delimiter, isSafeSegmentParam ? "" : backtrack)}+)`;
            }
            else {
                // Wildcards match across segments, delimiter included.
                result += `([\\s\\S]+)`;
            }
            keys.push(token);
            backtrack = "";
            isSafeSegmentParam = false;
            continue;
        }
    }
    return result;
}
/**
 * Block backtracking on previous text and ignore delimiter string.
 *
 * Builds a single-character regexp source that rejects the delimiter and
 * the given backtrack text. Single characters use a plain negated class;
 * multi-character strings need a negative lookahead.
 */
function negate(delimiter, backtrack) {
    const singleCharDelimiter = delimiter.length < 2;
    if (backtrack.length < 2) {
        // Both fit in one negated character class when the delimiter is
        // also a single character.
        if (singleCharDelimiter)
            return `[^${escape(delimiter + backtrack)}]`;
        return `(?:(?!${escape(delimiter)})[^${escape(backtrack)}])`;
    }
    if (singleCharDelimiter)
        return `(?:(?!${escape(backtrack)})[^${escape(delimiter)}])`;
    return `(?:(?!${escape(backtrack)}|${escape(delimiter)})[\\s\\S])`;
}
/**
 * Stringify an array of tokens into a path string.
 *
 * Inverse of `parse` for a token list: text is re-escaped, groups are
 * wrapped in braces, and params/wildcards get their `:`/`*` prefix back.
 */
function stringifyTokens(tokens) {
    let result = "";
    let index = 0;
    // Quote a param name unless it is a safe identifier AND the token that
    // follows could not be mistaken for a continuation of the name.
    function name(value) {
        const safe = isNameSafe(value) && isNextNameSafe(tokens[index]);
        return safe ? value : JSON.stringify(value);
    }
    while (index < tokens.length) {
        const token = tokens[index++];
        switch (token.type) {
            case "text":
                result += escapeText(token.value);
                break;
            case "group":
                result += `{${stringifyTokens(token.tokens)}}`;
                break;
            case "param":
                result += `:${name(token.name)}`;
                break;
            case "wildcard":
                result += `*${name(token.name)}`;
                break;
            default:
                throw new TypeError(`Unknown token type: ${token.type}`);
        }
    }
    return result;
}
/**
 * Stringify token data into a path string.
 *
 * Thin public wrapper over `stringifyTokens` for a TokenData instance.
 */
function stringify(data) {
    const { tokens } = data;
    return stringifyTokens(tokens);
}
/**
 * Validate the parameter name contains valid ID characters.
 *
 * The first code point must match ID_START and every remaining code point
 * must match ID_CONTINUE; unsafe names get quoted by the caller.
 */
function isNameSafe(name) {
    const chars = [...name];
    // For an empty name chars[0] is undefined, which fails the test —
    // matching the original destructuring behavior.
    if (!ID_START.test(chars[0]))
        return false;
    return chars.slice(1).every((char) => ID_CONTINUE.test(char));
}
/**
 * Validate the next token does not interfere with the current param name.
 *
 * Only a text token can interfere, and only when its first character could
 * continue an identifier; anything else (or no token) is safe.
 */
function isNextNameSafe(token) {
    if (!token || token.type !== "text")
        return true;
    return !ID_CONTINUE.test(token.value[0]);
}
//# sourceMappingURL=index.js.map

1
node_modules/path-to-regexp/dist/index.js.map generated vendored Normal file

File diff suppressed because one or more lines are too long