This commit is contained in:
CHEVALLIER Abel
2025-11-13 16:23:22 +01:00
parent de9c515a47
commit cb235644dc
34924 changed files with 3811102 additions and 0 deletions

18
node_modules/@npmcli/package-json/LICENSE generated vendored Normal file

@@ -0,0 +1,18 @@
ISC License
Copyright GitHub Inc.
Permission to use, copy, modify, and/or distribute this
software for any purpose with or without fee is hereby
granted, provided that the above copyright notice and this
permission notice appear in all copies.
THE SOFTWARE IS PROVIDED "AS IS" AND NPM DISCLAIMS ALL
WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO
EVENT SHALL NPM BE LIABLE FOR ANY SPECIAL, DIRECT,
INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS,
WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER
TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE
USE OR PERFORMANCE OF THIS SOFTWARE.

243
node_modules/@npmcli/package-json/README.md generated vendored Normal file

@@ -0,0 +1,243 @@
# @npmcli/package-json
[![npm version](https://img.shields.io/npm/v/@npmcli/package-json)](https://www.npmjs.com/package/@npmcli/package-json)
[![Build Status](https://img.shields.io/github/actions/workflow/status/npm/package-json/ci.yml?branch=main)](https://github.com/npm/package-json)
Programmatic API to update `package.json` files. Updates and saves files the
same way the **npm cli** handles them.
## Install
`npm install @npmcli/package-json`
## Usage:
```js
const PackageJson = require('@npmcli/package-json')
const pkgJson = await PackageJson.load(path)
// $ cat package.json
// {
// "name": "foo",
// "version": "1.0.0",
// "dependencies": {
// "a": "^1.0.0",
// "abbrev": "^1.1.1"
// }
// }
pkgJson.update({
dependencies: {
a: '^1.0.0',
b: '^1.2.3',
},
workspaces: [
'./new-workspace',
],
})
await pkgJson.save()
// $ cat package.json
// {
// "name": "foo",
// "version": "1.0.0",
// "dependencies": {
// "a": "^1.0.0",
// "b": "^1.2.3"
// },
// "workspaces": [
// "./new-workspace"
// ]
// }
```
There is also a helper function exported for opening a package.json file
with no extra normalization or saving functionality.
```js
const { readPackage } = require('@npmcli/package-json/lib/read-package')
const rawData = await readPackage('./package.json')
// rawData will now have the package.json contents with no changes or normalizations
```
## API:
### `constructor()`
Creates a new empty instance of `PackageJson`.
---
### `async PackageJson.create(path)`
Creates an empty `package.json` at the given path. If one already exists
it will be overwritten.
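### Example:
A minimal sketch of creating and seeding a fresh `package.json` (the `data` option is handled by the static `create` in `lib/index.js` below, which passes it through to `update()`; the field values here are illustrative):
```js
const PackageJson = require('@npmcli/package-json')
const pkgJson = await PackageJson.create('./', {
  data: { name: 'foo', version: '1.0.0' },
})
await pkgJson.save()
```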
---
### `async PackageJson.load(path)`
Loads a `package.json` at the given path.
### Example:
Loads contents of a `package.json` file located at `./`:
```js
const PackageJson = require('@npmcli/package-json')
const pkgJson = new PackageJson()
await pkgJson.load('./')
```
Throws an error in case a `package.json` file is missing or has invalid contents.
---
### **static** `async PackageJson.load(path, opts = {})`
Convenience static method that returns a new instance and loads the contents of a `package.json` file from that location.
- `path`: `String` that points to the folder from where to read the `package.json` from
- `opts`: `Object` can contain:
  - `create`: `Boolean` if true, a new `package.json` will be created if one does not already exist. Will not clobber an existing `package.json` that cannot be parsed.
### Example:
Loads contents of a `package.json` file located at `./`:
```js
const PackageJson = require('@npmcli/package-json')
const pkgJson = await PackageJson.load('./')
```
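If a `package.json` might be missing, the `create` option makes the static `load` create one instead of throwing; a minimal sketch:
```js
const PackageJson = require('@npmcli/package-json')
const pkgJson = await PackageJson.load('./', { create: true })
```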
---
### `async PackageJson.normalize()`
Intended for normalizing `package.json` files in a `node_modules` tree. Some light normalization is done to ensure that the file is ready for use in `@npmcli/arborist`.
- `opts`: `Object` can contain:
  - `strict`: `Boolean` enables optional strict mode when applying the `normalizeData` step
  - `steps`: `Array` optional normalization steps that will be applied to the `package.json` file, replacing the default steps
  - `root`: `Path` optional git root to provide when applying the `gitHead` step
  - `changes`: `Array` if provided, a message about each change that was made to the packument will be added to this array
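### Example:
A minimal sketch that collects a message for each normalization change via the `changes` option (the exact messages depend on the file's contents):
```js
const PackageJson = require('@npmcli/package-json')
const pkgJson = await PackageJson.load('./')
const changes = []
await pkgJson.normalize({ changes })
console.log(changes) // e.g. [ '"_id" was set to foo@1.0.0' ]
```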
---
### **static** `async PackageJson.normalize(path, opts = {})`
Convenience static that calls `load` before calling `normalize`.
- `path`: `String` that points to the folder from where to read the `package.json` from
- `opts`: `Object` can contain:
  - `strict`: `Boolean` enables optional strict mode when applying the `normalizeData` step
  - `steps`: `Array` optional normalization steps that will be applied to the `package.json` file, replacing the default steps
  - `root`: `Path` optional git root to provide when applying the `gitHead` step
  - `changes`: `Array` if provided, a message about each change that was made to the packument will be added to this array
---
### `async PackageJson.prepare()`
Like `normalize` but intended for preparing package.json files for publish.
---
### **static** `async PackageJson.prepare(path, opts = {})`
Convenience static that calls `load` before calling `prepare`.
- `path`: `String` that points to the folder from where to read the `package.json` from
- `opts`: `Object` can contain:
  - `strict`: `Boolean` enables optional strict mode when applying the `normalizeData` step
  - `steps`: `Array` optional normalization steps that will be applied to the `package.json` file, replacing the default steps
  - `root`: `Path` optional git root to provide when applying the `gitHead` step
  - `changes`: `Array` if provided, a message about each change that was made to the packument will be added to this array
---
### `async PackageJson.fix()`
Like `normalize` but intended for the `npm pkg fix` command.
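### Example:
A minimal sketch using the static form, which loads the file and then applies the fix steps:
```js
const PackageJson = require('@npmcli/package-json')
const pkgJson = await PackageJson.fix('./')
await pkgJson.save()
```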
---
### `PackageJson.update(content)`
Updates the contents of a `package.json` with the `content` provided.
- `content`: `Object` containing the properties to be updated/replaced in the
`package.json` file.
Special properties like `dependencies`, `devDependencies`,
`optionalDependencies`, `peerDependencies` will have special logic to handle
the update of these options, such as sorting and deduplication.
### Example:
Adds a new script named `new-script` to your `package.json` `scripts` property:
```js
const PackageJson = require('@npmcli/package-json')
const pkgJson = await PackageJson.load('./')
pkgJson.update({
scripts: {
...pkgJson.content.scripts,
'new-script': 'echo "Bom dia!"'
}
})
```
**NOTE:** When working with dependencies, it's important to provide values for
all known dependency types, as the update logic has some interdependence
between these properties.
### Example:
A safe way to add a `devDependency` AND remove all peer dependencies of an
existing `package.json`:
```js
const PackageJson = require('@npmcli/package-json')
const pkgJson = await PackageJson.load('./')
pkgJson.update({
dependencies: pkgJson.content.dependencies,
devDependencies: {
...pkgJson.content.devDependencies,
foo: '^1.0.0',
},
peerDependencies: {},
optionalDependencies: pkgJson.content.optionalDependencies,
})
```
---
### **get** `PackageJson.content`
Getter that retrieves the normalized `Object` read from the loaded
`package.json` file.
### Example:
```js
const PackageJson = require('@npmcli/package-json')
const pkgJson = await PackageJson.load('./')
pkgJson.content
// -> {
// name: 'foo',
// version: '1.0.0'
// }
```
---
### `async PackageJson.save()`
Saves the current `content` to the same location used when calling
`load()`.
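### Example:
The implementation in `lib/index.js` also accepts a `sort` option that reorders the top-level keys via `packageSort` (see `lib/sort.js`) before writing; a minimal sketch:
```js
const PackageJson = require('@npmcli/package-json')
const pkgJson = await PackageJson.load('./')
await pkgJson.save({ sort: true })
```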
## LICENSE
[ISC](./LICENSE)

286
node_modules/@npmcli/package-json/lib/index.js generated vendored Normal file

@@ -0,0 +1,286 @@
const { readFile, writeFile } = require('node:fs/promises')
const { resolve } = require('node:path')
const parseJSON = require('json-parse-even-better-errors')
const updateDeps = require('./update-dependencies.js')
const updateScripts = require('./update-scripts.js')
const updateWorkspaces = require('./update-workspaces.js')
const normalize = require('./normalize.js')
const { read, parse } = require('./read-package.js')
const { packageSort } = require('./sort.js')
// a list of handy specialized helper functions that take
// care of special cases that are handled by the npm cli
const knownSteps = new Set([
updateDeps,
updateScripts,
updateWorkspaces,
])
// list of all keys that are handled by "knownSteps" helpers
const knownKeys = new Set([
...updateDeps.knownKeys,
'scripts',
'workspaces',
])
class PackageJson {
static normalizeSteps = Object.freeze([
'_id',
'_attributes',
'bundledDependencies',
'bundleDependencies',
'optionalDedupe',
'scripts',
'funding',
'bin',
])
// npm pkg fix
static fixSteps = Object.freeze([
'binRefs',
'bundleDependencies',
'bundleDependenciesFalse',
'fixName',
'fixNameField',
'fixVersionField',
'fixRepositoryField',
'fixDependencies',
'devDependencies',
'scriptpath',
])
static prepareSteps = Object.freeze([
'_id',
'_attributes',
'bundledDependencies',
'bundleDependencies',
'bundleDependenciesDeleteFalse',
'gypfile',
'serverjs',
'scriptpath',
'authors',
'readme',
'mans',
'binDir',
'gitHead',
'fillTypes',
'normalizeData',
'binRefs',
])
// create a new empty package.json, so we can save at the given path even
// though we didn't start from a parsed file
static async create (path, opts = {}) {
const p = new PackageJson()
await p.create(path)
if (opts.data) {
return p.update(opts.data)
}
return p
}
// Loads a package.json at given path and JSON parses
static async load (path, opts = {}) {
const p = new PackageJson()
// Avoid try/catch if we aren't going to create
if (!opts.create) {
return p.load(path)
}
try {
return await p.load(path)
} catch (err) {
if (!err.message.startsWith('Could not read package.json')) {
throw err
}
return await p.create(path)
}
}
// npm pkg fix
static async fix (path, opts) {
const p = new PackageJson()
await p.load(path, true)
return p.fix(opts)
}
// read-package-json compatible behavior
static async prepare (path, opts) {
const p = new PackageJson()
await p.load(path, true)
return p.prepare(opts)
}
// read-package-json-fast compatible behavior
static async normalize (path, opts) {
const p = new PackageJson()
await p.load(path)
return p.normalize(opts)
}
#path
#manifest
#readFileContent = ''
#canSave = true
// Load content from given path
async load (path, parseIndex) {
this.#path = path
let parseErr
try {
this.#readFileContent = await read(this.filename)
} catch (err) {
if (!parseIndex) {
throw err
}
parseErr = err
}
if (parseErr) {
const indexFile = resolve(this.path, 'index.js')
let indexFileContent
try {
indexFileContent = await readFile(indexFile, 'utf8')
} catch (err) {
throw parseErr
}
try {
this.fromComment(indexFileContent)
} catch (err) {
throw parseErr
}
// This wasn't a package.json so prevent saving
this.#canSave = false
return this
}
return this.fromJSON(this.#readFileContent)
}
// Load data from a JSON string/buffer
fromJSON (data) {
this.#manifest = parse(data)
return this
}
fromContent (data) {
this.#manifest = data
this.#canSave = false
return this
}
// Load data from a comment
// /**package { "name": "foo", "version": "1.2.3", ... } **/
fromComment (data) {
data = data.split(/^\/\*\*package(?:\s|$)/m)
if (data.length < 2) {
throw new Error('File has no package in comments')
}
data = data[1]
data = data.split(/\*\*\/$/m)
if (data.length < 2) {
throw new Error('File has no package in comments')
}
data = data[0]
data = data.replace(/^\s*\*/mg, '')
this.#manifest = parseJSON(data)
return this
}
get content () {
return this.#manifest
}
get path () {
return this.#path
}
get filename () {
if (this.path) {
return resolve(this.path, 'package.json')
}
return undefined
}
create (path) {
this.#path = path
this.#manifest = {}
return this
}
// This should be the ONLY way to set content in the manifest
update (content) {
if (!this.content) {
throw new Error('Can not update without content. Please `load` or `create`')
}
for (const step of knownSteps) {
this.#manifest = step({ content, originalContent: this.content })
}
// unknown properties will just be overwritten
for (const [key, value] of Object.entries(content)) {
if (!knownKeys.has(key)) {
this.content[key] = value
}
}
return this
}
async save ({ sort } = {}) {
if (!this.#canSave) {
throw new Error('No package.json to save to')
}
const {
[Symbol.for('indent')]: indent,
[Symbol.for('newline')]: newline,
...rest
} = this.content
const format = indent === undefined ? '  ' : indent
const eol = newline === undefined ? '\n' : newline
const content = sort ? packageSort(rest) : rest
const fileContent = `${
JSON.stringify(content, null, format)
}\n`
.replace(/\n/g, eol)
if (fileContent.trim() !== this.#readFileContent.trim()) {
const written = await writeFile(this.filename, fileContent)
this.#readFileContent = fileContent
return written
}
}
async normalize (opts = {}) {
if (!opts.steps) {
opts.steps = this.constructor.normalizeSteps
}
await normalize(this, opts)
return this
}
async prepare (opts = {}) {
if (!opts.steps) {
opts.steps = this.constructor.prepareSteps
}
await normalize(this, opts)
return this
}
async fix (opts = {}) {
// This one is not overridable
opts.steps = this.constructor.fixSteps
await normalize(this, opts)
return this
}
}
module.exports = PackageJson

257
node_modules/@npmcli/package-json/lib/normalize-data.js generated vendored Normal file

@@ -0,0 +1,257 @@
// Originally normalize-package-data
const url = require('node:url')
const hostedGitInfo = require('hosted-git-info')
const validateLicense = require('validate-npm-package-license')
const typos = {
dependancies: 'dependencies',
dependecies: 'dependencies',
depdenencies: 'dependencies',
devEependencies: 'devDependencies',
depends: 'dependencies',
'dev-dependencies': 'devDependencies',
devDependences: 'devDependencies',
devDepenencies: 'devDependencies',
devdependencies: 'devDependencies',
repostitory: 'repository',
repo: 'repository',
prefereGlobal: 'preferGlobal',
hompage: 'homepage',
hampage: 'homepage',
autohr: 'author',
autor: 'author',
contributers: 'contributors',
publicationConfig: 'publishConfig',
script: 'scripts',
}
const isEmail = str => str.includes('@') && (str.indexOf('@') < str.lastIndexOf('.'))
// Extracts description from contents of a readme file in markdown format
function extractDescription (description) {
// the first block of text before the first heading that isn't the first line heading
const lines = description.trim().split('\n')
let start = 0
// skip initial empty lines and lines that start with #
while (lines[start]?.trim().match(/^(#|$)/)) {
start++
}
let end = start + 1
// keep going till we get to the end or an empty line
while (end < lines.length && lines[end].trim()) {
end++
}
return lines.slice(start, end).join(' ').trim()
}
function stringifyPerson (person) {
if (typeof person !== 'string') {
const name = person.name || ''
const u = person.url || person.web
const wrappedUrl = u ? (' (' + u + ')') : ''
const e = person.email || person.mail
const wrappedEmail = e ? (' <' + e + '>') : ''
person = name + wrappedEmail + wrappedUrl
}
const matchedName = person.match(/^([^(<]+)/)
const matchedUrl = person.match(/\(([^()]+)\)/)
const matchedEmail = person.match(/<([^<>]+)>/)
const parsed = {}
if (matchedName?.[0].trim()) {
parsed.name = matchedName[0].trim()
}
if (matchedEmail) {
parsed.email = matchedEmail[1]
}
if (matchedUrl) {
parsed.url = matchedUrl[1]
}
return parsed
}
function normalizeData (data, changes) {
// fixDescriptionField
if (data.description && typeof data.description !== 'string') {
changes?.push(`'description' field should be a string`)
delete data.description
}
if (data.readme && !data.description && data.readme !== 'ERROR: No README data found!') {
data.description = extractDescription(data.readme)
}
if (data.description === undefined) {
delete data.description
}
if (!data.description) {
changes?.push('No description')
}
// fixModulesField
if (data.modules) {
changes?.push(`modules field is deprecated`)
delete data.modules
}
// fixFilesField
const files = data.files
if (files && !Array.isArray(files)) {
changes?.push(`Invalid 'files' member`)
delete data.files
} else if (data.files) {
data.files = data.files.filter(function (file) {
if (!file || typeof file !== 'string') {
changes?.push(`Invalid filename in 'files' list: ${file}`)
return false
} else {
return true
}
})
}
// fixManField
if (data.man && typeof data.man === 'string') {
data.man = [data.man]
}
// fixBugsField
if (!data.bugs && data.repository?.url) {
const hosted = hostedGitInfo.fromUrl(data.repository.url)
if (hosted && hosted.bugs()) {
data.bugs = { url: hosted.bugs() }
}
} else if (data.bugs) {
if (typeof data.bugs === 'string') {
if (isEmail(data.bugs)) {
data.bugs = { email: data.bugs }
/* eslint-disable-next-line node/no-deprecated-api */
} else if (url.parse(data.bugs).protocol) {
data.bugs = { url: data.bugs }
} else {
changes?.push(`Bug string field must be url, email, or {email,url}`)
}
} else {
for (const k in data.bugs) {
if (['web', 'name'].includes(k)) {
changes?.push(`bugs['${k}'] should probably be bugs['url'].`)
data.bugs.url = data.bugs[k]
delete data.bugs[k]
}
}
const oldBugs = data.bugs
data.bugs = {}
if (oldBugs.url) {
/* eslint-disable-next-line node/no-deprecated-api */
if (typeof (oldBugs.url) === 'string' && url.parse(oldBugs.url).protocol) {
data.bugs.url = oldBugs.url
} else {
changes?.push('bugs.url field must be a string url. Deleted.')
}
}
if (oldBugs.email) {
if (typeof (oldBugs.email) === 'string' && isEmail(oldBugs.email)) {
data.bugs.email = oldBugs.email
} else {
changes?.push('bugs.email field must be a string email. Deleted.')
}
}
}
if (!data.bugs.email && !data.bugs.url) {
delete data.bugs
changes?.push('Normalized value of bugs field is an empty object. Deleted.')
}
}
// fixKeywordsField
if (typeof data.keywords === 'string') {
data.keywords = data.keywords.split(/,\s+/)
}
if (data.keywords && !Array.isArray(data.keywords)) {
delete data.keywords
changes?.push(`keywords should be an array of strings`)
} else if (data.keywords) {
data.keywords = data.keywords.filter(function (kw) {
if (typeof kw !== 'string' || !kw) {
changes?.push(`keywords should be an array of strings`)
return false
} else {
return true
}
})
}
// fixBundleDependenciesField
const bdd = 'bundledDependencies'
const bd = 'bundleDependencies'
if (data[bdd] && !data[bd]) {
data[bd] = data[bdd]
delete data[bdd]
}
if (data[bd] && !Array.isArray(data[bd])) {
changes?.push(`Invalid 'bundleDependencies' list. Must be array of package names`)
delete data[bd]
} else if (data[bd]) {
data[bd] = data[bd].filter(function (filtered) {
if (!filtered || typeof filtered !== 'string') {
changes?.push(`Invalid bundleDependencies member: ${filtered}`)
return false
} else {
if (!data.dependencies) {
data.dependencies = {}
}
if (!Object.prototype.hasOwnProperty.call(data.dependencies, filtered)) {
changes?.push(`Non-dependency in bundleDependencies: ${filtered}`)
data.dependencies[filtered] = '*'
}
return true
}
})
}
// fixHomepageField
if (!data.homepage && data.repository && data.repository.url) {
const hosted = hostedGitInfo.fromUrl(data.repository.url)
if (hosted) {
data.homepage = hosted.docs()
}
}
if (data.homepage) {
if (typeof data.homepage !== 'string') {
changes?.push('homepage field must be a string url. Deleted.')
delete data.homepage
} else {
/* eslint-disable-next-line node/no-deprecated-api */
if (!url.parse(data.homepage).protocol) {
data.homepage = 'http://' + data.homepage
}
}
}
// fixReadmeField
if (!data.readme) {
changes?.push('No README data')
data.readme = 'ERROR: No README data found!'
}
// fixLicenseField
const license = data.license || data.licence
if (!license) {
changes?.push('No license field.')
} else if (typeof (license) !== 'string' || license.length < 1 || license.trim() === '') {
changes?.push('license should be a valid SPDX license expression')
} else if (!validateLicense(license).validForNewPackages) {
changes?.push('license should be a valid SPDX license expression')
}
// fixPeople
if (data.author) {
data.author = stringifyPerson(data.author)
}
['maintainers', 'contributors'].forEach(function (set) {
if (!Array.isArray(data[set])) {
return
}
data[set] = data[set].map(stringifyPerson)
})
// fixTypos
for (const d in typos) {
if (Object.prototype.hasOwnProperty.call(data, d)) {
changes?.push(`${d} should probably be ${typos[d]}.`)
}
}
}
module.exports = { normalizeData }

601
node_modules/@npmcli/package-json/lib/normalize.js generated vendored Normal file

@@ -0,0 +1,601 @@
const valid = require('semver/functions/valid')
const clean = require('semver/functions/clean')
const fs = require('node:fs/promises')
const path = require('node:path')
const { log } = require('proc-log')
const moduleBuiltin = require('node:module')
/**
* @type {import('hosted-git-info')}
*/
let _hostedGitInfo
function lazyHostedGitInfo () {
if (!_hostedGitInfo) {
_hostedGitInfo = require('hosted-git-info')
}
return _hostedGitInfo
}
/**
* @type {import('glob').glob}
*/
let _glob
function lazyLoadGlob () {
if (!_glob) {
_glob = require('glob').glob
}
return _glob
}
// used to be npm-normalize-package-bin
function normalizePackageBin (pkg, changes) {
if (pkg.bin) {
if (typeof pkg.bin === 'string' && pkg.name) {
changes?.push('"bin" was converted to an object')
pkg.bin = { [pkg.name]: pkg.bin }
} else if (Array.isArray(pkg.bin)) {
changes?.push('"bin" was converted to an object')
pkg.bin = pkg.bin.reduce((acc, k) => {
acc[path.basename(k)] = k
return acc
}, {})
}
if (typeof pkg.bin === 'object') {
for (const binKey in pkg.bin) {
if (typeof pkg.bin[binKey] !== 'string') {
delete pkg.bin[binKey]
changes?.push(`removed invalid "bin[${binKey}]"`)
continue
}
const base = path.basename(secureAndUnixifyPath(binKey))
if (!base) {
delete pkg.bin[binKey]
changes?.push(`removed invalid "bin[${binKey}]"`)
continue
}
const binTarget = secureAndUnixifyPath(pkg.bin[binKey])
if (!binTarget) {
delete pkg.bin[binKey]
changes?.push(`removed invalid "bin[${binKey}]"`)
continue
}
if (base !== binKey) {
delete pkg.bin[binKey]
changes?.push(`"bin[${binKey}]" was renamed to "bin[${base}]"`)
}
if (binTarget !== pkg.bin[binKey]) {
changes?.push(`"bin[${base}]" script name was cleaned`)
}
pkg.bin[base] = binTarget
}
if (Object.keys(pkg.bin).length === 0) {
changes?.push('empty "bin" was removed')
delete pkg.bin
}
return pkg
}
}
delete pkg.bin
}
function normalizePackageMan (pkg, changes) {
if (pkg.man) {
const mans = []
for (const man of (Array.isArray(pkg.man) ? pkg.man : [pkg.man])) {
if (typeof man !== 'string') {
changes?.push(`removed invalid "man [${man}]"`)
} else {
mans.push(secureAndUnixifyPath(man))
}
}
if (!mans.length) {
changes?.push('empty "man" was removed')
} else {
pkg.man = mans
return pkg
}
}
delete pkg.man
}
function isCorrectlyEncodedName (spec) {
return !spec.match(/[/@\s+%:]/) &&
spec === encodeURIComponent(spec)
}
function isValidScopedPackageName (spec) {
if (spec.charAt(0) !== '@') {
return false
}
const rest = spec.slice(1).split('/')
if (rest.length !== 2) {
return false
}
return rest[0] && rest[1] &&
rest[0] === encodeURIComponent(rest[0]) &&
rest[1] === encodeURIComponent(rest[1])
}
function unixifyPath (ref) {
return ref.replace(/\\|:/g, '/')
}
function secureAndUnixifyPath (ref) {
const secured = unixifyPath(path.join('.', path.join('/', unixifyPath(ref))))
return secured.startsWith('./') ? '' : secured
}
// We don't want the `changes` array in here by default because this is a hot
// path for parsing packuments during install. So the calling method passes it
// in if it wants to track changes.
const normalize = async (pkg, { strict, steps, root, changes, allowLegacyCase }) => {
if (!pkg.content) {
throw new Error('Can not normalize without content')
}
const data = pkg.content
const scripts = data.scripts || {}
const pkgId = `${data.name ?? ''}@${data.version ?? ''}`
// name and version are load bearing so we have to clean them up first
if (steps.includes('fixName') || steps.includes('fixNameField') || steps.includes('normalizeData')) {
if (!data.name && !strict) {
changes?.push('Missing "name" field was set to an empty string')
data.name = ''
} else {
if (typeof data.name !== 'string') {
throw new Error('name field must be a string.')
}
if (!strict) {
const name = data.name.trim()
if (data.name !== name) {
changes?.push(`Whitespace was trimmed from "name"`)
data.name = name
}
}
if (data.name.startsWith('.') ||
!(isValidScopedPackageName(data.name) || isCorrectlyEncodedName(data.name)) ||
(strict && (!allowLegacyCase) && data.name !== data.name.toLowerCase()) ||
data.name.toLowerCase() === 'node_modules' ||
data.name.toLowerCase() === 'favicon.ico') {
throw new Error('Invalid name: ' + JSON.stringify(data.name))
}
}
}
if (steps.includes('fixName')) {
// Check for conflicts with builtin modules
if (moduleBuiltin.builtinModules.includes(data.name)) {
log.warn('package-json', pkgId, `Package name "${data.name}" conflicts with a Node.js built-in module name`)
}
}
if (steps.includes('fixVersionField') || steps.includes('normalizeData')) {
// allow "loose" semver 1.0 versions in non-strict mode
// enforce strict semver 2.0 compliance in strict mode
const loose = !strict
if (!data.version) {
data.version = ''
} else {
if (!valid(data.version, loose)) {
throw new Error(`Invalid version: "${data.version}"`)
}
const version = clean(data.version, loose)
if (version !== data.version) {
changes?.push(`"version" was cleaned and set to "${version}"`)
data.version = version
}
}
}
// remove attributes that start with "_"
if (steps.includes('_attributes')) {
for (const key in data) {
if (key.startsWith('_')) {
changes?.push(`"${key}" was removed`)
delete pkg.content[key]
}
}
}
// build the "_id" attribute
if (steps.includes('_id')) {
if (data.name && data.version) {
changes?.push(`"_id" was set to ${pkgId}`)
data._id = pkgId
}
}
// fix bundledDependencies typo
// normalize bundleDependencies
if (steps.includes('bundledDependencies')) {
if (data.bundleDependencies === undefined && data.bundledDependencies !== undefined) {
data.bundleDependencies = data.bundledDependencies
}
changes?.push(`Deleted incorrect "bundledDependencies"`)
delete data.bundledDependencies
}
// expand "bundleDependencies: true or translate from object"
if (steps.includes('bundleDependencies')) {
const bd = data.bundleDependencies
if (bd === false && !steps.includes('bundleDependenciesDeleteFalse')) {
changes?.push(`"bundleDependencies" was changed from "false" to "[]"`)
data.bundleDependencies = []
} else if (bd === true) {
changes?.push(`"bundleDependencies" was auto-populated from "dependencies"`)
data.bundleDependencies = Object.keys(data.dependencies || {})
} else if (bd && typeof bd === 'object') {
if (!Array.isArray(bd)) {
changes?.push(`"bundleDependencies" was changed from an object to an array`)
data.bundleDependencies = Object.keys(bd)
}
} else if ('bundleDependencies' in data) {
changes?.push(`"bundleDependencies" was removed`)
delete data.bundleDependencies
}
}
// it was once common practice to list deps both in optionalDependencies and
// in dependencies, to support npm versions that did not know about
// optionalDependencies. This is no longer a relevant need, so duplicating
// the deps in two places is unnecessary and excessive.
if (steps.includes('optionalDedupe')) {
if (data.dependencies &&
data.optionalDependencies && typeof data.optionalDependencies === 'object') {
for (const name in data.optionalDependencies) {
changes?.push(`optionalDependencies."${name}" was removed`)
delete data.dependencies[name]
}
if (!Object.keys(data.dependencies).length) {
changes?.push(`Empty "optionalDependencies" was removed`)
delete data.dependencies
}
}
}
// add "install" attribute if any "*.gyp" files exist
if (steps.includes('gypfile')) {
if (!scripts.install && !scripts.preinstall && data.gypfile !== false) {
const files = await lazyLoadGlob()('*.gyp', { cwd: pkg.path })
if (files.length) {
scripts.install = 'node-gyp rebuild'
data.scripts = scripts
data.gypfile = true
changes?.push(`"scripts.install" was set to "node-gyp rebuild"`)
changes?.push(`"gypfile" was set to "true"`)
}
}
}
// add "start" attribute if "server.js" exists
if (steps.includes('serverjs') && !scripts.start) {
try {
await fs.access(path.join(pkg.path, 'server.js'))
scripts.start = 'node server.js'
data.scripts = scripts
changes?.push('"scripts.start" was set to "node server.js"')
} catch {
// do nothing
}
}
// strip "node_modules/.bin" from scripts entries
// remove invalid scripts entries (non-strings)
if ((steps.includes('scripts') || steps.includes('scriptpath')) && data.scripts !== undefined) {
const spre = /^(\.[/\\])?node_modules[/\\].bin[\\/]/
if (typeof data.scripts === 'object') {
for (const name in data.scripts) {
if (typeof data.scripts[name] !== 'string') {
delete data.scripts[name]
changes?.push(`Invalid scripts."${name}" was removed`)
} else if (steps.includes('scriptpath') && spre.test(data.scripts[name])) {
data.scripts[name] = data.scripts[name].replace(spre, '')
changes?.push(`scripts entry "${name}" was fixed to remove node_modules/.bin reference`)
}
}
} else {
changes?.push(`Removed invalid "scripts"`)
delete data.scripts
}
}
if (steps.includes('funding')) {
if (data.funding && typeof data.funding === 'string') {
data.funding = { url: data.funding }
changes?.push(`"funding" was changed to an object with a url attribute`)
}
}
// populate "authors" attribute
if (steps.includes('authors') && !data.contributors) {
try {
const authorData = await fs.readFile(path.join(pkg.path, 'AUTHORS'), 'utf8')
const authors = authorData.split(/\r?\n/g)
.map(line => line.replace(/^\s*#.*$/, '').trim())
.filter(line => line)
data.contributors = authors
changes?.push('"contributors" was auto-populated with the contents of the "AUTHORS" file')
} catch {
// do nothing
}
}
// populate "readme" attribute
if (steps.includes('readme') && !data.readme) {
const mdre = /\.m?a?r?k?d?o?w?n?$/i
const files = await lazyLoadGlob()('{README,README.*}', {
cwd: pkg.path,
nocase: true,
mark: true,
})
let readmeFile
for (const file of files) {
// don't accept directories.
if (!file.endsWith(path.sep)) {
if (file.match(mdre)) {
readmeFile = file
break
}
if (file.endsWith('README')) {
readmeFile = file
}
}
}
if (readmeFile) {
const readmeData = await fs.readFile(path.join(pkg.path, readmeFile), 'utf8')
data.readme = readmeData
data.readmeFilename = readmeFile
changes?.push(`"readme" was set to the contents of ${readmeFile}`)
changes?.push(`"readmeFilename" was set to ${readmeFile}`)
}
if (!data.readme) {
data.readme = 'ERROR: No README data found!'
}
}
// expand directories.man
if (steps.includes('mans')) {
if (data.directories?.man && !data.man) {
const manDir = secureAndUnixifyPath(data.directories.man)
const cwd = path.resolve(pkg.path, manDir)
const files = await lazyLoadGlob()('**/*.[0-9]', { cwd })
data.man = files.map(man =>
path.relative(pkg.path, path.join(cwd, man)).split(path.sep).join('/')
)
}
normalizePackageMan(data, changes)
}
if (steps.includes('bin') || steps.includes('binDir') || steps.includes('binRefs')) {
normalizePackageBin(data, changes)
}
// expand "directories.bin"
if (steps.includes('binDir') && data.directories?.bin && !data.bin) {
const binsDir = path.resolve(pkg.path, secureAndUnixifyPath(data.directories.bin))
const bins = await lazyLoadGlob()('**', { cwd: binsDir })
data.bin = bins.reduce((acc, binFile) => {
if (binFile && !binFile.startsWith('.')) {
const binName = path.basename(binFile)
acc[binName] = path.join(data.directories.bin, binFile)
}
return acc
}, {})
// *sigh*
normalizePackageBin(data, changes)
}
// populate "gitHead" attribute
if (steps.includes('gitHead') && !data.gitHead) {
const git = require('@npmcli/git')
const gitRoot = await git.find({ cwd: pkg.path, root })
let head
if (gitRoot) {
try {
head = await fs.readFile(path.resolve(gitRoot, '.git/HEAD'), 'utf8')
} catch (err) {
// do nothing
}
}
let headData
if (head) {
if (head.startsWith('ref: ')) {
const headRef = head.replace(/^ref: /, '').trim()
const headFile = path.resolve(gitRoot, '.git', headRef)
try {
headData = await fs.readFile(headFile, 'utf8')
headData = headData.replace(/^ref: /, '').trim()
} catch (err) {
// do nothing
}
if (!headData) {
const packFile = path.resolve(gitRoot, '.git/packed-refs')
try {
let refs = await fs.readFile(packFile, 'utf8')
if (refs) {
refs = refs.split('\n')
for (let i = 0; i < refs.length; i++) {
const match = refs[i].match(/^([0-9a-f]{40}) (.+)$/)
if (match && match[2].trim() === headRef) {
headData = match[1]
break
}
}
}
} catch {
// do nothing
}
}
} else {
headData = head.trim()
}
}
if (headData) {
data.gitHead = headData
}
}
// populate "types" attribute
if (steps.includes('fillTypes')) {
const index = data.main || 'index.js'
if (typeof index !== 'string') {
throw new TypeError('The "main" attribute must be of type string.')
}
// TODO exports is much more complicated than this in verbose format
// We need to support for instance
// "exports": {
// ".": [
// {
// "default": "./lib/npm.js"
// },
// "./lib/npm.js"
// ],
// "./package.json": "./package.json"
// },
// as well as conditional exports
// if (data.exports && typeof data.exports === 'string') {
// index = data.exports
// }
// if (data.exports && data.exports['.']) {
// index = data.exports['.']
// if (typeof index !== 'string') {
// }
// }
const extless = path.join(path.dirname(index), path.basename(index, path.extname(index)))
const dts = `./${extless}.d.ts`
const hasDTSFields = 'types' in data || 'typings' in data
if (!hasDTSFields) {
try {
await fs.access(path.join(pkg.path, dts))
data.types = dts.split(path.sep).join('/')
} catch {
// do nothing
}
}
}
// "normalizeData" from "read-package-json", which was just a call through to
// "normalize-package-data". We only call the "fixer" functions because
// outside of that it was also clobbering _id (which we already conditionally
// do) and also adding the gypfile script (which we also already
// conditionally do)
// Some steps are isolated so we can do a limited subset of these in `fix`
if (steps.includes('fixRepositoryField') || steps.includes('normalizeData')) {
if (data.repositories) {
changes?.push(`"repository" was set to the first entry in "repositories" (${data.repository})`)
data.repository = data.repositories[0]
}
if (data.repository) {
if (typeof data.repository === 'string') {
changes?.push('"repository" was changed from a string to an object')
data.repository = {
type: 'git',
url: data.repository,
}
}
if (data.repository.url) {
const hosted = lazyHostedGitInfo().fromUrl(data.repository.url)
let r
if (hosted) {
if (hosted.getDefaultRepresentation() === 'shortcut') {
r = hosted.https()
} else {
r = hosted.toString()
}
if (r !== data.repository.url) {
changes?.push(`"repository.url" was normalized to "${r}"`)
data.repository.url = r
}
}
}
}
}
if (steps.includes('fixDependencies') || steps.includes('normalizeData')) {
// peerDependencies?
// devDependencies is meaningless here, it's ignored on an installed package
for (const type of ['dependencies', 'devDependencies', 'optionalDependencies']) {
if (data[type]) {
let secondWarning = true
if (typeof data[type] === 'string') {
changes?.push(`"${type}" was converted from a string into an object`)
data[type] = data[type].trim().split(/[\n\r\s\t ,]+/)
secondWarning = false
}
if (Array.isArray(data[type])) {
if (secondWarning) {
changes?.push(`"${type}" was converted from an array into an object`)
}
const o = {}
for (const d of data[type]) {
if (typeof d === 'string') {
const dep = d.trim().split(/(:?[@\s><=])/)
const dn = dep.shift()
const dv = dep.join('').replace(/^@/, '').trim()
o[dn] = dv
}
}
data[type] = o
}
}
}
// normalize-package-data used to put optional dependencies BACK into
// dependencies here, we no longer do this
for (const deps of ['dependencies', 'devDependencies']) {
if (deps in data) {
if (!data[deps] || typeof data[deps] !== 'object') {
changes?.push(`Removed invalid "${deps}"`)
delete data[deps]
} else {
for (const d in data[deps]) {
const r = data[deps][d]
if (typeof r !== 'string') {
changes?.push(`Removed invalid "${deps}.${d}"`)
delete data[deps][d]
}
const hosted = lazyHostedGitInfo().fromUrl(data[deps][d])?.toString()
if (hosted && hosted !== data[deps][d]) {
changes?.push(`Normalized git reference to "${deps}.${d}"`)
data[deps][d] = hosted.toString()
}
}
}
}
}
}
// TODO some of this is duplicated in other steps here, a future breaking change may be able to remove the duplicates involved in this step
if (steps.includes('normalizeData')) {
const { normalizeData } = require('./normalize-data.js')
normalizeData(data, changes)
}
// Warn if the bin references don't point to anything. This might be better
// in normalize-package-data if it had access to the file path.
if (steps.includes('binRefs') && data.bin instanceof Object) {
for (const key in data.bin) {
try {
await fs.access(path.resolve(pkg.path, data.bin[key]))
} catch {
log.warn('package-json', pkgId, `No bin file found at ${data.bin[key]}`)
// XXX: should a future breaking change delete bin entries that cannot be accessed?
}
}
}
}
module.exports = normalize

39
node_modules/@npmcli/package-json/lib/read-package.js generated vendored Normal file

@@ -0,0 +1,39 @@
// This is JUST the code needed to open a package.json file and parse it.
// It's isolated out so that code needing to parse a package.json file can do
// so in the same way as this module does, without needing to require the
// whole module, or needing to require the underlying parsing library.
const { readFile } = require('fs/promises')
const parseJSON = require('json-parse-even-better-errors')
async function read (filename) {
try {
const data = await readFile(filename, 'utf8')
return data
} catch (err) {
err.message = `Could not read package.json: ${err}`
throw err
}
}
function parse (data) {
try {
const content = parseJSON(data)
return content
} catch (err) {
err.message = `Invalid package.json: ${err}`
throw err
}
}
// This is what most external libs will use.
// PackageJson will call read and parse separately
async function readPackage (filename) {
const data = await read(filename)
const content = parse(data)
return content
}
module.exports = {
read,
parse,
readPackage,
}

101
node_modules/@npmcli/package-json/lib/sort.js generated vendored Normal file

@@ -0,0 +1,101 @@
/**
* arbitrary sort order for package.json largely pulled from:
* https://github.com/keithamus/sort-package-json/blob/main/defaultRules.md
*
* cross checked with:
* https://github.com/npm/types/blob/main/types/index.d.ts#L104
* https://docs.npmjs.com/cli/configuring-npm/package-json
*/
function packageSort (json) {
const {
name,
version,
private: isPrivate,
description,
keywords,
homepage,
bugs,
repository,
funding,
license,
author,
maintainers,
contributors,
type,
imports,
exports,
main,
browser,
types,
bin,
man,
directories,
files,
workspaces,
scripts,
config,
dependencies,
devDependencies,
peerDependencies,
peerDependenciesMeta,
optionalDependencies,
bundledDependencies,
bundleDependencies,
engines,
os,
cpu,
publishConfig,
devEngines,
licenses,
overrides,
...rest
} = json
return {
...(typeof name !== 'undefined' ? { name } : {}),
...(typeof version !== 'undefined' ? { version } : {}),
...(typeof isPrivate !== 'undefined' ? { private: isPrivate } : {}),
...(typeof description !== 'undefined' ? { description } : {}),
...(typeof keywords !== 'undefined' ? { keywords } : {}),
...(typeof homepage !== 'undefined' ? { homepage } : {}),
...(typeof bugs !== 'undefined' ? { bugs } : {}),
...(typeof repository !== 'undefined' ? { repository } : {}),
...(typeof funding !== 'undefined' ? { funding } : {}),
...(typeof license !== 'undefined' ? { license } : {}),
...(typeof author !== 'undefined' ? { author } : {}),
...(typeof maintainers !== 'undefined' ? { maintainers } : {}),
...(typeof contributors !== 'undefined' ? { contributors } : {}),
...(typeof type !== 'undefined' ? { type } : {}),
...(typeof imports !== 'undefined' ? { imports } : {}),
...(typeof exports !== 'undefined' ? { exports } : {}),
...(typeof main !== 'undefined' ? { main } : {}),
...(typeof browser !== 'undefined' ? { browser } : {}),
...(typeof types !== 'undefined' ? { types } : {}),
...(typeof bin !== 'undefined' ? { bin } : {}),
...(typeof man !== 'undefined' ? { man } : {}),
...(typeof directories !== 'undefined' ? { directories } : {}),
...(typeof files !== 'undefined' ? { files } : {}),
...(typeof workspaces !== 'undefined' ? { workspaces } : {}),
...(typeof scripts !== 'undefined' ? { scripts } : {}),
...(typeof config !== 'undefined' ? { config } : {}),
...(typeof dependencies !== 'undefined' ? { dependencies } : {}),
...(typeof devDependencies !== 'undefined' ? { devDependencies } : {}),
...(typeof peerDependencies !== 'undefined' ? { peerDependencies } : {}),
...(typeof peerDependenciesMeta !== 'undefined' ? { peerDependenciesMeta } : {}),
...(typeof optionalDependencies !== 'undefined' ? { optionalDependencies } : {}),
...(typeof bundledDependencies !== 'undefined' ? { bundledDependencies } : {}),
...(typeof bundleDependencies !== 'undefined' ? { bundleDependencies } : {}),
...(typeof engines !== 'undefined' ? { engines } : {}),
...(typeof os !== 'undefined' ? { os } : {}),
...(typeof cpu !== 'undefined' ? { cpu } : {}),
...(typeof publishConfig !== 'undefined' ? { publishConfig } : {}),
...(typeof devEngines !== 'undefined' ? { devEngines } : {}),
...(typeof licenses !== 'undefined' ? { licenses } : {}),
...(typeof overrides !== 'undefined' ? { overrides } : {}),
...rest,
}
}
module.exports = {
packageSort,
}

75
node_modules/@npmcli/package-json/lib/update-dependencies.js generated vendored Normal file

@@ -0,0 +1,75 @@
const depTypes = new Set([
'dependencies',
'optionalDependencies',
'devDependencies',
'peerDependencies',
])
// sort alphabetically all types of deps for a given package
const orderDeps = (content) => {
for (const type of depTypes) {
if (content && content[type]) {
content[type] = Object.keys(content[type])
.sort((a, b) => a.localeCompare(b, 'en'))
.reduce((res, key) => {
res[key] = content[type][key]
return res
}, {})
}
}
return content
}
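// illustrative example (not from upstream):
// orderDeps({ dependencies: { b: '^1.0.0', a: '^1.0.0' } })
// -> { dependencies: { a: '^1.0.0', b: '^1.0.0' } }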
const updateDependencies = ({ content, originalContent }) => {
const pkg = orderDeps({
...content,
})
// optionalDependencies don't need to be repeated in two places
if (pkg.dependencies) {
if (pkg.optionalDependencies) {
for (const name of Object.keys(pkg.optionalDependencies)) {
delete pkg.dependencies[name]
}
}
}
const result = { ...originalContent }
// loop through all types of dependencies and update package json pkg
for (const type of depTypes) {
if (pkg[type]) {
result[type] = pkg[type]
}
// prune empty type props from resulting object
const emptyDepType =
pkg[type]
&& typeof pkg === 'object'
&& Object.keys(pkg[type]).length === 0
if (emptyDepType) {
delete result[type]
}
}
// if original package.json had dep in peerDeps AND deps, preserve that.
const { dependencies: origProd, peerDependencies: origPeer } =
originalContent || {}
const { peerDependencies: newPeer } = result
if (origProd && origPeer && newPeer) {
// we have original prod/peer deps, and new peer deps
// copy over any that were in both in the original
for (const name of Object.keys(origPeer)) {
if (origProd[name] !== undefined && newPeer[name] !== undefined) {
result.dependencies = result.dependencies || {}
result.dependencies[name] = newPeer[name]
}
}
}
return result
}
updateDependencies.knownKeys = depTypes
module.exports = updateDependencies

29
node_modules/@npmcli/package-json/lib/update-scripts.js generated vendored Normal file

@@ -0,0 +1,29 @@
const updateScripts = ({ content, originalContent = {} }) => {
const newScripts = content.scripts
if (!newScripts) {
return originalContent
}
// validate scripts content being appended
const hasInvalidScripts = () =>
Object.entries(newScripts)
.some(([key, value]) =>
typeof key !== 'string' || typeof value !== 'string')
if (hasInvalidScripts()) {
throw Object.assign(
new TypeError(
'package.json scripts should be a key-value pair of strings.'),
{ code: 'ESCRIPTSINVALID' }
)
}
return {
...originalContent,
scripts: {
...newScripts,
},
}
}
module.exports = updateScripts

26
node_modules/@npmcli/package-json/lib/update-workspaces.js generated vendored Normal file

@@ -0,0 +1,26 @@
const updateWorkspaces = ({ content, originalContent = {} }) => {
const newWorkspaces = content.workspaces
if (!newWorkspaces) {
return originalContent
}
// validate workspaces content being appended
const hasInvalidWorkspaces = () =>
newWorkspaces.some(w => !(typeof w === 'string'))
if (!newWorkspaces.length || hasInvalidWorkspaces()) {
throw Object.assign(
new TypeError('workspaces should be an array of strings.'),
{ code: 'EWORKSPACESINVALID' }
)
}
return {
...originalContent,
workspaces: [
...newWorkspaces,
],
}
}
module.exports = updateWorkspaces

13
node_modules/hosted-git-info/LICENSE generated vendored Normal file

@@ -0,0 +1,13 @@
Copyright (c) 2015, Rebecca Turner
Permission to use, copy, modify, and/or distribute this software for any
purpose with or without fee is hereby granted, provided that the above
copyright notice and this permission notice appear in all copies.
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH
REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND
FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT,
INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR
OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
PERFORMANCE OF THIS SOFTWARE.

138
node_modules/hosted-git-info/README.md generated vendored Normal file

@@ -0,0 +1,138 @@
# hosted-git-info
This will let you identify and transform various git host URLs between
protocols. It can also tell you what the URL is for the raw path of a
particular file, for direct access without git.
## Example
```javascript
const hostedGitInfo = require("hosted-git-info")
const info = hostedGitInfo.fromUrl("git@github.com:npm/hosted-git-info.git", opts)
/* info looks like:
{
type: "github",
domain: "github.com",
user: "npm",
project: "hosted-git-info"
}
*/
```
If the URL can't be matched with a git host, `null` will be returned. We
can match git, ssh and https urls. Additionally, we can match ssh connect
strings (`git@github.com:npm/hosted-git-info`) and shortcuts (eg,
`github:npm/hosted-git-info`). GitHub specifically, is detected in the case
of a third, unprefixed, form: `npm/hosted-git-info`.
If it does match, the returned object has properties of:
* info.type -- The short name of the service
* info.domain -- The domain for git protocol use
* info.user -- The name of the user/org on the git host
* info.project -- The name of the project on the git host
## Version Contract
The major version will be bumped any time…
* The constructor stops accepting URLs that it previously accepted.
* A method is removed.
* A method can no longer accept the number and type of arguments it previously accepted.
* A method can return a different type than it currently returns.
Implications:
* I do not consider the specific format of the urls returned from, say
`.https()` to be a part of the contract. The contract is that it will
return a string that can be used to fetch the repo via HTTPS. But what
that string looks like, specifically, can change.
* Dropping support for a hosted git provider would constitute a breaking
change.
## Usage
### const info = hostedGitInfo.fromUrl(gitSpecifier[, options])
* *gitSpecifier* is a URL of a git repository or an SCP-style specifier of one.
* *options* is an optional object. It can have the following properties:
  * *noCommittish* — If true then committishes won't be included in generated URLs.
  * *noGitPlus* — If true then `git+` won't be prefixed on URLs.
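For example, these options affect the URLs the info object generates; a minimal sketch (expected outputs shown as comments):
```javascript
const hostedGitInfo = require("hosted-git-info")
const info = hostedGitInfo.fromUrl("github:npm/hosted-git-info#v1.2.0")
info.https()
// -> "git+https://github.com/npm/hosted-git-info.git#v1.2.0"
info.https({ noGitPlus: true, noCommittish: true })
// -> "https://github.com/npm/hosted-git-info.git"
```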
### const infoOrURL = hostedGitInfo.fromManifest(manifest[, options])
* *manifest* is a package manifest, such as that returned by [`pacote.manifest()`](https://npmjs.com/pacote)
* *options* is an optional object. It can have the same properties as `fromUrl` above.
## Methods
All of the methods take the same options as the `fromUrl` factory. Options
provided to a method override those provided to the constructor.
* info.file(path, opts)
Given the path of a file relative to the repository, returns a URL for
directly fetching it from the githost. If no committish was set then
`HEAD` will be used as the default.
For example `hostedGitInfo.fromUrl("git@github.com:npm/hosted-git-info.git#v1.0.0").file("package.json")`
would return `https://raw.githubusercontent.com/npm/hosted-git-info/v1.0.0/package.json`
* info.shortcut(opts)
eg, `github:npm/hosted-git-info`
* info.browse(path, fragment, opts)
eg, `https://github.com/npm/hosted-git-info/tree/v1.2.0`,
`https://github.com/npm/hosted-git-info/tree/v1.2.0/package.json`,
`https://github.com/npm/hosted-git-info/tree/v1.2.0/README.md#supported-hosts`
* info.bugs(opts)
eg, `https://github.com/npm/hosted-git-info/issues`
* info.docs(opts)
eg, `https://github.com/npm/hosted-git-info/tree/v1.2.0#readme`
* info.https(opts)
eg, `git+https://github.com/npm/hosted-git-info.git`
* info.sshurl(opts)
eg, `git+ssh://git@github.com/npm/hosted-git-info.git`
* info.ssh(opts)
eg, `git@github.com:npm/hosted-git-info.git`
* info.path(opts)
eg, `npm/hosted-git-info`
* info.tarball(opts)
eg, `https://github.com/npm/hosted-git-info/archive/v1.2.0.tar.gz`
* info.getDefaultRepresentation()
Returns the default output type. The default output type is based on the
string you passed in to be parsed.
* info.toString(opts)
Uses `getDefaultRepresentation()` to call one of the other methods to get a URL for
this resource. As such `hostedGitInfo.fromUrl(url).toString()` will give
you a normalized version of the URL that still uses the same protocol.
Shortcuts will still be returned as shortcuts, but the special case github
form of `org/project` will be normalized to `github:org/project`.
SSH connect strings will be normalized into `git+ssh` URLs.
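A minimal sketch of that normalization, per the rules above (expected outputs shown as comments):
```javascript
const hostedGitInfo = require("hosted-git-info")
hostedGitInfo.fromUrl("npm/hosted-git-info").toString()
// -> "github:npm/hosted-git-info"
hostedGitInfo.fromUrl("git@github.com:npm/hosted-git-info.git").toString()
// -> "git+ssh://git@github.com/npm/hosted-git-info.git"
```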
## Supported hosts
Currently this supports GitHub (including Gists), Bitbucket, GitLab and Sourcehut.
Pull requests for additional hosts welcome.

122
node_modules/hosted-git-info/lib/from-url.js generated vendored Normal file

@@ -0,0 +1,122 @@
'use strict'
const parseUrl = require('./parse-url')
// look for github shorthand inputs, such as npm/cli
const isGitHubShorthand = (arg) => {
// it cannot contain whitespace before the first #
// it cannot start with a / because that's probably an absolute file path
// but it must include a slash since repos are username/repository
// it cannot start with a . because that's probably a relative file path
// it cannot start with an @ because that's a scoped package if it passes the other tests
// it cannot contain a : before a # because that tells us that there's a protocol
// a second / may not exist before a #
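// illustrative examples (not from upstream): 'npm/cli' and 'npm/cli#v1.0.0'
// pass these checks; './cli', '@npm/cli', 'git@github.com:npm/cli' and
// 'npm/cli/extra' do not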
const firstHash = arg.indexOf('#')
const firstSlash = arg.indexOf('/')
const secondSlash = arg.indexOf('/', firstSlash + 1)
const firstColon = arg.indexOf(':')
const firstSpace = /\s/.exec(arg)
const firstAt = arg.indexOf('@')
const spaceOnlyAfterHash = !firstSpace || (firstHash > -1 && firstSpace.index > firstHash)
const atOnlyAfterHash = firstAt === -1 || (firstHash > -1 && firstAt > firstHash)
const colonOnlyAfterHash = firstColon === -1 || (firstHash > -1 && firstColon > firstHash)
const secondSlashOnlyAfterHash = secondSlash === -1 || (firstHash > -1 && secondSlash > firstHash)
const hasSlash = firstSlash > 0
// if a # is found, what we really want to know is that the character
// immediately before # is not a /
const doesNotEndWithSlash = firstHash > -1 ? arg[firstHash - 1] !== '/' : !arg.endsWith('/')
const doesNotStartWithDot = !arg.startsWith('.')
return spaceOnlyAfterHash && hasSlash && doesNotEndWithSlash &&
doesNotStartWithDot && atOnlyAfterHash && colonOnlyAfterHash &&
secondSlashOnlyAfterHash
}
module.exports = (giturl, opts, { gitHosts, protocols }) => {
if (!giturl) {
return
}
const correctedUrl = isGitHubShorthand(giturl) ? `github:${giturl}` : giturl
const parsed = parseUrl(correctedUrl, protocols)
if (!parsed) {
return
}
const gitHostShortcut = gitHosts.byShortcut[parsed.protocol]
const gitHostDomain = gitHosts.byDomain[parsed.hostname.startsWith('www.')
? parsed.hostname.slice(4)
: parsed.hostname]
const gitHostName = gitHostShortcut || gitHostDomain
if (!gitHostName) {
return
}
const gitHostInfo = gitHosts[gitHostShortcut || gitHostDomain]
let auth = null
if (protocols[parsed.protocol]?.auth && (parsed.username || parsed.password)) {
auth = `${parsed.username}${parsed.password ? ':' + parsed.password : ''}`
}
let committish = null
let user = null
let project = null
let defaultRepresentation = null
try {
if (gitHostShortcut) {
let pathname = parsed.pathname.startsWith('/') ? parsed.pathname.slice(1) : parsed.pathname
const firstAt = pathname.indexOf('@')
// we ignore auth for shortcuts, so just trim it out
if (firstAt > -1) {
pathname = pathname.slice(firstAt + 1)
}
const lastSlash = pathname.lastIndexOf('/')
if (lastSlash > -1) {
user = decodeURIComponent(pathname.slice(0, lastSlash))
// we want nulls only, never empty strings
if (!user) {
user = null
}
project = decodeURIComponent(pathname.slice(lastSlash + 1))
} else {
project = decodeURIComponent(pathname)
}
if (project.endsWith('.git')) {
project = project.slice(0, -4)
}
if (parsed.hash) {
committish = decodeURIComponent(parsed.hash.slice(1))
}
defaultRepresentation = 'shortcut'
} else {
if (!gitHostInfo.protocols.includes(parsed.protocol)) {
return
}
const segments = gitHostInfo.extract(parsed)
if (!segments) {
return
}
user = segments.user && decodeURIComponent(segments.user)
project = decodeURIComponent(segments.project)
committish = decodeURIComponent(segments.committish)
defaultRepresentation = protocols[parsed.protocol]?.name || parsed.protocol.slice(0, -1)
}
} catch (err) {
/* istanbul ignore else */
if (err instanceof URIError) {
return
} else {
throw err
}
}
return [gitHostName, user, auth, project, committish, defaultRepresentation, opts]
}

231
node_modules/hosted-git-info/lib/hosts.js generated vendored Normal file

@@ -0,0 +1,231 @@
/* eslint-disable max-len */
'use strict'
const maybeJoin = (...args) => args.every(arg => arg) ? args.join('') : ''
const maybeEncode = (arg) => arg ? encodeURIComponent(arg) : ''
const formatHashFragment = (f) => f.toLowerCase()
.replace(/^\W+/g, '') // strip leading non-characters
.replace(/(?<!\W)\W+$/, '') // strip trailing non-characters
.replace(/\//g, '') // strip all slashes
.replace(/\W+/g, '-') // replace remaining non-characters with '-'
const defaults = {
sshtemplate: ({ domain, user, project, committish }) =>
`git@${domain}:${user}/${project}.git${maybeJoin('#', committish)}`,
sshurltemplate: ({ domain, user, project, committish }) =>
`git+ssh://git@${domain}/${user}/${project}.git${maybeJoin('#', committish)}`,
edittemplate: ({ domain, user, project, committish, editpath, path }) =>
`https://${domain}/${user}/${project}${maybeJoin('/', editpath, '/', maybeEncode(committish || 'HEAD'), '/', path)}`,
browsetemplate: ({ domain, user, project, committish, treepath }) =>
`https://${domain}/${user}/${project}${maybeJoin('/', treepath, '/', maybeEncode(committish))}`,
browsetreetemplate: ({ domain, user, project, committish, treepath, path, fragment, hashformat }) =>
`https://${domain}/${user}/${project}/${treepath}/${maybeEncode(committish || 'HEAD')}/${path}${maybeJoin('#', hashformat(fragment || ''))}`,
browseblobtemplate: ({ domain, user, project, committish, blobpath, path, fragment, hashformat }) =>
`https://${domain}/${user}/${project}/${blobpath}/${maybeEncode(committish || 'HEAD')}/${path}${maybeJoin('#', hashformat(fragment || ''))}`,
docstemplate: ({ domain, user, project, treepath, committish }) =>
`https://${domain}/${user}/${project}${maybeJoin('/', treepath, '/', maybeEncode(committish))}#readme`,
httpstemplate: ({ auth, domain, user, project, committish }) =>
`git+https://${maybeJoin(auth, '@')}${domain}/${user}/${project}.git${maybeJoin('#', committish)}`,
filetemplate: ({ domain, user, project, committish, path }) =>
`https://${domain}/${user}/${project}/raw/${maybeEncode(committish || 'HEAD')}/${path}`,
shortcuttemplate: ({ type, user, project, committish }) =>
`${type}:${user}/${project}${maybeJoin('#', committish)}`,
pathtemplate: ({ user, project, committish }) =>
`${user}/${project}${maybeJoin('#', committish)}`,
bugstemplate: ({ domain, user, project }) =>
`https://${domain}/${user}/${project}/issues`,
hashformat: formatHashFragment,
}
const hosts = {}
hosts.github = {
// First two are insecure and generally shouldn't be used any more, but
// they are still supported.
protocols: ['git:', 'http:', 'git+ssh:', 'git+https:', 'ssh:', 'https:'],
domain: 'github.com',
treepath: 'tree',
blobpath: 'blob',
editpath: 'edit',
filetemplate: ({ auth, user, project, committish, path }) =>
`https://${maybeJoin(auth, '@')}raw.githubusercontent.com/${user}/${project}/${maybeEncode(committish || 'HEAD')}/${path}`,
gittemplate: ({ auth, domain, user, project, committish }) =>
`git://${maybeJoin(auth, '@')}${domain}/${user}/${project}.git${maybeJoin('#', committish)}`,
tarballtemplate: ({ domain, user, project, committish }) =>
`https://codeload.${domain}/${user}/${project}/tar.gz/${maybeEncode(committish || 'HEAD')}`,
extract: (url) => {
let [, user, project, type, committish] = url.pathname.split('/', 5)
if (type && type !== 'tree') {
return
}
if (!type) {
committish = url.hash.slice(1)
}
if (project && project.endsWith('.git')) {
project = project.slice(0, -4)
}
if (!user || !project) {
return
}
return { user, project, committish }
},
}
hosts.bitbucket = {
protocols: ['git+ssh:', 'git+https:', 'ssh:', 'https:'],
domain: 'bitbucket.org',
treepath: 'src',
blobpath: 'src',
editpath: '?mode=edit',
edittemplate: ({ domain, user, project, committish, treepath, path, editpath }) =>
`https://${domain}/${user}/${project}${maybeJoin('/', treepath, '/', maybeEncode(committish || 'HEAD'), '/', path, editpath)}`,
tarballtemplate: ({ domain, user, project, committish }) =>
`https://${domain}/${user}/${project}/get/${maybeEncode(committish || 'HEAD')}.tar.gz`,
extract: (url) => {
let [, user, project, aux] = url.pathname.split('/', 4)
if (['get'].includes(aux)) {
return
}
if (project && project.endsWith('.git')) {
project = project.slice(0, -4)
}
if (!user || !project) {
return
}
return { user, project, committish: url.hash.slice(1) }
},
}
hosts.gitlab = {
protocols: ['git+ssh:', 'git+https:', 'ssh:', 'https:'],
domain: 'gitlab.com',
treepath: 'tree',
blobpath: 'tree',
editpath: '-/edit',
httpstemplate: ({ auth, domain, user, project, committish }) =>
`git+https://${maybeJoin(auth, '@')}${domain}/${user}/${project}.git${maybeJoin('#', committish)}`,
tarballtemplate: ({ domain, user, project, committish }) =>
`https://${domain}/${user}/${project}/repository/archive.tar.gz?ref=${maybeEncode(committish || 'HEAD')}`,
extract: (url) => {
const path = url.pathname.slice(1)
if (path.includes('/-/') || path.includes('/archive.tar.gz')) {
return
}
const segments = path.split('/')
let project = segments.pop()
if (project.endsWith('.git')) {
project = project.slice(0, -4)
}
const user = segments.join('/')
if (!user || !project) {
return
}
return { user, project, committish: url.hash.slice(1) }
},
}
hosts.gist = {
protocols: ['git:', 'git+ssh:', 'git+https:', 'ssh:', 'https:'],
domain: 'gist.github.com',
editpath: 'edit',
sshtemplate: ({ domain, project, committish }) =>
`git@${domain}:${project}.git${maybeJoin('#', committish)}`,
sshurltemplate: ({ domain, project, committish }) =>
`git+ssh://git@${domain}/${project}.git${maybeJoin('#', committish)}`,
edittemplate: ({ domain, user, project, committish, editpath }) =>
`https://${domain}/${user}/${project}${maybeJoin('/', maybeEncode(committish))}/${editpath}`,
browsetemplate: ({ domain, project, committish }) =>
`https://${domain}/${project}${maybeJoin('/', maybeEncode(committish))}`,
browsetreetemplate: ({ domain, project, committish, path, hashformat }) =>
`https://${domain}/${project}${maybeJoin('/', maybeEncode(committish))}${maybeJoin('#', hashformat(path))}`,
browseblobtemplate: ({ domain, project, committish, path, hashformat }) =>
`https://${domain}/${project}${maybeJoin('/', maybeEncode(committish))}${maybeJoin('#', hashformat(path))}`,
docstemplate: ({ domain, project, committish }) =>
`https://${domain}/${project}${maybeJoin('/', maybeEncode(committish))}`,
httpstemplate: ({ domain, project, committish }) =>
`git+https://${domain}/${project}.git${maybeJoin('#', committish)}`,
filetemplate: ({ user, project, committish, path }) =>
`https://gist.githubusercontent.com/${user}/${project}/raw${maybeJoin('/', maybeEncode(committish))}/${path}`,
shortcuttemplate: ({ type, project, committish }) =>
`${type}:${project}${maybeJoin('#', committish)}`,
pathtemplate: ({ project, committish }) =>
`${project}${maybeJoin('#', committish)}`,
bugstemplate: ({ domain, project }) =>
`https://${domain}/${project}`,
gittemplate: ({ domain, project, committish }) =>
`git://${domain}/${project}.git${maybeJoin('#', committish)}`,
tarballtemplate: ({ project, committish }) =>
`https://codeload.github.com/gist/${project}/tar.gz/${maybeEncode(committish || 'HEAD')}`,
extract: (url) => {
let [, user, project, aux] = url.pathname.split('/', 4)
if (aux === 'raw') {
return
}
if (!project) {
if (!user) {
return
}
project = user
user = null
}
if (project.endsWith('.git')) {
project = project.slice(0, -4)
}
return { user, project, committish: url.hash.slice(1) }
},
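  // gist fragments link to files, e.g. 'My File.txt' -> 'file-my-file-txt'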
hashformat: function (fragment) {
return fragment && 'file-' + formatHashFragment(fragment)
},
}
hosts.sourcehut = {
protocols: ['git+ssh:', 'https:'],
domain: 'git.sr.ht',
treepath: 'tree',
blobpath: 'tree',
filetemplate: ({ domain, user, project, committish, path }) =>
`https://${domain}/${user}/${project}/blob/${maybeEncode(committish) || 'HEAD'}/${path}`,
httpstemplate: ({ domain, user, project, committish }) =>
`https://${domain}/${user}/${project}.git${maybeJoin('#', committish)}`,
tarballtemplate: ({ domain, user, project, committish }) =>
`https://${domain}/${user}/${project}/archive/${maybeEncode(committish) || 'HEAD'}.tar.gz`,
bugstemplate: () => null,
extract: (url) => {
let [, user, project, aux] = url.pathname.split('/', 4)
// tarball url
if (['archive'].includes(aux)) {
return
}
if (project && project.endsWith('.git')) {
project = project.slice(0, -4)
}
if (!user || !project) {
return
}
return { user, project, committish: url.hash.slice(1) }
},
}
for (const [name, host] of Object.entries(hosts)) {
hosts[name] = Object.assign({}, defaults, host)
}
module.exports = hosts

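To make the template layer concrete, here is a minimal sketch of filling one of the templates above by hand; `GitHost#fill` in `lib/index.js` does the same thing with an instance's own fields. The deep require path is an assumption based on this package's layout.

```js
// assumed deep path into the vendored package; its `files` list ships lib/
const hosts = require('hosted-git-info/lib/hosts.js')

// github's browsetreetemplate, filled in manually
const url = hosts.github.browsetreetemplate({
  domain: hosts.github.domain,     // 'github.com'
  user: 'npm',
  project: 'hosted-git-info',
  committish: 'v8.1.0',
  treepath: hosts.github.treepath, // 'tree'
  path: 'lib/hosts.js',
  fragment: '',                    // no '#...' anchor requested
  hashformat: hosts.github.hashformat,
})
// -> 'https://github.com/npm/hosted-git-info/tree/v8.1.0/lib/hosts.js'
```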
227
node_modules/hosted-git-info/lib/index.js generated vendored Normal file
View File

@@ -0,0 +1,227 @@
'use strict'
const { LRUCache } = require('lru-cache')
const hosts = require('./hosts.js')
const fromUrl = require('./from-url.js')
const parseUrl = require('./parse-url.js')
const cache = new LRUCache({ max: 1000 })
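// fallback: normalize a repository url on a host we don't recognize,
// e.g. unknownHostedUrl('git+http://example.com/a/b.git') -> 'http://example.com/a/b'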
function unknownHostedUrl (url) {
try {
const {
protocol,
hostname,
pathname,
} = new URL(url)
if (!hostname) {
return null
}
const proto = /(?:git\+)http:$/.test(protocol) ? 'http:' : 'https:'
const path = pathname.replace(/\.git$/, '')
return `${proto}//${hostname}${path}`
} catch {
return null
}
}
class GitHost {
constructor (type, user, auth, project, committish, defaultRepresentation, opts = {}) {
Object.assign(this, GitHost.#gitHosts[type], {
type,
user,
auth,
project,
committish,
default: defaultRepresentation,
opts,
})
}
static #gitHosts = { byShortcut: {}, byDomain: {} }
static #protocols = {
'git+ssh:': { name: 'sshurl' },
'ssh:': { name: 'sshurl' },
'git+https:': { name: 'https', auth: true },
'git:': { auth: true },
'http:': { auth: true },
'https:': { auth: true },
'git+http:': { auth: true },
}
static addHost (name, host) {
GitHost.#gitHosts[name] = host
GitHost.#gitHosts.byDomain[host.domain] = name
GitHost.#gitHosts.byShortcut[`${name}:`] = name
GitHost.#protocols[`${name}:`] = { name }
}
static fromUrl (giturl, opts) {
if (typeof giturl !== 'string') {
return
}
const key = giturl + JSON.stringify(opts || {})
if (!cache.has(key)) {
const hostArgs = fromUrl(giturl, opts, {
gitHosts: GitHost.#gitHosts,
protocols: GitHost.#protocols,
})
cache.set(key, hostArgs ? new GitHost(...hostArgs) : undefined)
}
return cache.get(key)
}
static fromManifest (manifest, opts = {}) {
if (!manifest || typeof manifest !== 'object') {
return
}
const r = manifest.repository
// TODO: look into also checking the `bugs`/`homepage` URLs
const rurl = r && (
typeof r === 'string'
? r
: typeof r === 'object' && typeof r.url === 'string'
? r.url
: null
)
if (!rurl) {
throw new Error('no repository')
}
const info = (rurl && GitHost.fromUrl(rurl.replace(/^git\+/, ''), opts)) || null
if (info) {
return info
}
const unk = unknownHostedUrl(rurl)
return GitHost.fromUrl(unk, opts) || unk
}
static parseUrl (url) {
return parseUrl(url)
}
#fill (template, opts) {
if (typeof template !== 'function') {
return null
}
const options = { ...this, ...this.opts, ...opts }
// the path should always be set so we don't end up with 'undefined' in urls
if (!options.path) {
options.path = ''
}
// template functions will insert the leading slash themselves
if (options.path.startsWith('/')) {
options.path = options.path.slice(1)
}
if (options.noCommittish) {
options.committish = null
}
const result = template(options)
return options.noGitPlus && result.startsWith('git+') ? result.slice(4) : result
}
hash () {
return this.committish ? `#${this.committish}` : ''
}
ssh (opts) {
return this.#fill(this.sshtemplate, opts)
}
sshurl (opts) {
return this.#fill(this.sshurltemplate, opts)
}
browse (path, ...args) {
// not a string, treat path as opts
if (typeof path !== 'string') {
return this.#fill(this.browsetemplate, path)
}
if (typeof args[0] !== 'string') {
return this.#fill(this.browsetreetemplate, { ...args[0], path })
}
return this.#fill(this.browsetreetemplate, { ...args[1], fragment: args[0], path })
}
// If the path is known to be a file, then browseFile should be used. For some hosts
// the url is the same as browse, but for others like GitHub a file can use both `/tree/`
// and `/blob/` in the path. When using a default committish of `HEAD` then the `/tree/`
// path will redirect to a specific commit. Using the `/blob/` path avoids this and
// does not redirect to a different commit.
browseFile (path, ...args) {
if (typeof args[0] !== 'string') {
return this.#fill(this.browseblobtemplate, { ...args[0], path })
}
return this.#fill(this.browseblobtemplate, { ...args[1], fragment: args[0], path })
}
docs (opts) {
return this.#fill(this.docstemplate, opts)
}
bugs (opts) {
return this.#fill(this.bugstemplate, opts)
}
https (opts) {
return this.#fill(this.httpstemplate, opts)
}
git (opts) {
return this.#fill(this.gittemplate, opts)
}
shortcut (opts) {
return this.#fill(this.shortcuttemplate, opts)
}
path (opts) {
return this.#fill(this.pathtemplate, opts)
}
tarball (opts) {
return this.#fill(this.tarballtemplate, { ...opts, noCommittish: false })
}
file (path, opts) {
return this.#fill(this.filetemplate, { ...opts, path })
}
edit (path, opts) {
return this.#fill(this.edittemplate, { ...opts, path })
}
getDefaultRepresentation () {
return this.default
}
toString (opts) {
if (this.default && typeof this[this.default] === 'function') {
return this[this.default](opts)
}
return this.sshurl(opts)
}
}
for (const [name, host] of Object.entries(hosts)) {
GitHost.addHost(name, host)
}
module.exports = GitHost

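As a usage sketch, the class above is normally reached through the package's main export. The expected values in the comments follow from the github entry in `lib/hosts.js`:

```js
const GitHost = require('hosted-git-info')

// an scp-style connect string is corrected and parsed (see lib/parse-url.js)
const info = GitHost.fromUrl('git@github.com:npm/hosted-git-info.git')
info.type                   // 'github'
info.default                // 'sshurl'
info.https()                // 'git+https://github.com/npm/hosted-git-info.git'
info.browse('lib/index.js') // 'https://github.com/npm/hosted-git-info/tree/HEAD/lib/index.js'
info.tarball()              // 'https://codeload.github.com/npm/hosted-git-info/tar.gz/HEAD'
```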
78
node_modules/hosted-git-info/lib/parse-url.js generated vendored Normal file
View File

@@ -0,0 +1,78 @@
const url = require('url')
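// e.g. lastIndexOfBefore('a@b:c#d:e', ':', '#') === 3 (the ':' after '#' is ignored)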
const lastIndexOfBefore = (str, char, beforeChar) => {
const startPosition = str.indexOf(beforeChar)
return str.lastIndexOf(char, startPosition > -1 ? startPosition : Infinity)
}
const safeUrl = (u) => {
try {
return new url.URL(u)
} catch {
// this fn should never throw
}
}
// accepts input like git:github.com:user/repo and inserts the // after the first :
const correctProtocol = (arg, protocols) => {
const firstColon = arg.indexOf(':')
const proto = arg.slice(0, firstColon + 1)
if (Object.prototype.hasOwnProperty.call(protocols, proto)) {
return arg
}
const firstAt = arg.indexOf('@')
if (firstAt > -1) {
if (firstAt > firstColon) {
return `git+ssh://${arg}`
} else {
return arg
}
}
const doubleSlash = arg.indexOf('//')
if (doubleSlash === firstColon + 1) {
return arg
}
return `${arg.slice(0, firstColon + 1)}//${arg.slice(firstColon + 1)}`
}
// attempt to correct an scp-style url so that it will parse with `new URL()`
const correctUrl = (giturl) => {
// ignore @ that come after the first hash since that denotes the start
// of a committish which can contain @ characters
const firstAt = lastIndexOfBefore(giturl, '@', '#')
// ignore colons that come after the hash since that could include colons such as:
// git@github.com:user/package-2#semver:^1.0.0
const lastColonBeforeHash = lastIndexOfBefore(giturl, ':', '#')
if (lastColonBeforeHash > firstAt) {
// the last : comes after the first @ (or there is no @)
// like it would in:
// proto://hostname.com:user/repo
// username@hostname.com:user/repo
// :password@hostname.com:user/repo
// username:password@hostname.com:user/repo
// proto://username@hostname.com:user/repo
// proto://:password@hostname.com:user/repo
// proto://username:password@hostname.com:user/repo
// then we replace the last : with a / to create a valid path
giturl = giturl.slice(0, lastColonBeforeHash) + '/' + giturl.slice(lastColonBeforeHash + 1)
}
if (lastIndexOfBefore(giturl, ':', '#') === -1 && giturl.indexOf('//') === -1) {
// we have no : at all
// as it would be in:
// username@hostname.com/user/repo
// then we prepend a protocol
giturl = `git+ssh://${giturl}`
}
return giturl
}
module.exports = (giturl, protocols) => {
const withProtocol = protocols ? correctProtocol(giturl, protocols) : giturl
return safeUrl(withProtocol) || safeUrl(correctUrl(withProtocol))
}

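A short sketch of what these corrections buy us (deep require path assumed, as with `lib/hosts.js`):

```js
const parseUrl = require('hosted-git-info/lib/parse-url.js')

// scp-style: no protocol, ':' separating host from path
const u = parseUrl('git@github.com:npm/cli')
u.protocol // 'git+ssh:'
u.href     // 'git+ssh://git@github.com/npm/cli'

// an already-valid url passes straight through `new URL()`
parseUrl('https://github.com/npm/cli').href // 'https://github.com/npm/cli'
```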
61
node_modules/hosted-git-info/package.json generated vendored Normal file
View File

@@ -0,0 +1,61 @@
{
"name": "hosted-git-info",
"version": "8.1.0",
"description": "Provides metadata and conversions from repository urls for GitHub, Bitbucket and GitLab",
"main": "./lib/index.js",
"repository": {
"type": "git",
"url": "git+https://github.com/npm/hosted-git-info.git"
},
"keywords": [
"git",
"github",
"bitbucket",
"gitlab"
],
"author": "GitHub Inc.",
"license": "ISC",
"bugs": {
"url": "https://github.com/npm/hosted-git-info/issues"
},
"homepage": "https://github.com/npm/hosted-git-info",
"scripts": {
"posttest": "npm run lint",
"snap": "tap",
"test": "tap",
"test:coverage": "tap --coverage-report=html",
"lint": "npm run eslint",
"postlint": "template-oss-check",
"lintfix": "npm run eslint -- --fix",
"template-oss-apply": "template-oss-apply --force",
"eslint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\""
},
"dependencies": {
"lru-cache": "^10.0.1"
},
"devDependencies": {
"@npmcli/eslint-config": "^5.0.0",
"@npmcli/template-oss": "4.24.3",
"tap": "^16.0.1"
},
"files": [
"bin/",
"lib/"
],
"engines": {
"node": "^18.17.0 || >=20.5.0"
},
"tap": {
"color": 1,
"coverage": true,
"nyc-arg": [
"--exclude",
"tap-snapshots/**"
]
},
"templateOSS": {
"//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
"version": "4.24.3",
"publish": "true"
}
}

15
node_modules/lru-cache/LICENSE generated vendored Normal file
View File

@@ -0,0 +1,15 @@
The ISC License

Copyright (c) 2010-2023 Isaac Z. Schlueter and Contributors

Permission to use, copy, modify, and/or distribute this software for any
purpose with or without fee is hereby granted, provided that the above
copyright notice and this permission notice appear in all copies.

THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.

331
node_modules/lru-cache/README.md generated vendored Normal file
View File

@@ -0,0 +1,331 @@
# lru-cache

A cache object that deletes the least-recently-used items.

Specify a max number of the most recently used items that you
want to keep, and this cache will keep that many of the most
recently accessed items.

This is not primarily a TTL cache, and does not make strong TTL
guarantees. There is no preemptive pruning of expired items by
default, but you _may_ set a TTL on the cache or on a single
`set`. If you do so, it will treat expired items as missing, and
delete them when fetched. If you are more interested in TTL
caching than LRU caching, check out
[@isaacs/ttlcache](http://npm.im/@isaacs/ttlcache).
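
For instance, a minimal sketch of that lazy expiry, using the
options documented below:

```js
import { LRUCache } from 'lru-cache'

const cache = new LRUCache({ max: 10, ttl: 100 }) // ttl in milliseconds
cache.set('a', 1)
cache.get('a') // 1, still fresh

setTimeout(() => {
  // the entry is now stale: it is treated as missing, and deleted
  // as a side effect of this lookup (no preemptive pruning happened)
  cache.get('a') // undefined
}, 200)
```
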
As of version 7, this is one of the most performant LRU
implementations available in JavaScript, and supports a wide
diversity of use cases. However, note that using some of the
features will necessarily impact performance, by causing the
cache to have to do more work. See the "Performance" section
below.
## Installation
```bash
npm install lru-cache --save
```
## Usage
```js
// hybrid module, either works
import { LRUCache } from 'lru-cache'
// or:
const { LRUCache } = require('lru-cache')
// or in minified form for web browsers:
import { LRUCache } from 'http://unpkg.com/lru-cache@9/dist/mjs/index.min.mjs'
// At least one of 'max', 'ttl', or 'maxSize' is required, to prevent
// unsafe unbounded storage.
//
// In most cases, it's best to specify a max for performance, so all
// the required memory allocation is done up-front.
//
// All the other options are optional, see the sections below for
// documentation on what each one does. Most of them can be
// overridden for specific items in get()/set()
const options = {
max: 500,
// for use with tracking overall storage size
maxSize: 5000,
sizeCalculation: (value, key) => {
return 1
},
// for use when you need to clean up something when objects
// are evicted from the cache
dispose: (value, key) => {
freeFromMemoryOrWhatever(value)
},
// how long to live in ms
ttl: 1000 * 60 * 5,
// return stale items before removing from cache?
allowStale: false,
updateAgeOnGet: false,
updateAgeOnHas: false,
// async method to use for cache.fetch(), for
// stale-while-revalidate type of behavior
fetchMethod: async (
key,
staleValue,
{ options, signal, context }
) => {},
}
const cache = new LRUCache(options)
cache.set('key', 'value')
cache.get('key') // "value"
// non-string keys ARE fully supported
// but note that it must be THE SAME object, not
// just a JSON-equivalent object.
var someObject = { a: 1 }
cache.set(someObject, 'a value')
// Object keys are not toString()-ed
cache.set('[object Object]', 'a different value')
assert.equal(cache.get(someObject), 'a value')
// A similar object with same keys/values won't work,
// because it's a different object identity
assert.equal(cache.get({ a: 1 }), undefined)
cache.clear() // empty the cache
```
If you put more stuff in the cache, then less recently used items
will fall out. That's what an LRU cache is.

For full description of the API and all options, please see [the
LRUCache typedocs](https://isaacs.github.io/node-lru-cache/).
## Storage Bounds Safety

This implementation aims to be as flexible as possible, within
the limits of safe memory consumption and optimal performance.

At initial object creation, storage is allocated for `max` items.
If `max` is set to zero, then some performance is lost, and item
count is unbounded. Either `maxSize` or `ttl` _must_ be set if
`max` is not specified.

If `maxSize` is set, then this creates a safe limit on the
maximum storage consumed, but without the performance benefits of
pre-allocation. When `maxSize` is set, every item _must_ provide
a size, either via the `sizeCalculation` method provided to the
constructor, or via a `size` or `sizeCalculation` option provided
to `cache.set()`. The size of every item _must_ be a positive
integer.

If neither `max` nor `maxSize` are set, then `ttl` tracking must
be enabled. Note that, even when tracking item `ttl`, items are
_not_ preemptively deleted when they become stale, unless
`ttlAutopurge` is enabled. Instead, they are only purged the
next time the key is requested. Thus, if `ttlAutopurge`, `max`,
and `maxSize` are all not set, then the cache will potentially
grow unbounded.

In this case, a warning is printed to standard error. Future
versions may require the use of `ttlAutopurge` if `max` and
`maxSize` are not specified.

If you truly wish to use a cache that is bound _only_ by TTL
expiration, consider using a `Map` object, and calling
`setTimeout` to delete entries when they expire. It will perform
much better than an LRU cache.

Here is an implementation you may use, under the same
[license](./LICENSE) as this package:
```js
// a storage-unbounded ttl cache that is not an lru-cache
const cache = {
data: new Map(),
timers: new Map(),
set: (k, v, ttl) => {
if (cache.timers.has(k)) {
clearTimeout(cache.timers.get(k))
}
cache.timers.set(
k,
setTimeout(() => cache.delete(k), ttl)
)
cache.data.set(k, v)
},
get: k => cache.data.get(k),
has: k => cache.data.has(k),
delete: k => {
if (cache.timers.has(k)) {
clearTimeout(cache.timers.get(k))
}
cache.timers.delete(k)
return cache.data.delete(k)
},
clear: () => {
cache.data.clear()
for (const v of cache.timers.values()) {
clearTimeout(v)
}
cache.timers.clear()
},
}
```
If that isn't to your liking, check out
[@isaacs/ttlcache](http://npm.im/@isaacs/ttlcache).

## Storing Undefined Values

This cache never stores undefined values, as `undefined` is used
internally in a few places to indicate that a key is not in the
cache.

You may call `cache.set(key, undefined)`, but this is just
an alias for `cache.delete(key)`. Note that this has the effect
that `cache.has(key)` will return _false_ after setting it to
undefined.
```js
cache.set(myKey, undefined)
cache.has(myKey) // false!
```
If you need to track `undefined` values, and still note that the
key is in the cache, an easy workaround is to use a sigil object
of your own.
```js
import { LRUCache } from 'lru-cache'
const undefinedValue = Symbol('undefined')
const cache = new LRUCache(...)
const mySet = (key, value) =>
cache.set(key, value === undefined ? undefinedValue : value)
const myGet = (key, value) => {
const v = cache.get(key)
return v === undefinedValue ? undefined : v
}
```
## Performance

As of January 2022, version 7 of this library is one of the most
performant LRU cache implementations in JavaScript.

Benchmarks can be extremely difficult to get right. In
particular, the performance of set/get/delete operations on
objects will vary _wildly_ depending on the type of key used. V8
is highly optimized for objects with keys that are short strings,
especially integer numeric strings. Thus any benchmark which
tests _solely_ using numbers as keys will tend to find that an
object-based approach performs the best.

Note that coercing _anything_ to strings to use as object keys is
unsafe, unless you can be 100% certain that no other type of
value will be used. For example:
```js
const myCache = {}
const set = (k, v) => (myCache[k] = v)
const get = k => myCache[k]
set({}, 'please hang onto this for me')
set('[object Object]', 'oopsie')
```
Also beware of "Just So" stories regarding performance. Garbage
collection of large (especially: deep) object graphs can be
incredibly costly, with several "tipping points" where it
increases exponentially. As a result, putting that off until
later can make it much worse, and less predictable. If a library
performs well, but only in a scenario where the object graph is
kept shallow, then that won't help you if you are using large
objects as keys.

In general, when attempting to use a library to improve
performance (such as a cache like this one), it's best to choose
an option that will perform well in the sorts of scenarios where
you'll actually use it.

This library is optimized for repeated gets and minimizing
eviction time, since that is the expected need of an LRU. Set
operations are somewhat slower on average than a few other
options, in part because of that optimization. It is assumed
that you'll be caching some costly operation, ideally as rarely
as possible, so optimizing set over get would be unwise.

If performance matters to you:
1. If it's at all possible to use small integer values as keys,
   and you can guarantee that no other types of values will be
   used as keys, then do that, and use a cache such as
   [lru-fast](https://npmjs.com/package/lru-fast), or
   [mnemonist's
   LRUCache](https://yomguithereal.github.io/mnemonist/lru-cache)
   which uses an Object as its data store.

2. Failing that, if at all possible, use short non-numeric
   strings (i.e., less than 256 characters) as your keys, and use
   [mnemonist's
   LRUCache](https://yomguithereal.github.io/mnemonist/lru-cache).

3. If the types of your keys will be anything else, especially
   long strings, strings that look like floats, objects, or some
   mix of types, or if you aren't sure, then this library will
   work well for you.

   If you do not need the features that this library provides
   (like asynchronous fetching, a variety of TTL staleness
   options, and so on), then [mnemonist's
   LRUMap](https://yomguithereal.github.io/mnemonist/lru-map) is
   a very good option, and just slightly faster than this module
   (since it does considerably less).

4. Do not use a `dispose` function, size tracking, or especially
   ttl behavior, unless absolutely needed. These features are
   convenient, and necessary in some use cases, and every attempt
   has been made to make the performance impact minimal, but it
   isn't nothing.
## Breaking Changes in Version 7

This library changed to a different algorithm and internal data
structure in version 7, yielding significantly better
performance, albeit with some subtle changes as a result.

If you were relying on the internals of LRUCache in version 6 or
before, it probably will not work in version 7 and above.

## Breaking Changes in Version 8

- The `fetchContext` option was renamed to `context`, and may no
  longer be set on the cache instance itself.
- Rewritten in TypeScript, so pretty much all the types moved
  around a lot.
- The AbortController/AbortSignal polyfill was removed. For this
  reason, **Node version 16.14.0 or higher is now required**.
- Internal properties were moved to actual private class
  properties.
- Keys and values must not be `null` or `undefined`.
- Minified export available at `'lru-cache/min'`, for both CJS
  and MJS builds.

## Breaking Changes in Version 9

- Named export only, no default export.
- AbortController polyfill returned, albeit with a warning when
  used.

## Breaking Changes in Version 10

- `cache.fetch()` return type is now `Promise<V | undefined>`
  instead of `Promise<V | void>`. This is an irrelevant change
  practically speaking, but can require changes for TypeScript
  users.
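
For illustration, a sketch of the `fetch()` flow this refers to;
the upstream loader here is a hypothetical stand-in:

```js
import { LRUCache } from 'lru-cache'

// hypothetical loader, called by the cache on a miss
const loadFromUpstream = async key => ({ key, loadedAt: Date.now() })

const cache = new LRUCache({
  max: 100,
  fetchMethod: async key => loadFromUpstream(key),
})

// resolves to the value, or undefined if nothing could be fetched
const value = await cache.fetch('some-key')
```
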
For more info, see the [change log](CHANGELOG.md).

File diff suppressed because it is too large

File diff suppressed because one or more lines are too long

File diff suppressed because it is too large

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

3
node_modules/lru-cache/dist/commonjs/package.json generated vendored Normal file
View File

@@ -0,0 +1,3 @@
{
"type": "commonjs"
}

File diff suppressed because it is too large

File diff suppressed because one or more lines are too long

File diff suppressed because it is too large

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

3
node_modules/lru-cache/dist/esm/package.json generated vendored Normal file
View File

@@ -0,0 +1,3 @@
{
"type": "module"
}

116
node_modules/lru-cache/package.json generated vendored Normal file
View File

@@ -0,0 +1,116 @@
{
"name": "lru-cache",
"publishConfig": {
"tag": "legacy-v10"
},
"description": "A cache object that deletes the least-recently-used items.",
"version": "10.4.3",
"author": "Isaac Z. Schlueter <i@izs.me>",
"keywords": [
"mru",
"lru",
"cache"
],
"sideEffects": false,
"scripts": {
"build": "npm run prepare",
"prepare": "tshy && bash fixup.sh",
"pretest": "npm run prepare",
"presnap": "npm run prepare",
"test": "tap",
"snap": "tap",
"preversion": "npm test",
"postversion": "npm publish",
"prepublishOnly": "git push origin --follow-tags",
"format": "prettier --write .",
"typedoc": "typedoc --tsconfig ./.tshy/esm.json ./src/*.ts",
"benchmark-results-typedoc": "bash scripts/benchmark-results-typedoc.sh",
"prebenchmark": "npm run prepare",
"benchmark": "make -C benchmark",
"preprofile": "npm run prepare",
"profile": "make -C benchmark profile"
},
"main": "./dist/commonjs/index.js",
"types": "./dist/commonjs/index.d.ts",
"tshy": {
"exports": {
".": "./src/index.ts",
"./min": {
"import": {
"types": "./dist/esm/index.d.ts",
"default": "./dist/esm/index.min.js"
},
"require": {
"types": "./dist/commonjs/index.d.ts",
"default": "./dist/commonjs/index.min.js"
}
}
}
},
"repository": {
"type": "git",
"url": "git://github.com/isaacs/node-lru-cache.git"
},
"devDependencies": {
"@types/node": "^20.2.5",
"@types/tap": "^15.0.6",
"benchmark": "^2.1.4",
"esbuild": "^0.17.11",
"eslint-config-prettier": "^8.5.0",
"marked": "^4.2.12",
"mkdirp": "^2.1.5",
"prettier": "^2.6.2",
"tap": "^20.0.3",
"tshy": "^2.0.0",
"tslib": "^2.4.0",
"typedoc": "^0.25.3",
"typescript": "^5.2.2"
},
"license": "ISC",
"files": [
"dist"
],
"prettier": {
"semi": false,
"printWidth": 70,
"tabWidth": 2,
"useTabs": false,
"singleQuote": true,
"jsxSingleQuote": false,
"bracketSameLine": true,
"arrowParens": "avoid",
"endOfLine": "lf"
},
"tap": {
"node-arg": [
"--expose-gc"
],
"plugin": [
"@tapjs/clock"
]
},
"exports": {
".": {
"import": {
"types": "./dist/esm/index.d.ts",
"default": "./dist/esm/index.js"
},
"require": {
"types": "./dist/commonjs/index.d.ts",
"default": "./dist/commonjs/index.js"
}
},
"./min": {
"import": {
"types": "./dist/esm/index.d.ts",
"default": "./dist/esm/index.min.js"
},
"require": {
"types": "./dist/commonjs/index.d.ts",
"default": "./dist/commonjs/index.min.js"
}
}
},
"type": "module",
"module": "./dist/esm/index.js"
}

61
node_modules/@npmcli/package-json/package.json generated vendored Normal file
View File

@@ -0,0 +1,61 @@
{
"name": "@npmcli/package-json",
"version": "6.2.0",
"description": "Programmatic API to update package.json",
"keywords": [
"npm",
"oss"
],
"repository": {
"type": "git",
"url": "git+https://github.com/npm/package-json.git"
},
"license": "ISC",
"author": "GitHub Inc.",
"main": "lib/index.js",
"files": [
"bin/",
"lib/"
],
"scripts": {
"snap": "tap",
"test": "tap",
"lint": "npm run eslint",
"lintfix": "npm run eslint -- --fix",
"posttest": "npm run lint",
"postsnap": "npm run lintfix --",
"postlint": "template-oss-check",
"template-oss-apply": "template-oss-apply --force",
"eslint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\""
},
"dependencies": {
"@npmcli/git": "^6.0.0",
"glob": "^10.2.2",
"hosted-git-info": "^8.0.0",
"json-parse-even-better-errors": "^4.0.0",
"proc-log": "^5.0.0",
"semver": "^7.5.3",
"validate-npm-package-license": "^3.0.4"
},
"devDependencies": {
"@npmcli/eslint-config": "^5.1.0",
"@npmcli/template-oss": "4.23.6",
"read-package-json": "^7.0.0",
"read-package-json-fast": "^4.0.0",
"tap": "^16.0.1"
},
"engines": {
"node": "^18.17.0 || >=20.5.0"
},
"templateOSS": {
"//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
"version": "4.23.6",
"publish": "true"
},
"tap": {
"nyc-arg": [
"--exclude",
"tap-snapshots/**"
]
}
}