mirror of https://github.com/nodejs/node.git

deps: upgrade npm to 8.3.0

PR-URL: https://github.com/nodejs/node/pull/41127
Reviewed-By: Rich Trott <rtrott@gmail.com>
Reviewed-By: Luigi Pinca <luigipinca@gmail.com>
Reviewed-By: Colin Ihrig <cjihrig@gmail.com>

parent e46446dceb
commit 0f5aedb4a8

@ -871,6 +871,109 @@ if (foo) {
Entries in `optionalDependencies` will override entries of the same name in
`dependencies`, so it's usually best to only put in one place.

### overrides

If you need to make specific changes to dependencies of your dependencies, for
example replacing the version of a dependency with a known security issue,
replacing an existing dependency with a fork, or making sure that the same
version of a package is used everywhere, then you may add an override.

Overrides provide a way to replace a package in your dependency tree with
another version, or another package entirely. These changes can be scoped as
specific or as vague as desired.

To make sure the package `foo` is always installed as version `1.0.0` no matter
what version your dependencies rely on:

```json
{
  "overrides": {
    "foo": "1.0.0"
  }
}
```

The above is a short hand notation, the full object form can be used to allow
overriding a package itself as well as a child of the package. This will cause
`foo` to always be `1.0.0` while also making `bar` at any depth beyond `foo`
also `1.0.0`:

```json
{
  "overrides": {
    "foo": {
      ".": "1.0.0",
      "bar": "1.0.0"
    }
  }
}
```

To only override `foo` to be `1.0.0` when it's a child (or grandchild, or great
grandchild, etc) of the package `bar`:

```json
{
  "overrides": {
    "bar": {
      "foo": "1.0.0"
    }
  }
}
```

Keys can be nested to any arbitrary length. To override `foo` only when it's a
child of `bar` and only when `bar` is a child of `baz`:

```json
{
  "overrides": {
    "baz": {
      "bar": {
        "foo": "1.0.0"
      }
    }
  }
}
```

The key of an override can also include a version, or range of versions.
To override `foo` to `1.0.0`, but only when it's a child of `bar@2.0.0`:

```json
{
  "overrides": {
    "bar@2.0.0": {
      "foo": "1.0.0"
    }
  }
}
```

You may not set an override for a package that you directly depend on unless
both the dependency and the override itself share the exact same spec. To make
this limitation easier to deal with, overrides may also be defined as a
reference to a spec for a direct dependency by prefixing the name of the
package you wish the version to match with a `$`.

```json
{
  "dependencies": {
    "foo": "^1.0.0"
  },
  "overrides": {
    // BAD, will throw an EOVERRIDE error
    // "foo": "^2.0.0"
    // GOOD, specs match so override is allowed
    // "foo": "^1.0.0"
    // BEST, the override is defined as a reference to the dependency
    "foo": "$foo",
    // the referenced package does not need to match the overridden one
    "bar": "$foo"
  }
}
```

### engines

You can specify the version of node that your stuff works on:

@ -160,7 +160,7 @@ tree at all, use <a href="../commands/npm-explain.html"><code>npm explain</code>
the results to only the paths to the packages named. Note that nested
packages will <em>also</em> show the paths to the specified packages. For
example, running <code>npm ls promzard</code> in npm's source tree will show:</p>
<pre lang="bash"><code>npm@8.2.0 /path/to/npm
<pre lang="bash"><code>npm@8.3.0 /path/to/npm
└─┬ init-package-json@0.0.4
  └── promzard@0.1.5
</code></pre>

@ -149,7 +149,7 @@ npm command-line interface
<pre lang="bash"><code>npm <command> [args]
</code></pre>
<h3 id="version">Version</h3>
<p>8.2.0</p>
<p>8.3.0</p>
<h3 id="description">Description</h3>
<p>npm is the package manager for the Node JavaScript platform. It puts
modules in place so that node can find them, and manages dependency

@ -142,7 +142,7 @@ npm command-line interface

<section id="table_of_contents">
<h2 id="table-of-contents">Table of contents</h2>
<div id="_table_of_contents"><ul><li><a href="#description">Description</a></li><li><a href="#name">name</a></li><li><a href="#version">version</a></li><li><a href="#description2">description</a></li><li><a href="#keywords">keywords</a></li><li><a href="#homepage">homepage</a></li><li><a href="#bugs">bugs</a></li><li><a href="#license">license</a></li><li><a href="#people-fields-author-contributors">people fields: author, contributors</a></li><li><a href="#funding">funding</a></li><li><a href="#files">files</a></li><li><a href="#main">main</a></li><li><a href="#browser">browser</a></li><li><a href="#bin">bin</a></li><li><a href="#man">man</a></li><li><a href="#directories">directories</a></li><ul><li><a href="#directoriesbin">directories.bin</a></li><li><a href="#directoriesman">directories.man</a></li></ul><li><a href="#repository">repository</a></li><li><a href="#scripts">scripts</a></li><li><a href="#config">config</a></li><li><a href="#dependencies">dependencies</a></li><ul><li><a href="#urls-as-dependencies">URLs as Dependencies</a></li><li><a href="#git-urls-as-dependencies">Git URLs as Dependencies</a></li><li><a href="#github-urls">GitHub URLs</a></li><li><a href="#local-paths">Local Paths</a></li></ul><li><a href="#devdependencies">devDependencies</a></li><li><a href="#peerdependencies">peerDependencies</a></li><li><a href="#peerdependenciesmeta">peerDependenciesMeta</a></li><li><a href="#bundleddependencies">bundledDependencies</a></li><li><a href="#optionaldependencies">optionalDependencies</a></li><li><a href="#engines">engines</a></li><li><a href="#os">os</a></li><li><a href="#cpu">cpu</a></li><li><a href="#private">private</a></li><li><a href="#publishconfig">publishConfig</a></li><li><a href="#workspaces">workspaces</a></li><li><a href="#default-values">DEFAULT VALUES</a></li><li><a href="#see-also">SEE ALSO</a></li></ul></div>
<div id="_table_of_contents"><ul><li><a href="#description">Description</a></li><li><a href="#name">name</a></li><li><a href="#version">version</a></li><li><a href="#description2">description</a></li><li><a href="#keywords">keywords</a></li><li><a href="#homepage">homepage</a></li><li><a href="#bugs">bugs</a></li><li><a href="#license">license</a></li><li><a href="#people-fields-author-contributors">people fields: author, contributors</a></li><li><a href="#funding">funding</a></li><li><a href="#files">files</a></li><li><a href="#main">main</a></li><li><a href="#browser">browser</a></li><li><a href="#bin">bin</a></li><li><a href="#man">man</a></li><li><a href="#directories">directories</a></li><ul><li><a href="#directoriesbin">directories.bin</a></li><li><a href="#directoriesman">directories.man</a></li></ul><li><a href="#repository">repository</a></li><li><a href="#scripts">scripts</a></li><li><a href="#config">config</a></li><li><a href="#dependencies">dependencies</a></li><ul><li><a href="#urls-as-dependencies">URLs as Dependencies</a></li><li><a href="#git-urls-as-dependencies">Git URLs as Dependencies</a></li><li><a href="#github-urls">GitHub URLs</a></li><li><a href="#local-paths">Local Paths</a></li></ul><li><a href="#devdependencies">devDependencies</a></li><li><a href="#peerdependencies">peerDependencies</a></li><li><a href="#peerdependenciesmeta">peerDependenciesMeta</a></li><li><a href="#bundleddependencies">bundledDependencies</a></li><li><a href="#optionaldependencies">optionalDependencies</a></li><li><a href="#overrides">overrides</a></li><li><a href="#engines">engines</a></li><li><a href="#os">os</a></li><li><a href="#cpu">cpu</a></li><li><a href="#private">private</a></li><li><a href="#publishconfig">publishConfig</a></li><li><a href="#workspaces">workspaces</a></li><li><a href="#default-values">DEFAULT VALUES</a></li><li><a href="#see-also">SEE ALSO</a></li></ul></div>
</section>

<div id="_content"><h3 id="description">Description</h3>

@ -800,6 +800,88 @@ if (foo) {
</code></pre>
<p>Entries in <code>optionalDependencies</code> will override entries of the same name in
<code>dependencies</code>, so it's usually best to only put in one place.</p>
<h3 id="overrides">overrides</h3>
<p>If you need to make specific changes to dependencies of your dependencies, for
example replacing the version of a dependency with a known security issue,
replacing an existing dependency with a fork, or making sure that the same
version of a package is used everywhere, then you may add an override.</p>
<p>Overrides provide a way to replace a package in your dependency tree with
another version, or another package entirely. These changes can be scoped as
specific or as vague as desired.</p>
<p>To make sure the package <code>foo</code> is always installed as version <code>1.0.0</code> no matter
what version your dependencies rely on:</p>
<pre lang="json"><code>{
  "overrides": {
    "foo": "1.0.0"
  }
}
</code></pre>
<p>The above is a short hand notation, the full object form can be used to allow
overriding a package itself as well as a child of the package. This will cause
<code>foo</code> to always be <code>1.0.0</code> while also making <code>bar</code> at any depth beyond <code>foo</code>
also <code>1.0.0</code>:</p>
<pre lang="json"><code>{
  "overrides": {
    "foo": {
      ".": "1.0.0",
      "bar": "1.0.0"
    }
  }
}
</code></pre>
<p>To only override <code>foo</code> to be <code>1.0.0</code> when it's a child (or grandchild, or great
grandchild, etc) of the package <code>bar</code>:</p>
<pre lang="json"><code>{
  "overrides": {
    "bar": {
      "foo": "1.0.0"
    }
  }
}
</code></pre>
<p>Keys can be nested to any arbitrary length. To override <code>foo</code> only when it's a
child of <code>bar</code> and only when <code>bar</code> is a child of <code>baz</code>:</p>
<pre lang="json"><code>{
  "overrides": {
    "baz": {
      "bar": {
        "foo": "1.0.0"
      }
    }
  }
}
</code></pre>
<p>The key of an override can also include a version, or range of versions.
To override <code>foo</code> to <code>1.0.0</code>, but only when it's a child of <code>bar@2.0.0</code>:</p>
<pre lang="json"><code>{
  "overrides": {
    "bar@2.0.0": {
      "foo": "1.0.0"
    }
  }
}
</code></pre>
<p>You may not set an override for a package that you directly depend on unless
both the dependency and the override itself share the exact same spec. To make
this limitation easier to deal with, overrides may also be defined as a
reference to a spec for a direct dependency by prefixing the name of the
package you wish the version to match with a <code>$</code>.</p>
<pre lang="json"><code>{
  "dependencies": {
    "foo": "^1.0.0"
  },
  "overrides": {
    // BAD, will throw an EOVERRIDE error
    // "foo": "^2.0.0"
    // GOOD, specs match so override is allowed
    // "foo": "^1.0.0"
    // BEST, the override is defined as a reference to the dependency
    "foo": "$foo",
    // the referenced package does not need to match the overridden one
    "bar": "$foo"
  }
}
</code></pre>
<h3 id="engines">engines</h3>
<p>You can specify the version of node that your stuff works on:</p>
<pre lang="json"><code>{

@ -2,7 +2,7 @@
const configDefs = require('../utils/config/index.js')

const mkdirp = require('mkdirp-infer-owner')
const { dirname } = require('path')
const { dirname, resolve } = require('path')
const { promisify } = require('util')
const fs = require('fs')
const readFile = promisify(fs.readFile)

@ -11,6 +11,7 @@ const { spawn } = require('child_process')
const { EOL } = require('os')
const ini = require('ini')
const localeCompare = require('@isaacs/string-locale-compare')('en')
const rpj = require('read-package-json-fast')
const log = require('../utils/log-shim.js')

// take an array of `[key, value, k2=v2, k3, v3, ...]` and turn into

@ -28,7 +29,17 @@ const keyValues = args => {
  return kv
}

const publicVar = k => !/^(\/\/[^:]+:)?_/.test(k)
const publicVar = k => {
  // _password
  if (k.startsWith('_')) {
    return false
  }
  // //localhost:8080/:_password
  if (k.startsWith('//') && k.includes(':_')) {
    return false
  }
  return true
}

const BaseCommand = require('../base-command.js')
class Config extends BaseCommand {

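For clarity, here is a small standalone sketch (not part of the diff) of how the rewritten `publicVar` above classifies keys; the sample keys are illustrative only:

```js
// Illustrative check of the protected-key rules above (sample keys are made up).
const publicVar = k => {
  if (k.startsWith('_')) {
    return false
  }
  if (k.startsWith('//') && k.includes(':_')) {
    return false
  }
  return true
}

console.log(publicVar('_authToken'))                          // false: bare protected key
console.log(publicVar('//registry.example.com/:_authToken'))  // false: registry-scoped protected key
console.log(publicVar('registry'))                            // true: safe to print
```
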
@ -147,7 +158,7 @@ class Config extends BaseCommand {
    const out = []
    for (const key of keys) {
      if (!publicVar(key)) {
        throw `The ${key} option is protected, and cannot be retrieved in this way`
        throw new Error(`The ${key} option is protected, and cannot be retrieved in this way`)
      }

      const pref = keys.length > 1 ? `${key}=` : ''

@ -257,6 +268,23 @@ ${defData}
        `; HOME = ${process.env.HOME}`,
        '; Run `npm config ls -l` to show all defaults.'
      )
      msg.push('')
    }

    if (!this.npm.config.get('global')) {
      const pkgPath = resolve(this.npm.prefix, 'package.json')
      const pkg = await rpj(pkgPath).catch(() => ({}))

      if (pkg.publishConfig) {
        msg.push(`; "publishConfig" from ${pkgPath}`)
        msg.push('; This set of config values will be used at publish-time.', '')
        const pkgKeys = Object.keys(pkg.publishConfig).sort(localeCompare)
        for (const k of pkgKeys) {
          const v = publicVar(k) ? JSON.stringify(pkg.publishConfig[k]) : '(protected)'
          msg.push(`${k} = ${v}`)
        }
        msg.push('')
      }
    }

    this.npm.output(msg.join('\n').trim())

@ -104,11 +104,15 @@ class Publish extends BaseCommand {
    const resolved = npa.resolve(manifest.name, manifest.version)
    const registry = npmFetch.pickRegistry(resolved, opts)
    const creds = this.npm.config.getCredentialsByURI(registry)
    const outputRegistry = replaceInfo(registry)
    if (!creds.token && !creds.username) {
      throw Object.assign(new Error('This command requires you to be logged in.'), {
        code: 'ENEEDAUTH',
      })
      throw Object.assign(
        new Error(`This command requires you to be logged in to ${outputRegistry}`), {
          code: 'ENEEDAUTH',
        }
      )
    }
    log.notice('', `Publishing to ${outputRegistry}`)
    await otplease(opts, opts => libpub(manifest, tarballData, opts))
  }

@ -116,6 +116,7 @@ const exitHandler = err => {
    exitCode = err.code
    noLogMessage = true
  } else if (typeof err === 'string') {
    // XXX: we should stop throwing strings
    log.error('', err)
    noLogMessage = true
  } else if (!(err instanceof Error)) {

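A minimal illustration (mine, not from the patch) of why thrown strings get their own branch above: a string carries neither a stack trace nor a `code`, so the handler can only log its text.

```js
// Hypothetical example of a string throw, as the old config code did.
try {
  throw 'The foo option is protected'
} catch (err) {
  console.log(err instanceof Error) // false
  console.log(err.code)             // undefined
  console.log(typeof err)           // 'string' -> the exit handler just logs the text
}
```
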
@ -8,6 +8,8 @@ const fsMiniPass = require('fs-minipass')
const log = require('./log-shim')
const withChownSync = require('./with-chown-sync')

const padZero = (n, length) => n.toString().padStart(length.toString().length, '0')

const _logHandler = Symbol('logHandler')
const _formatLogItem = Symbol('formatLogItem')
const _getLogFilePath = Symbol('getLogFilePath')
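A quick sketch of what the new `padZero` helper produces, assuming a hypothetical maximum file count; the point is that padding the counter to the width of the maximum keeps log file names lexicographically sorted in numeric order.

```js
// Hypothetical maximum values; the real ones come from the maxFilesPerProcess option.
const padZero = (n, length) => n.toString().padStart(length.toString().length, '0')

console.log(padZero(3, 10))   // '03'  -> e.g. ...-debug-03.log
console.log(padZero(3, 100))  // '003'
console.log(padZero(42, 100)) // '042' -> sorts after '041' and before '100'
```
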
@ -34,7 +36,7 @@ class LogFiles {
  // here for infinite loops that still log. This is also partially handled
  // by the config.get('max-files') option, but this is a failsafe to
  // prevent runaway log file creation
  #MAX_LOG_FILES_PER_PROCESS = null
  #MAX_FILES_PER_PROCESS = null

  #fileLogCount = 0
  #totalLogCount = 0

@ -48,7 +50,7 @@ class LogFiles {
  } = {}) {
    this.#logId = LogFiles.logId(new Date())
    this.#MAX_LOGS_PER_FILE = maxLogsPerFile
    this.#MAX_LOG_FILES_PER_PROCESS = maxFilesPerProcess
    this.#MAX_FILES_PER_PROCESS = maxFilesPerProcess
    this.on()
  }

@ -56,10 +58,6 @@ class LogFiles {
    return d.toISOString().replace(/[.:]/g, '_')
  }

  static fileName (prefix, suffix) {
    return `${prefix}-debug-${suffix}.log`
  }

  static format (count, level, title, ...args) {
    let prefix = `${count} ${level}`
    if (title) {

@ -149,7 +147,7 @@ class LogFiles {
    if (this.#fileLogCount >= this.#MAX_LOGS_PER_FILE) {
      // Write last chunk to the file and close it
      this[_endStream](logOutput)
      if (this.#files.length >= this.#MAX_LOG_FILES_PER_PROCESS) {
      if (this.#files.length >= this.#MAX_FILES_PER_PROCESS) {
        // but if its way too many then we just stop listening
        this.off()
      } else {

@ -166,23 +164,21 @@ class LogFiles {
    return LogFiles.format(this.#totalLogCount++, ...args)
  }

  [_getLogFilePath] (prefix, suffix) {
    return path.resolve(this.#dir, LogFiles.fileName(prefix, suffix))
  [_getLogFilePath] (prefix, suffix, sep = '-') {
    return path.resolve(this.#dir, prefix + sep + 'debug' + sep + suffix + '.log')
  }

  [_openLogFile] () {
    // Count in filename will be 0 indexed
    const count = this.#files.length

    // Pad with zeros so that our log files are always sorted properly
    // We never want to write files ending in `-9.log` and `-10.log` because
    // log file cleaning is done by deleting the oldest so in this example
    // `-10.log` would be deleted next
    const countDigits = this.#MAX_LOG_FILES_PER_PROCESS.toString().length

    try {
      const logStream = withChownSync(
        this[_getLogFilePath](this.#logId, count.toString().padStart(countDigits, '0')),
        // Pad with zeros so that our log files are always sorted properly
        // We never want to write files ending in `-9.log` and `-10.log` because
        // log file cleaning is done by deleting the oldest so in this example
        // `-10.log` would be deleted next
        this[_getLogFilePath](this.#logId, padZero(count, this.#MAX_FILES_PER_PROCESS)),
        // Some effort was made to make the async, but we need to write logs
        // during process.on('exit') which has to be synchronous. So in order
        // to never drop log messages, it is easiest to make it sync all the time

@ -214,14 +210,13 @@ class LogFiles {
      return
    }

    // Add 1 to account for the current log file and make
    // minimum config 0 so current log file is never deleted
    // XXX: we should make a separate documented option to
    // disable log file writing
    const max = Math.max(this.#logsMax, 0) + 1
    try {
      const files = await glob(this[_getLogFilePath]('*', '*'))
      const toDelete = files.length - max
      // Handle the old (prior to 8.2.0) log file names which did not have an counter suffix
      // so match by anything after `-debug` and before `.log` (including nothing)
      const logGlob = this[_getLogFilePath]('*-', '*', '')
      // Always ignore the currently written files
      const files = await glob(logGlob, { ignore: this.#files })
      const toDelete = files.length - this.#logsMax

      if (toDelete <= 0) {
        return

@ -233,7 +228,7 @@ class LogFiles {
        try {
          await rimraf(file)
        } catch (e) {
          log.warn('logfile', 'error removing log file', file, e)
          log.silly('logfile', 'error removing log file', file, e)
        }
      }
    } catch (e) {

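To make the cleanup change above concrete, here is a sketch (with a made-up directory and log id) of the paths the reworked `[_getLogFilePath]` builds, including the widened glob used to match log names that predate the counter suffix:

```js
// With sep = '' the template collapses to `*-debug*.log`, which matches both the
// old names without a counter suffix and the new `<id>-debug-00.log` style names,
// since "anything after -debug and before .log" may be empty.
const path = require('path')

// standalone stand-in for the private [_getLogFilePath] method above
const getLogFilePath = (dir, prefix, suffix, sep = '-') =>
  path.resolve(dir, prefix + sep + 'debug' + sep + suffix + '.log')

// hypothetical log directory and log id
console.log(getLogFilePath('/tmp/_logs', '2021-12-08T17_31_03_247Z', '00'))
// -> /tmp/_logs/2021-12-08T17_31_03_247Z-debug-00.log
console.log(getLogFilePath('/tmp/_logs', '*-', '*', ''))
// -> /tmp/_logs/*-debug*.log   (the cleanup glob)
```
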
@ -26,7 +26,7 @@ example, running \fBnpm ls promzard\fP in npm's source tree will show:
|
|||
.P
|
||||
.RS 2
|
||||
.nf
|
||||
npm@8\.2\.0 /path/to/npm
|
||||
npm@8\.3\.0 /path/to/npm
|
||||
└─┬ init\-package\-json@0\.0\.4
|
||||
└── promzard@0\.1\.5
|
||||
.fi
|
||||
|
|
|
@ -10,7 +10,7 @@ npm <command> [args]
|
|||
.RE
|
||||
.SS Version
|
||||
.P
|
||||
8\.2\.0
|
||||
8\.3\.0
|
||||
.SS Description
|
||||
.P
|
||||
npm is the package manager for the Node JavaScript platform\. It puts
|
||||
|
|
|
@ -960,6 +960,120 @@ if (foo) {
|
|||
.P
|
||||
Entries in \fBoptionalDependencies\fP will override entries of the same name in
|
||||
\fBdependencies\fP, so it's usually best to only put in one place\.
|
||||
.SS overrides
|
||||
.P
|
||||
If you need to make specific changes to dependencies of your dependencies, for
|
||||
example replacing the version of a dependency with a known security issue,
|
||||
replacing an existing dependency with a fork, or making sure that the same
|
||||
version of a package is used everywhere, then you may add an override\.
|
||||
.P
|
||||
Overrides provide a way to replace a package in your dependency tree with
|
||||
another version, or another package entirely\. These changes can be scoped as
|
||||
specific or as vague as desired\.
|
||||
.P
|
||||
To make sure the package \fBfoo\fP is always installed as version \fB1\.0\.0\fP no matter
|
||||
what version your dependencies rely on:
|
||||
.P
|
||||
.RS 2
|
||||
.nf
|
||||
{
|
||||
"overrides": {
|
||||
"foo": "1\.0\.0"
|
||||
}
|
||||
}
|
||||
.fi
|
||||
.RE
|
||||
.P
|
||||
The above is a short hand notation, the full object form can be used to allow
|
||||
overriding a package itself as well as a child of the package\. This will cause
|
||||
\fBfoo\fP to always be \fB1\.0\.0\fP while also making \fBbar\fP at any depth beyond \fBfoo\fP
|
||||
also \fB1\.0\.0\fP:
|
||||
.P
|
||||
.RS 2
|
||||
.nf
|
||||
{
|
||||
"overrides": {
|
||||
"foo": {
|
||||
"\.": "1\.0\.0",
|
||||
"bar": "1\.0\.0"
|
||||
}
|
||||
}
|
||||
}
|
||||
.fi
|
||||
.RE
|
||||
.P
|
||||
To only override \fBfoo\fP to be \fB1\.0\.0\fP when it's a child (or grandchild, or great
|
||||
grandchild, etc) of the package \fBbar\fP:
|
||||
.P
|
||||
.RS 2
|
||||
.nf
|
||||
{
|
||||
"overrides": {
|
||||
"bar": {
|
||||
"foo": "1\.0\.0"
|
||||
}
|
||||
}
|
||||
}
|
||||
.fi
|
||||
.RE
|
||||
.P
|
||||
Keys can be nested to any arbitrary length\. To override \fBfoo\fP only when it's a
|
||||
child of \fBbar\fP and only when \fBbar\fP is a child of \fBbaz\fP:
|
||||
.P
|
||||
.RS 2
|
||||
.nf
|
||||
{
|
||||
"overrides": {
|
||||
"baz": {
|
||||
"bar": {
|
||||
"foo": "1\.0\.0"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
.fi
|
||||
.RE
|
||||
.P
|
||||
The key of an override can also include a version, or range of versions\.
|
||||
To override \fBfoo\fP to \fB1\.0\.0\fP, but only when it's a child of \fBbar@2\.0\.0\fP:
|
||||
.P
|
||||
.RS 2
|
||||
.nf
|
||||
{
|
||||
"overrides": {
|
||||
"bar@2\.0\.0": {
|
||||
"foo": "1\.0\.0"
|
||||
}
|
||||
}
|
||||
}
|
||||
.fi
|
||||
.RE
|
||||
.P
|
||||
You may not set an override for a package that you directly depend on unless
|
||||
both the dependency and the override itself share the exact same spec\. To make
|
||||
this limitation easier to deal with, overrides may also be defined as a
|
||||
reference to a spec for a direct dependency by prefixing the name of the
|
||||
package you wish the version to match with a \fB$\fP\|\.
|
||||
.P
|
||||
.RS 2
|
||||
.nf
|
||||
{
|
||||
"dependencies": {
|
||||
"foo": "^1\.0\.0"
|
||||
},
|
||||
"overrides": {
|
||||
// BAD, will throw an EOVERRIDE error
|
||||
// "foo": "^2\.0\.0"
|
||||
// GOOD, specs match so override is allowed
|
||||
// "foo": "^1\.0\.0"
|
||||
// BEST, the override is defined as a reference to the dependency
|
||||
"foo": "$foo",
|
||||
// the referenced package does not need to match the overridden one
|
||||
"bar": "$foo"
|
||||
}
|
||||
}
|
||||
.fi
|
||||
.RE
|
||||
.SS engines
|
||||
.P
|
||||
You can specify the version of node that your stuff works on:
|
||||
|
|
|
@ -4,8 +4,8 @@ Inspect and manage `node_modules` trees.

![a tree with the word ARBORIST superimposed on it](https://raw.githubusercontent.com/npm/arborist/main/docs/logo.svg?sanitize=true)

There's more documentation [in the notes
folder](https://github.com/npm/arborist/tree/main/notes).
There's more documentation [in the docs
folder](https://github.com/npm/arborist/tree/main/docs).

## USAGE

@ -379,6 +379,7 @@ module.exports = cls => class IdealTreeBuilder extends cls {
|
|||
optional: false,
|
||||
global: this[_global],
|
||||
legacyPeerDeps: this.legacyPeerDeps,
|
||||
loadOverrides: true,
|
||||
})
|
||||
if (root.isLink) {
|
||||
root.target = new Node({
|
||||
|
@ -676,6 +677,7 @@ module.exports = cls => class IdealTreeBuilder extends cls {
|
|||
// calls rather than walking over everything in the tree.
|
||||
const set = this.idealTree.inventory
|
||||
.filter(n => this[_shouldUpdateNode](n))
|
||||
// XXX add any invalid edgesOut to the queue
|
||||
for (const node of set) {
|
||||
for (const edge of node.edgesIn) {
|
||||
this.addTracker('idealTree', edge.from.name, edge.from.location)
|
||||
|
@ -772,7 +774,10 @@ This is a one-time fix-up, please be patient...
|
|||
[_buildDeps] () {
|
||||
process.emit('time', 'idealTree:buildDeps')
|
||||
const tree = this.idealTree.target
|
||||
tree.assertRootOverrides()
|
||||
this[_depsQueue].push(tree)
|
||||
// XXX also push anything that depends on a node with a name
|
||||
// in the override list
|
||||
this.log.silly('idealTree', 'buildDeps')
|
||||
this.addTracker('idealTree', tree.name, '')
|
||||
return this[_buildDepStep]()
|
||||
|
@ -1112,6 +1117,7 @@ This is a one-time fix-up, please be patient...
|
|||
path: node.realpath,
|
||||
sourceReference: node,
|
||||
legacyPeerDeps: this.legacyPeerDeps,
|
||||
overrides: node.overrides,
|
||||
})
|
||||
|
||||
// also need to set up any targets from any link deps, so that
|
||||
|
|
|
@ -127,6 +127,7 @@ module.exports = cls => class ActualLoader extends cls {
|
|||
realpath: real,
|
||||
pkg: {},
|
||||
global,
|
||||
loadOverrides: true,
|
||||
})
|
||||
return this[_loadActualActually]({ root, ignoreMissing, global })
|
||||
}
|
||||
|
@ -135,8 +136,11 @@ module.exports = cls => class ActualLoader extends cls {
|
|||
this[_actualTree] = await this[_loadFSNode]({
|
||||
path: this.path,
|
||||
real: await realpath(this.path, this[_rpcache], this[_stcache]),
|
||||
loadOverrides: true,
|
||||
})
|
||||
|
||||
this[_actualTree].assertRootOverrides()
|
||||
|
||||
// Note: hidden lockfile will be rejected if it's not the latest thing
|
||||
// in the folder, or if any of the entries in the hidden lockfile are
|
||||
// missing.
|
||||
|
@ -236,13 +240,26 @@ module.exports = cls => class ActualLoader extends cls {
|
|||
this[_actualTree] = root
|
||||
}
|
||||
|
||||
[_loadFSNode] ({ path, parent, real, root }) {
|
||||
[_loadFSNode] ({ path, parent, real, root, loadOverrides }) {
|
||||
if (!real) {
|
||||
return realpath(path, this[_rpcache], this[_stcache])
|
||||
.then(
|
||||
real => this[_loadFSNode]({ path, parent, real, root }),
|
||||
real => this[_loadFSNode]({
|
||||
path,
|
||||
parent,
|
||||
real,
|
||||
root,
|
||||
loadOverrides,
|
||||
}),
|
||||
// if realpath fails, just provide a dummy error node
|
||||
error => new Node({ error, path, realpath: path, parent, root })
|
||||
error => new Node({
|
||||
error,
|
||||
path,
|
||||
realpath: path,
|
||||
parent,
|
||||
root,
|
||||
loadOverrides,
|
||||
})
|
||||
)
|
||||
}
|
||||
|
||||
|
@ -271,6 +288,7 @@ module.exports = cls => class ActualLoader extends cls {
|
|||
error,
|
||||
parent,
|
||||
root,
|
||||
loadOverrides,
|
||||
})
|
||||
})
|
||||
.then(node => {
|
||||
|
|
|
@ -72,6 +72,7 @@ module.exports = cls => class VirtualLoader extends cls {
    this[rootOptionProvided] = options.root

    await this[loadFromShrinkwrap](s, root)
    root.assertRootOverrides()
    return treeCheck(this.virtualTree)
  }

@ -29,6 +29,7 @@ class ArboristEdge {}
const printableEdge = (edge) => {
  const edgeFrom = edge.from && edge.from.location
  const edgeTo = edge.to && edge.to.location
  const override = edge.overrides && edge.overrides.value

  return Object.assign(new ArboristEdge(), {
    name: edge.name,

@ -38,12 +39,13 @@ const printableEdge = (edge) => {
    ...(edgeTo ? { to: edgeTo } : {}),
    ...(edge.error ? { error: edge.error } : {}),
    ...(edge.peerConflicted ? { peerConflicted: true } : {}),
    ...(override ? { overridden: override } : {}),
  })
}

class Edge {
  constructor (options) {
    const { type, name, spec, accept, from } = options
    const { type, name, spec, accept, from, overrides } = options

    if (typeof spec !== 'string') {
      throw new TypeError('must provide string spec')

@ -55,6 +57,10 @@ class Edge {

    this[_spec] = spec

    if (overrides !== undefined) {
      this.overrides = overrides
    }

    if (accept !== undefined) {
      if (typeof accept !== 'string') {
        throw new TypeError('accept field must be a string if provided')

@ -82,8 +88,11 @@ class Edge {
  }

  satisfiedBy (node) {
    return node.name === this.name &&
      depValid(node, this.spec, this.accept, this.from)
    if (node.name !== this.name) {
      return false
    }

    return depValid(node, this.spec, this.accept, this.from)
  }

  explain (seen = []) {

@ -101,6 +110,10 @@ class Edge {
      type: this.type,
      name: this.name,
      spec: this.spec,
      ...(this.rawSpec !== this.spec ? {
        rawSpec: this.rawSpec,
        overridden: true,
      } : {}),
      ...(bundled ? { bundled } : {}),
      ...(error ? { error } : {}),
      ...(from ? { from: from.explain(null, seen) } : {}),

@ -143,7 +156,28 @@ class Edge {
    return this[_name]
  }

  get rawSpec () {
    return this[_spec]
  }

  get spec () {
    if (this.overrides && this.overrides.value && this.overrides.name === this.name) {
      if (this.overrides.value.startsWith('$')) {
        const ref = this.overrides.value.slice(1)
        const pkg = this.from.root.package
        const overrideSpec = (pkg.devDependencies && pkg.devDependencies[ref]) ||
          (pkg.optionalDependencies && pkg.optionalDependencies[ref]) ||
          (pkg.dependencies && pkg.dependencies[ref]) ||
          (pkg.peerDependencies && pkg.peerDependencies[ref])

        if (overrideSpec) {
          return overrideSpec
        }

        throw new Error(`Unable to resolve reference ${this.overrides.value}`)
      }
      return this.overrides.value
    }
    return this[_spec]
  }

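The `$`-reference handling in the new `spec` getter can be read in isolation as the following sketch; the `resolveReference` helper and the sample root package are stand-ins of my own, mirroring the lookup order used above:

```js
// Hypothetical, simplified resolution of an override value like "$foo":
// strip the "$", then look the name up in the root package's dependency
// sections in the same order the getter above uses.
const resolveReference = (value, rootPkg) => {
  if (!value.startsWith('$')) {
    return value
  }
  const ref = value.slice(1)
  const spec = (rootPkg.devDependencies && rootPkg.devDependencies[ref]) ||
    (rootPkg.optionalDependencies && rootPkg.optionalDependencies[ref]) ||
    (rootPkg.dependencies && rootPkg.dependencies[ref]) ||
    (rootPkg.peerDependencies && rootPkg.peerDependencies[ref])
  if (!spec) {
    throw new Error(`Unable to resolve reference ${value}`)
  }
  return spec
}

// made-up root package.json contents
const rootPkg = { dependencies: { foo: '^1.0.0' }, overrides: { bar: '$foo' } }
console.log(resolveReference(rootPkg.overrides.bar, rootPkg)) // '^1.0.0'
```
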
@ -213,6 +247,7 @@ class Edge {
    if (node.edgesOut.has(this.name)) {
      node.edgesOut.get(this.name).detach()
    }

    node.addEdgeOut(this)
    this.reload()
  }

@ -32,6 +32,7 @@ const semver = require('semver')
|
|||
const nameFromFolder = require('@npmcli/name-from-folder')
|
||||
const Edge = require('./edge.js')
|
||||
const Inventory = require('./inventory.js')
|
||||
const OverrideSet = require('./override-set.js')
|
||||
const { normalize } = require('read-package-json-fast')
|
||||
const { getPaths: getBinPaths } = require('bin-links')
|
||||
const npa = require('npm-package-arg')
|
||||
|
@ -88,6 +89,8 @@ class Node {
|
|||
legacyPeerDeps = false,
|
||||
linksIn,
|
||||
hasShrinkwrap,
|
||||
overrides,
|
||||
loadOverrides = false,
|
||||
extraneous = true,
|
||||
dev = true,
|
||||
optional = true,
|
||||
|
@ -190,6 +193,17 @@ class Node {
|
|||
// because this.package is read when adding to inventory
|
||||
this[_package] = pkg && typeof pkg === 'object' ? pkg : {}
|
||||
|
||||
if (overrides) {
|
||||
this.overrides = overrides
|
||||
} else if (loadOverrides) {
|
||||
const overrides = this[_package].overrides || {}
|
||||
if (Object.keys(overrides).length > 0) {
|
||||
this.overrides = new OverrideSet({
|
||||
overrides: this[_package].overrides,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
// only relevant for the root and top nodes
|
||||
this.meta = meta
|
||||
|
||||
|
@ -963,6 +977,11 @@ class Node {
|
|||
return false
|
||||
}
|
||||
|
||||
// XXX need to check for two root nodes?
|
||||
if (node.overrides !== this.overrides) {
|
||||
return false
|
||||
}
|
||||
|
||||
ignorePeers = new Set(ignorePeers)
|
||||
|
||||
// gather up all the deps of this node and that are only depended
|
||||
|
@ -1208,6 +1227,10 @@ class Node {
|
|||
this[_changePath](newPath)
|
||||
}
|
||||
|
||||
if (parent.overrides) {
|
||||
this.overrides = parent.overrides.getNodeRule(this)
|
||||
}
|
||||
|
||||
// clobbers anything at that path, resets all appropriate references
|
||||
this.root = parent.root
|
||||
}
|
||||
|
@ -1279,11 +1302,33 @@ class Node {
|
|||
}
|
||||
}
|
||||
|
||||
assertRootOverrides () {
|
||||
if (!this.isProjectRoot || !this.overrides) {
|
||||
return
|
||||
}
|
||||
|
||||
for (const edge of this.edgesOut.values()) {
|
||||
// if these differ an override has been applied, those are not allowed
|
||||
// for top level dependencies so throw an error
|
||||
if (edge.spec !== edge.rawSpec && !edge.spec.startsWith('$')) {
|
||||
throw Object.assign(new Error(`Override for ${edge.name}@${edge.rawSpec} conflicts with direct dependency`), { code: 'EOVERRIDE' })
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
addEdgeOut (edge) {
|
||||
if (this.overrides) {
|
||||
edge.overrides = this.overrides.getEdgeRule(edge)
|
||||
}
|
||||
|
||||
this.edgesOut.set(edge.name, edge)
|
||||
}
|
||||
|
||||
addEdgeIn (edge) {
|
||||
if (edge.overrides) {
|
||||
this.overrides = edge.overrides
|
||||
}
|
||||
|
||||
this.edgesIn.add(edge)
|
||||
|
||||
// try to get metadata from the yarn.lock file
|
||||
|
|
|
@ -0,0 +1,123 @@
const npa = require('npm-package-arg')
const semver = require('semver')

class OverrideSet {
  constructor ({ overrides, key, parent }) {
    this.parent = parent
    this.children = new Map()

    if (typeof overrides === 'string') {
      overrides = { '.': overrides }
    }

    // change a literal empty string to * so we can use truthiness checks on
    // the value property later
    if (overrides['.'] === '') {
      overrides['.'] = '*'
    }

    if (parent) {
      const spec = npa(key)
      if (!spec.name) {
        throw new Error(`Override without name: ${key}`)
      }

      this.name = spec.name
      spec.name = ''
      this.key = key
      this.keySpec = spec.rawSpec === '' ? '' : spec.toString()
      this.value = overrides['.'] || this.keySpec
    }

    for (const [key, childOverrides] of Object.entries(overrides)) {
      if (key === '.') {
        continue
      }

      const child = new OverrideSet({
        parent: this,
        key,
        overrides: childOverrides,
      })

      this.children.set(child.key, child)
    }
  }

  getEdgeRule (edge) {
    for (const rule of this.ruleset.values()) {
      if (rule.name !== edge.name) {
        continue
      }

      if (rule.keySpec === '' ||
        semver.intersects(edge.spec, rule.keySpec)) {
        return rule
      }
    }

    return this
  }

  getNodeRule (node) {
    for (const rule of this.ruleset.values()) {
      if (rule.name !== node.name) {
        continue
      }

      if (rule.keySpec === '' ||
        semver.satisfies(node.version, rule.keySpec) ||
        semver.satisfies(node.version, rule.value)) {
        return rule
      }
    }

    return this
  }

  getMatchingRule (node) {
    for (const rule of this.ruleset.values()) {
      if (rule.name !== node.name) {
        continue
      }

      if (rule.keySpec === '' ||
        semver.satisfies(node.version, rule.keySpec) ||
        semver.satisfies(node.version, rule.value)) {
        return rule
      }
    }

    return null
  }

  * ancestry () {
    for (let ancestor = this; ancestor; ancestor = ancestor.parent) {
      yield ancestor
    }
  }

  get isRoot () {
    return !this.parent
  }

  get ruleset () {
    const ruleset = new Map()

    for (const override of this.ancestry()) {
      for (const kid of override.children.values()) {
        if (!ruleset.has(kid.key)) {
          ruleset.set(kid.key, kid)
        }
      }

      if (!override.isRoot && !ruleset.has(override.key)) {
        ruleset.set(override.key, override)
      }
    }

    return ruleset
  }
}

module.exports = OverrideSet

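A usage sketch for the new `OverrideSet` (the require path is assumed; the class ships inside `@npmcli/arborist`). The edges passed in are plain `{ name, spec }` objects rather than real arborist `Edge` instances, which is enough for `getEdgeRule` since it only reads those two fields:

```js
// Exploring the new OverrideSet with the overrides shapes documented above.
const OverrideSet = require('./override-set.js') // path assumed

const overrides = new OverrideSet({
  overrides: {
    foo: '1.0.0',                  // always pin foo
    'bar@2.0.0': { foo: '1.0.0' }, // pin foo only beneath bar@2.0.0
  },
})

// a direct rule on foo applies regardless of the requested range
console.log(overrides.getEdgeRule({ name: 'foo', spec: '^2.0.0' }).value) // '1.0.0'

// descending through bar@2.0.0 exposes the nested foo rule via its own ruleset
const barRule = overrides.getEdgeRule({ name: 'bar', spec: '^2.0.0' })
console.log(barRule.getEdgeRule({ name: 'foo', spec: '^1.0.0' }).value)   // '1.0.0'
```
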
@ -295,6 +295,7 @@ class PlaceDep {
      integrity: dep.integrity,
      legacyPeerDeps: this.legacyPeerDeps,
      error: dep.errors[0],
      ...(dep.overrides ? { overrides: dep.overrides } : {}),
      ...(dep.isLink ? { target: dep.target, realpath: dep.realpath } : {}),
    })

@ -1,6 +1,5 @@
|
|||
// helper function to output a clearer visualization
|
||||
// of the current node and its descendents
|
||||
|
||||
const localeCompare = require('@isaacs/string-locale-compare')('en')
|
||||
const util = require('util')
|
||||
const relpath = require('./relpath.js')
|
||||
|
@ -65,6 +64,11 @@ class ArboristNode {
|
|||
this.errors = tree.errors.map(treeError)
|
||||
}
|
||||
|
||||
if (tree.overrides) {
|
||||
this.overrides = new Map([...tree.overrides.ruleset.values()]
|
||||
.map((override) => [override.key, override.value]))
|
||||
}
|
||||
|
||||
// edgesOut sorted by name
|
||||
if (tree.edgesOut.size) {
|
||||
this.edgesOut = new Map([...tree.edgesOut.entries()]
|
||||
|
@ -126,7 +130,10 @@ class Edge {
|
|||
constructor (edge) {
|
||||
this.type = edge.type
|
||||
this.name = edge.name
|
||||
this.spec = edge.spec || '*'
|
||||
this.spec = edge.rawSpec || '*'
|
||||
if (edge.rawSpec !== edge.spec) {
|
||||
this.override = edge.spec
|
||||
}
|
||||
if (edge.error) {
|
||||
this.error = edge.error
|
||||
}
|
||||
|
@ -145,6 +152,8 @@ class EdgeOut extends Edge {
|
|||
|
||||
[util.inspect.custom] () {
|
||||
return `{ ${this.type} ${this.name}@${this.spec}${
|
||||
this.override ? ` overridden:${this.override}` : ''
|
||||
}${
|
||||
this.to ? ' -> ' + this.to : ''
|
||||
}${
|
||||
this.error ? ' ' + this.error : ''
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
{
|
||||
"name": "@npmcli/arborist",
|
||||
"version": "4.0.5",
|
||||
"version": "4.1.1",
|
||||
"description": "Manage node_modules trees",
|
||||
"dependencies": {
|
||||
"@isaacs/string-locale-compare": "^1.1.0",
|
||||
|
@ -24,7 +24,7 @@
|
|||
"npm-pick-manifest": "^6.1.0",
|
||||
"npm-registry-fetch": "^11.0.0",
|
||||
"pacote": "^12.0.2",
|
||||
"parse-conflict-json": "^1.1.1",
|
||||
"parse-conflict-json": "^2.0.1",
|
||||
"proc-log": "^1.0.0",
|
||||
"promise-all-reject-late": "^1.0.0",
|
||||
"promise-call-limit": "^1.0.1",
|
||||
|
@ -37,10 +37,11 @@
|
|||
"walk-up-path": "^1.0.0"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@npmcli/template-oss": "^2.3.0",
|
||||
"@npmcli/template-oss": "^2.3.1",
|
||||
"benchmark": "^2.1.4",
|
||||
"chalk": "^4.1.0",
|
||||
"minify-registry-metadata": "^2.1.0",
|
||||
"nock": "^13.2.0",
|
||||
"tap": "^15.1.2",
|
||||
"tcompare": "^5.0.6"
|
||||
},
|
||||
|
@ -93,7 +94,7 @@
|
|||
"engines": {
|
||||
"node": "^12.13.0 || ^14.15.0 || >=16"
|
||||
},
|
||||
"templateVersion": "2.3.0",
|
||||
"templateVersion": "2.3.1",
|
||||
"eslintIgnore": [
|
||||
"test/fixtures/",
|
||||
"!test/fixtures/*.js"
|
||||
|
|
|
@ -0,0 +1,110 @@
|
|||
/*
|
||||
const obj1 = {a: 3, b: 5};
|
||||
diffApply(obj1,
|
||||
[
|
||||
{ "op": "remove", "path": ['b'] },
|
||||
{ "op": "replace", "path": ['a'], "value": 4 },
|
||||
{ "op": "add", "path": ['c'], "value": 5 }
|
||||
]
|
||||
);
|
||||
obj1; // {a: 4, c: 5}
|
||||
|
||||
// using converter to apply jsPatch standard paths
|
||||
// see http://jsonpatch.com
|
||||
import {diff, jsonPatchPathConverter} from 'just-diff'
|
||||
const obj2 = {a: 3, b: 5};
|
||||
diffApply(obj2, [
|
||||
{ "op": "remove", "path": '/b' },
|
||||
{ "op": "replace", "path": '/a', "value": 4 }
|
||||
{ "op": "add", "path": '/c', "value": 5 }
|
||||
], jsonPatchPathConverter);
|
||||
obj2; // {a: 4, c: 5}
|
||||
|
||||
// arrays
|
||||
const obj3 = {a: 4, b: [1, 2, 3]};
|
||||
diffApply(obj3, [
|
||||
{ "op": "replace", "path": ['a'], "value": 3 }
|
||||
{ "op": "replace", "path": ['b', 2], "value": 4 }
|
||||
{ "op": "add", "path": ['b', 3], "value": 9 }
|
||||
]);
|
||||
obj3; // {a: 3, b: [1, 2, 4, 9]}
|
||||
|
||||
// nested paths
|
||||
const obj4 = {a: 4, b: {c: 3}};
|
||||
diffApply(obj4, [
|
||||
{ "op": "replace", "path": ['a'], "value": 5 }
|
||||
{ "op": "remove", "path": ['b', 'c']}
|
||||
{ "op": "add", "path": ['b', 'd'], "value": 4 }
|
||||
]);
|
||||
obj4; // {a: 5, b: {d: 4}}
|
||||
*/
|
||||
|
||||
var REMOVE = 'remove';
|
||||
var REPLACE = 'replace';
|
||||
var ADD = 'add';
|
||||
|
||||
function diffApply(obj, diff, pathConverter) {
|
||||
if (!obj || typeof obj != 'object') {
|
||||
throw new Error('base object must be an object or an array');
|
||||
}
|
||||
|
||||
if (!Array.isArray(diff)) {
|
||||
throw new Error('diff must be an array');
|
||||
}
|
||||
|
||||
var diffLength = diff.length;
|
||||
for (var i = 0; i < diffLength; i++) {
|
||||
var thisDiff = diff[i];
|
||||
var subObject = obj;
|
||||
var thisOp = thisDiff.op;
|
||||
var thisPath = thisDiff.path;
|
||||
if (pathConverter) {
|
||||
thisPath = pathConverter(thisPath);
|
||||
if (!Array.isArray(thisPath)) {
|
||||
throw new Error('pathConverter must return an array');
|
||||
}
|
||||
} else {
|
||||
if (!Array.isArray(thisPath)) {
|
||||
throw new Error(
|
||||
'diff path must be an array, consider supplying a path converter'
|
||||
);
|
||||
}
|
||||
}
|
||||
var pathCopy = thisPath.slice();
|
||||
var lastProp = pathCopy.pop();
|
||||
if (lastProp == null) {
|
||||
return false;
|
||||
}
|
||||
var thisProp;
|
||||
while ((thisProp = pathCopy.shift()) != null) {
|
||||
if (!(thisProp in subObject)) {
|
||||
subObject[thisProp] = {};
|
||||
}
|
||||
subObject = subObject[thisProp];
|
||||
}
|
||||
if (thisOp === REMOVE || thisOp === REPLACE) {
|
||||
if (!subObject.hasOwnProperty(lastProp)) {
|
||||
throw new Error(
|
||||
['expected to find property', thisDiff.path, 'in object', obj].join(
|
||||
' '
|
||||
)
|
||||
);
|
||||
}
|
||||
}
|
||||
if (thisOp === REMOVE) {
|
||||
Array.isArray(subObject)
|
||||
? subObject.splice(lastProp, 1)
|
||||
: delete subObject[lastProp];
|
||||
}
|
||||
if (thisOp === REPLACE || thisOp === ADD) {
|
||||
subObject[lastProp] = thisDiff.value;
|
||||
}
|
||||
}
|
||||
return subObject;
|
||||
}
|
||||
|
||||
function jsonPatchPathConverter(stringPath) {
|
||||
return stringPath.split('/').slice(1);
|
||||
}
|
||||
|
||||
export {diffApply, jsonPatchPathConverter};
|
|
@ -1,10 +1,18 @@
|
|||
{
|
||||
"name": "just-diff-apply",
|
||||
"version": "3.0.0",
|
||||
"version": "4.0.1",
|
||||
"description": "Apply a diff to an object. Optionally supports jsonPatch protocol",
|
||||
"main": "index.js",
|
||||
"module": "index.mjs",
|
||||
"exports": {
|
||||
".": {
|
||||
"require": "./index.js",
|
||||
"default": "./index.mjs"
|
||||
}
|
||||
},
|
||||
"scripts": {
|
||||
"test": "echo \"Error: no test specified\" && exit 1"
|
||||
"test": "echo \"Error: no test specified\" && exit 1",
|
||||
"build": "rollup -c"
|
||||
},
|
||||
"repository": "https://github.com/angus-c/just",
|
||||
"keywords": [
|
||||
|
|
|
@ -0,0 +1,3 @@
|
|||
const createRollupConfig = require('../../config/createRollupConfig');
|
||||
|
||||
module.exports = createRollupConfig(__dirname);
|
|
@ -0,0 +1,146 @@
|
|||
/*
|
||||
const obj1 = {a: 4, b: 5};
|
||||
const obj2 = {a: 3, b: 5};
|
||||
const obj3 = {a: 4, c: 5};
|
||||
|
||||
diff(obj1, obj2);
|
||||
[
|
||||
{ "op": "replace", "path": ['a'], "value": 3 }
|
||||
]
|
||||
|
||||
diff(obj2, obj3);
|
||||
[
|
||||
{ "op": "remove", "path": ['b'] },
|
||||
{ "op": "replace", "path": ['a'], "value": 4 }
|
||||
{ "op": "add", "path": ['c'], "value": 5 }
|
||||
]
|
||||
|
||||
// using converter to generate jsPatch standard paths
|
||||
// see http://jsonpatch.com
|
||||
import {diff, jsonPatchPathConverter} from 'just-diff'
|
||||
diff(obj1, obj2, jsonPatchPathConverter);
|
||||
[
|
||||
{ "op": "replace", "path": '/a', "value": 3 }
|
||||
]
|
||||
|
||||
diff(obj2, obj3, jsonPatchPathConverter);
|
||||
[
|
||||
{ "op": "remove", "path": '/b' },
|
||||
{ "op": "replace", "path": '/a', "value": 4 }
|
||||
{ "op": "add", "path": '/c', "value": 5 }
|
||||
]
|
||||
|
||||
// arrays
|
||||
const obj4 = {a: 4, b: [1, 2, 3]};
|
||||
const obj5 = {a: 3, b: [1, 2, 4]};
|
||||
const obj6 = {a: 3, b: [1, 2, 4, 5]};
|
||||
|
||||
diff(obj4, obj5);
|
||||
[
|
||||
{ "op": "replace", "path": ['a'], "value": 3 }
|
||||
{ "op": "replace", "path": ['b', 2], "value": 4 }
|
||||
]
|
||||
|
||||
diff(obj5, obj6);
|
||||
[
|
||||
{ "op": "add", "path": ['b', 3], "value": 5 }
|
||||
]
|
||||
|
||||
// nested paths
|
||||
const obj7 = {a: 4, b: {c: 3}};
|
||||
const obj8 = {a: 4, b: {c: 4}};
|
||||
const obj9 = {a: 5, b: {d: 4}};
|
||||
|
||||
diff(obj7, obj8);
|
||||
[
|
||||
{ "op": "replace", "path": ['b', 'c'], "value": 4 }
|
||||
]
|
||||
|
||||
diff(obj8, obj9);
|
||||
[
|
||||
{ "op": "replace", "path": ['a'], "value": 5 }
|
||||
{ "op": "remove", "path": ['b', 'c']}
|
||||
{ "op": "add", "path": ['b', 'd'], "value": 4 }
|
||||
]
|
||||
*/
|
||||
|
||||
function diff(obj1, obj2, pathConverter) {
|
||||
if (!obj1 || typeof obj1 != 'object' || !obj2 || typeof obj2 != 'object') {
|
||||
throw new Error('both arguments must be objects or arrays');
|
||||
}
|
||||
|
||||
pathConverter ||
|
||||
(pathConverter = function(arr) {
|
||||
return arr;
|
||||
});
|
||||
|
||||
function getDiff(obj1, obj2, basePath, diffs) {
|
||||
var obj1Keys = Object.keys(obj1);
|
||||
var obj1KeysLength = obj1Keys.length;
|
||||
var obj2Keys = Object.keys(obj2);
|
||||
var obj2KeysLength = obj2Keys.length;
|
||||
var path;
|
||||
|
||||
for (var i = 0; i < obj1KeysLength; i++) {
|
||||
var key = Array.isArray(obj1) ? Number(obj1Keys[i]) : obj1Keys[i];
|
||||
if (!(key in obj2)) {
|
||||
path = basePath.concat(key);
|
||||
diffs.remove.push({
|
||||
op: 'remove',
|
||||
path: pathConverter(path),
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
for (var i = 0; i < obj2KeysLength; i++) {
|
||||
var key = Array.isArray(obj2) ? Number(obj2Keys[i]) : obj2Keys[i];
|
||||
var obj1AtKey = obj1[key];
|
||||
var obj2AtKey = obj2[key];
|
||||
if (!(key in obj1)) {
|
||||
path = basePath.concat(key);
|
||||
var obj2Value = obj2[key];
|
||||
diffs.add.push({
|
||||
op: 'add',
|
||||
path: pathConverter(path),
|
||||
value: obj2Value,
|
||||
});
|
||||
} else if (obj1AtKey !== obj2AtKey) {
|
||||
if (
|
||||
Object(obj1AtKey) !== obj1AtKey ||
|
||||
Object(obj2AtKey) !== obj2AtKey
|
||||
) {
|
||||
path = pushReplace(path, basePath, key, diffs, pathConverter, obj2);
|
||||
} else {
|
||||
if (
|
||||
!Object.keys(obj1AtKey).length &&
|
||||
!Object.keys(obj2AtKey).length &&
|
||||
String(obj1AtKey) != String(obj2AtKey)
|
||||
) {
|
||||
path = pushReplace(path, basePath, key, diffs, pathConverter, obj2);
|
||||
} else {
|
||||
getDiff(obj1[key], obj2[key], basePath.concat(key), diffs);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return diffs.remove.reverse().concat(diffs.replace).concat(diffs.add);
|
||||
}
|
||||
return getDiff(obj1, obj2, [], {remove: [], replace: [], add: []});
|
||||
}
|
||||
|
||||
function pushReplace(path, basePath, key, diffs, pathConverter, obj2) {
|
||||
path = basePath.concat(key);
|
||||
diffs.replace.push({
|
||||
op: 'replace',
|
||||
path: pathConverter(path),
|
||||
value: obj2[key],
|
||||
});
|
||||
return path;
|
||||
}
|
||||
|
||||
function jsonPatchPathConverter(arrayPath) {
|
||||
return [''].concat(arrayPath).join('/');
|
||||
}
|
||||
|
||||
export {diff, jsonPatchPathConverter};
|
|
@ -1,4 +1,4 @@
|
|||
import diffObj = require('./index');
|
||||
import * as diffObj from './index'
|
||||
|
||||
const {diff, jsonPatchPathConverter} = diffObj;
|
||||
const obj1 = {a: 2, b: 3};
|
||||
|
|
|
@ -1,11 +1,19 @@
|
|||
{
|
||||
"name": "just-diff",
|
||||
"version": "3.1.1",
|
||||
"version": "5.0.1",
|
||||
"description": "Return an object representing the diffs between two objects. Supports jsonPatch protocol",
|
||||
"main": "index.js",
|
||||
"module": "index.mjs",
|
||||
"exports": {
|
||||
".": {
|
||||
"require": "./index.js",
|
||||
"default": "./index.mjs"
|
||||
}
|
||||
},
|
||||
"types": "index.d.ts",
|
||||
"scripts": {
|
||||
"test": "echo \"Error: no test specified\" && exit 1"
|
||||
"test": "echo \"Error: no test specified\" && exit 1",
|
||||
"build": "rollup -c"
|
||||
},
|
||||
"repository": "https://github.com/angus-c/just",
|
||||
"keywords": [
|
||||
|
@ -20,4 +28,4 @@
|
|||
"bugs": {
|
||||
"url": "https://github.com/angus-c/just/issues"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -0,0 +1,3 @@
|
|||
const createRollupConfig = require('../../config/createRollupConfig');
|
||||
|
||||
module.exports = createRollupConfig(__dirname);
|
|
@ -165,7 +165,12 @@ module.exports = class Minipass extends Stream {
|
|||
// because we're mid-write, so that'd be bad.
|
||||
if (this[BUFFERLENGTH] !== 0)
|
||||
this[FLUSH](true)
|
||||
this.emit('data', chunk)
|
||||
|
||||
// if we are still flowing after flushing the buffer we can emit the
|
||||
// chunk otherwise we have to buffer it.
|
||||
this.flowing
|
||||
? this.emit('data', chunk)
|
||||
: this[BUFFERPUSH](chunk)
|
||||
} else
|
||||
this[BUFFERPUSH](chunk)
|
||||
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
{
|
||||
"name": "minipass",
|
||||
"version": "3.1.5",
|
||||
"version": "3.1.6",
|
||||
"description": "minimal implementation of a PassThrough stream",
|
||||
"main": "index.js",
|
||||
"dependencies": {
|
||||
|
|
|
@ -1,15 +0,0 @@
|
|||
The ISC License
|
||||
|
||||
Copyright (c) npm, Inc. and Contributors
|
||||
|
||||
Permission to use, copy, modify, and/or distribute this software for any
|
||||
purpose with or without fee is hereby granted, provided that the above
|
||||
copyright notice and this permission notice appear in all copies.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
|
||||
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
|
||||
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
|
||||
ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
|
||||
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
|
||||
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
|
||||
IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
|
|
@ -0,0 +1,20 @@
|
|||
<!-- This file is automatically added by @npmcli/template-oss. Do not edit. -->
|
||||
|
||||
ISC License
|
||||
|
||||
Copyright npm, Inc.
|
||||
|
||||
Permission to use, copy, modify, and/or distribute this
|
||||
software for any purpose with or without fee is hereby
|
||||
granted, provided that the above copyright notice and this
|
||||
permission notice appear in all copies.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS" AND NPM DISCLAIMS ALL
|
||||
WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL
|
||||
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO
|
||||
EVENT SHALL NPM BE LIABLE FOR ANY SPECIAL, DIRECT,
|
||||
INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
|
||||
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS,
|
||||
WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER
|
||||
TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE
|
||||
USE OR PERFORMANCE OF THIS SOFTWARE.
|
|
@ -2,13 +2,16 @@ const parseJSON = require('json-parse-even-better-errors')
|
|||
const { diff } = require('just-diff')
|
||||
const { diffApply } = require('just-diff-apply')
|
||||
|
||||
const globalObjectProperties = Object.getOwnPropertyNames(Object.prototype)
|
||||
|
||||
const stripBOM = content => {
|
||||
content = content.toString()
|
||||
// Remove byte order marker. This catches EF BB BF (the UTF-8 BOM)
|
||||
// because the buffer-to-string conversion in `fs.readFileSync()`
|
||||
// translates it to FEFF, the UTF-16 BOM.
|
||||
if (content.charCodeAt(0) === 0xFEFF)
|
||||
if (content.charCodeAt(0) === 0xFEFF) {
|
||||
content = content.slice(1)
|
||||
}
|
||||
return content
|
||||
}
|
||||
|
||||
|
@ -22,37 +25,42 @@ const isDiff = str =>
|
|||
|
||||
const parseConflictJSON = (str, reviver, prefer) => {
|
||||
prefer = prefer || 'ours'
|
||||
if (prefer !== 'theirs' && prefer !== 'ours')
|
||||
if (prefer !== 'theirs' && prefer !== 'ours') {
|
||||
throw new TypeError('prefer param must be "ours" or "theirs" if set')
|
||||
}
|
||||
|
||||
str = stripBOM(str)
|
||||
|
||||
if (!isDiff(str))
|
||||
if (!isDiff(str)) {
|
||||
return parseJSON(str)
|
||||
}
|
||||
|
||||
const pieces = str.split(/[\n\r]+/g).reduce((acc, line) => {
|
||||
if (line.match(PARENT_RE))
|
||||
if (line.match(PARENT_RE)) {
|
||||
acc.state = 'parent'
|
||||
else if (line.match(OURS_RE))
|
||||
} else if (line.match(OURS_RE)) {
|
||||
acc.state = 'ours'
|
||||
else if (line.match(THEIRS_RE))
|
||||
} else if (line.match(THEIRS_RE)) {
|
||||
acc.state = 'theirs'
|
||||
else if (line.match(END_RE))
|
||||
} else if (line.match(END_RE)) {
|
||||
acc.state = 'top'
|
||||
else {
|
||||
if (acc.state === 'top' || acc.state === 'ours')
|
||||
} else {
|
||||
if (acc.state === 'top' || acc.state === 'ours') {
|
||||
acc.ours += line
|
||||
if (acc.state === 'top' || acc.state === 'theirs')
|
||||
}
|
||||
if (acc.state === 'top' || acc.state === 'theirs') {
|
||||
acc.theirs += line
|
||||
if (acc.state === 'top' || acc.state === 'parent')
|
||||
}
|
||||
if (acc.state === 'top' || acc.state === 'parent') {
|
||||
acc.parent += line
|
||||
}
|
||||
}
|
||||
return acc
|
||||
}, {
|
||||
state: 'top',
|
||||
ours: '',
|
||||
theirs: '',
|
||||
parent: ''
|
||||
parent: '',
|
||||
})
|
||||
|
||||
// this will throw if either piece is not valid JSON, that's intended
|
||||
|
@ -70,8 +78,9 @@ const isObj = obj => obj && typeof obj === 'object'
|
|||
const copyPath = (to, from, path, i) => {
|
||||
const p = path[i]
|
||||
if (isObj(to[p]) && isObj(from[p]) &&
|
||||
Array.isArray(to[p]) === Array.isArray(from[p]))
|
||||
Array.isArray(to[p]) === Array.isArray(from[p])) {
|
||||
return copyPath(to[p], from[p], path, i + 1)
|
||||
}
|
||||
to[p] = from[p]
|
||||
}
|
||||
|
||||
|
@ -80,6 +89,9 @@ const copyPath = (to, from, path, i) => {
|
|||
const resolve = (parent, ours, theirs) => {
|
||||
const dours = diff(parent, ours)
|
||||
for (let i = 0; i < dours.length; i++) {
|
||||
if (globalObjectProperties.find(prop => dours[i].path.includes(prop))) {
|
||||
continue
|
||||
}
|
||||
try {
|
||||
diffApply(theirs, [dours[i]])
|
||||
} catch (e) {
|
|
@@ -1,32 +1,44 @@
{
"name": "parse-conflict-json",
"version": "1.1.1",
"version": "2.0.1",
"description": "Parse a JSON string that has git merge conflicts, resolving if possible",
"author": "Isaac Z. Schlueter <i@izs.me> (https://izs.me)",
"author": "GitHub Inc.",
"license": "ISC",
"main": "lib",
"scripts": {
"test": "tap",
"snap": "tap",
"preversion": "npm test",
"postversion": "npm publish",
"postpublish": "git push origin --follow-tags"
"postpublish": "git push origin --follow-tags",
"lint": "eslint '**/*.js'",
"postlint": "npm-template-check",
"lintfix": "npm run lint -- --fix",
"prepublishOnly": "git push origin --follow-tags",
"posttest": "npm run lint"
},
"tap": {
"check-coverage": true
},
"devDependencies": {
"tap": "^14.6.1"
"@npmcli/template-oss": "^2.3.1",
"tap": "^15.1.5"
},
"dependencies": {
"just-diff": "^3.0.1",
"just-diff-apply": "^3.0.0",
"json-parse-even-better-errors": "^2.3.0"
"json-parse-even-better-errors": "^2.3.1",
"just-diff": "^5.0.1",
"just-diff-apply": "^4.0.1"
},
"repository": {
"type": "git",
"url": "git+https://github.com/npm/parse-conflict-json.git"
},
"files": [
"index.js"
]
"bin",
"lib"
],
"templateVersion": "2.3.1",
"engines": {
"node": "^12.13.0 || ^14.15.0 || >=16"
}
}

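The new `engines` range matches the Node.js versions npm 8 itself supports. A quick, illustrative way to check a runtime against it (assumes the separate semver package is installed; it is not part of this diff):

```js
const semver = require('semver')

const supported = '^12.13.0 || ^14.15.0 || >=16'
console.log(semver.satisfies(process.version, supported))
// true on any Node.js release inside the supported range
```
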
@@ -1,5 +1,5 @@
{
"version": "8.2.0",
"version": "8.3.0",
"name": "npm",
"description": "a package manager for JavaScript",
"workspaces": [

@@ -55,7 +55,7 @@
},
"dependencies": {
"@isaacs/string-locale-compare": "^1.1.0",
"@npmcli/arborist": "^4.0.5",
"@npmcli/arborist": "^4.1.1",
"@npmcli/ci-detect": "^1.4.0",
"@npmcli/config": "^2.3.2",
"@npmcli/map-workspaces": "^2.0.0",

@@ -91,7 +91,7 @@
"libnpmteam": "^2.0.3",
"libnpmversion": "^2.0.1",
"make-fetch-happen": "^9.1.0",
"minipass": "^3.1.3",
"minipass": "^3.1.6",
"minipass-pipeline": "^1.2.4",
"mkdirp": "^1.0.4",
"mkdirp-infer-owner": "^2.0.0",

@@ -108,7 +108,7 @@
"npmlog": "^6.0.0",
"opener": "^1.5.2",
"pacote": "^12.0.2",
"parse-conflict-json": "^1.1.1",
"parse-conflict-json": "^2.0.1",
"proc-log": "^1.0.0",
"qrcode-terminal": "^0.12.0",
"read": "~1.0.7",

@@ -342,3 +342,44 @@ userconfig = "{HOME}/.npmrc"
; HOME = {HOME}
; Run \`npm config ls -l\` to show all defaults.
`

exports[`test/lib/commands/config.js TAP config list with publishConfig > output matches snapshot 1`] = `
; "cli" config from command line options

cache = "{NPMDIR}/test/lib/commands/tap-testdir-config-config-list-with-publishConfig-sandbox/cache"
prefix = "{LOCALPREFIX}"
userconfig = "{HOME}/.npmrc"

; node bin location = {EXECPATH}
; cwd = {NPMDIR}
; HOME = {HOME}
; Run \`npm config ls -l\` to show all defaults.

; "publishConfig" from {LOCALPREFIX}/package.json
; This set of config values will be used at publish-time.

_authToken = (protected)
registry = "https://some.registry"
; "env" config from environment

; cache = "{NPMDIR}/test/lib/commands/tap-testdir-config-config-list-with-publishConfig-sandbox/cache" ; overridden by cli
global-prefix = "{LOCALPREFIX}"
globalconfig = "{GLOBALPREFIX}/npmrc"
init-module = "{HOME}/.npm-init.js"
local-prefix = "{LOCALPREFIX}"
; prefix = "{LOCALPREFIX}" ; overridden by cli
user-agent = "npm/{NPM-VERSION} node/{NODE-VERSION} {PLATFORM} {ARCH} workspaces/false"
; userconfig = "{HOME}/.npmrc" ; overridden by cli

; "cli" config from command line options

cache = "{NPMDIR}/test/lib/commands/tap-testdir-config-config-list-with-publishConfig-sandbox/cache"
global = true
prefix = "{LOCALPREFIX}"
userconfig = "{HOME}/.npmrc"

; node bin location = {EXECPATH}
; cwd = {NPMDIR}
; HOME = {HOME}
; Run \`npm config ls -l\` to show all defaults.
`

@@ -107,6 +107,26 @@ t.test('config list --json', async t => {
t.matchSnapshot(sandbox.output, 'output matches snapshot')
})

t.test('config list with publishConfig', async t => {
const temp = t.testdir({
project: {
'package.json': JSON.stringify({
publishConfig: {
registry: 'https://some.registry',
_authToken: 'mytoken',
},
}),
},
})
const project = join(temp, 'project')

const sandbox = new Sandbox(t, { project })
await sandbox.run('config', ['list', ''])
await sandbox.run('config', ['list', '--global'])

t.matchSnapshot(sandbox.output, 'output matches snapshot')
})

t.test('config delete no args', async t => {
const sandbox = new Sandbox(t)

@@ -333,7 +353,13 @@ t.test('config get private key', async t => {

await t.rejects(
sandbox.run('config', ['get', '_authToken']),
'_authToken is protected',
/_authToken option is protected/,
'rejects with protected string'
)

await t.rejects(
sandbox.run('config', ['get', '//localhost:8080/:_password']),
/_password option is protected/,
'rejects with protected string'
)
})

@@ -341,8 +341,10 @@ t.test('can publish a tarball', async t => {

t.test('should check auth for default registry', async t => {
t.plan(2)
const Publish = t.mock('../../../lib/commands/publish.js')
const npm = mockNpm()
const registry = npm.config.get('registry')
const errorMessage = `This command requires you to be logged in to ${registry}`
const Publish = t.mock('../../../lib/commands/publish.js')
npm.config.getCredentialsByURI = uri => {
t.same(uri, npm.config.get('registry'), 'gets credentials for expected registry')
return {}

@@ -351,7 +353,7 @@ t.test('should check auth for default registry', async t => {

await t.rejects(
publish.exec([]),
{ message: 'This command requires you to be logged in.', code: 'ENEEDAUTH' },
{ message: errorMessage, code: 'ENEEDAUTH' },
'throws when not logged in'
)
})

@@ -359,6 +361,7 @@ t.test('should check auth for default registry', async t => {
t.test('should check auth for configured registry', async t => {
t.plan(2)
const registry = 'https://some.registry'
const errorMessage = 'This command requires you to be logged in to https://some.registry'
const Publish = t.mock('../../../lib/commands/publish.js')
const npm = mockNpm({
flatOptions: { registry },

@@ -371,7 +374,7 @@ t.test('should check auth for configured registry', async t => {

await t.rejects(
publish.exec([]),
{ message: 'This command requires you to be logged in.', code: 'ENEEDAUTH' },
{ message: errorMessage, code: 'ENEEDAUTH' },
'throws when not logged in'
)
})

@@ -379,6 +382,7 @@ t.test('should check auth for configured registry', async t => {
t.test('should check auth for scope specific registry', async t => {
t.plan(2)
const registry = 'https://some.registry'
const errorMessage = 'This command requires you to be logged in to https://some.registry'
const testDir = t.testdir({
'package.json': JSON.stringify(
{

@@ -402,7 +406,7 @@ t.test('should check auth for scope specific registry', async t => {

await t.rejects(
publish.exec([testDir]),
{ message: 'This command requires you to be logged in.', code: 'ENEEDAUTH' },
{ message: errorMessage, code: 'ENEEDAUTH' },
'throws when not logged in'
)
})

@@ -735,7 +739,7 @@ t.test('private workspaces', async t => {
})

t.test('unexpected error', async t => {
t.plan(1)
t.plan(2)

const Publish = t.mock('../../../lib/commands/publish.js', {
...mocks,

@@ -749,7 +753,9 @@ t.test('private workspaces', async t => {
},
},
'proc-log': {
notice () {},
notice (__, msg) {
t.match(msg, 'Publishing to https://registry.npmjs.org/')
},
verbose () {},
},
})

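The reworked proc-log mock takes `(__, msg)` because the log level methods receive a prefix-style first argument before the message here, and the test only asserts on the message. A small sketch of how proc-log calls surface (the prefix-first calling convention is inferred from the mock's shape, not stated in this diff):

```js
const log = require('proc-log')

// proc-log re-emits its arguments on the process 'log' event.
process.on('log', (level, prefix, message) => {
  console.log(level, JSON.stringify(prefix), message)
})

log.notice('', 'Publishing to https://registry.npmjs.org/')
// => notice "" Publishing to https://registry.npmjs.org/
```
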
@@ -12,15 +12,20 @@ t.cleanSnapshot = (path) => cleanCwd(path)

const last = arr => arr[arr.length - 1]
const range = (n) => Array.from(Array(n).keys())
const makeOldLogs = (count) => {
const makeOldLogs = (count, oldStyle) => {
const d = new Date()
d.setHours(-1)
d.setSeconds(0)
return range(count / 2).reduce((acc, i) => {
return range(oldStyle ? count : (count / 2)).reduce((acc, i) => {
const cloneDate = new Date(d.getTime())
cloneDate.setSeconds(i)
acc[LogFile.fileName(LogFile.logId(cloneDate), 0)] = 'hello'
acc[LogFile.fileName(LogFile.logId(cloneDate), 1)] = 'hello'
const dateId = LogFile.logId(cloneDate)
if (oldStyle) {
acc[`${dateId}-debug.log`] = 'hello'
} else {
acc[`${dateId}-debug-0.log`] = 'hello'
acc[`${dateId}-debug-1.log`] = 'hello'
}
return acc
}, {})
}

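For intuition, `makeOldLogs` builds a tap testdir object keyed by log file name: with `oldStyle` it creates one `-debug.log` file per timestamp, otherwise two numbered `-debug-N.log` files per timestamp. Roughly the following shape, with invented timestamps (the real keys come from `LogFile.logId`):

```js
// Illustrative shape only; the key format is an assumption.
const oldStyleDir = {
  '2021-12-01T00_00_00_000Z-debug.log': 'hello',
  '2021-12-01T00_00_01_000Z-debug.log': 'hello',
}
const newStyleDir = {
  '2021-12-01T00_00_00_000Z-debug-0.log': 'hello',
  '2021-12-01T00_00_00_000Z-debug-1.log': 'hello',
}
```
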
@@ -247,6 +252,18 @@ t.test('glob error', async t => {
t.match(last(logs).content, /error cleaning log files .* bad glob/)
})

t.test('cleans old style logs too', async t => {
const logsMax = 5
const oldLogs = 10
const { readLogs } = await loadLogFile(t, {
logsMax,
testdir: makeOldLogs(oldLogs, false),
})

const logs = await readLogs()
t.equal(logs.length, logsMax + 1)
})

t.test('rimraf error', async t => {
const logsMax = 5
const oldLogs = 10
