Change endpoint from persons to people

This commit is contained in:
xfarrow
2025-03-23 21:00:08 +01:00
parent 4ae263662c
commit d005193f63
7158 changed files with 700476 additions and 735 deletions

View File

@@ -0,0 +1,28 @@
module.exports = class CovBranch {
constructor (startLine, startCol, endLine, endCol, count) {
this.startLine = startLine
this.startCol = startCol
this.endLine = endLine
this.endCol = endCol
this.count = count
}
toIstanbul () {
const location = {
start: {
line: this.startLine,
column: this.startCol
},
end: {
line: this.endLine,
column: this.endCol
}
}
return {
type: 'branch',
line: this.startLine,
loc: location,
locations: [Object.assign({}, location)]
}
}
}
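
A minimal usage sketch (not part of this commit; the line/column values are invented) showing the Istanbul branch shape produced by toIstanbul:

// Hypothetical: a branch spanning lines 3-5, hit twice.
const CovBranch = require('./branch')
const branch = new CovBranch(3, 4, 5, 1, 2)
console.log(branch.toIstanbul())
// => { type: 'branch', line: 3,
//      loc: { start: { line: 3, column: 4 }, end: { line: 5, column: 1 } },
//      locations: [ { start: { line: 3, column: 4 }, end: { line: 5, column: 1 } } ] }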

View File

@@ -0,0 +1,29 @@
module.exports = class CovFunction {
constructor (name, startLine, startCol, endLine, endCol, count) {
this.name = name
this.startLine = startLine
this.startCol = startCol
this.endLine = endLine
this.endCol = endCol
this.count = count
}
toIstanbul () {
const loc = {
start: {
line: this.startLine,
column: this.startCol
},
end: {
line: this.endLine,
column: this.endCol
}
}
return {
name: this.name,
decl: loc,
loc,
line: this.startLine
}
}
}
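
The same pattern for functions; one detail worth noting is that decl and loc reference the same range object. A sketch with made-up values:

// Hypothetical: a function named 'add' covering lines 1-3, invoked 5 times.
const CovFunction = require('./function')
const fn = new CovFunction('add', 1, 0, 3, 1, 5)
const out = fn.toIstanbul()
console.log(out.name, out.line)   // 'add' 1
console.log(out.decl === out.loc) // true; both point at the same range object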

View File

@@ -0,0 +1,34 @@
module.exports = class CovLine {
constructor (line, startCol, lineStr) {
this.line = line
// note that startCol and endCol are absolute positions
// within a file, not relative to the line.
this.startCol = startCol
// the line length itself does not include the newline characters;
// these are, however, taken into account when computing absolute offsets.
const matchedNewLineChar = lineStr.match(/\r?\n$/u)
const newLineLength = matchedNewLineChar ? matchedNewLineChar[0].length : 0
this.endCol = startCol + lineStr.length - newLineLength
// we start with all lines having been executed, and work
// backwards zeroing out lines based on V8 output.
this.count = 1
// set by source.js during parsing, if /* c8 ignore next */ is found.
this.ignore = false
}
toIstanbul () {
return {
start: {
line: this.line,
column: 0
},
end: {
line: this.line,
column: this.endCol - this.startCol
}
}
}
}
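
A sketch of the absolute-offset bookkeeping above (two-line file assumed; offsets invented, not part of this commit):

// Hypothetical: second line of a file whose first line is 'a\n' (2 chars),
// so this line starts at absolute offset 2.
const CovLine = require('./line')
const line = new CovLine(2, 2, 'const x = 1\n')
console.log(line.endCol)       // 13 (2 + 12 - 1; the trailing newline is excluded)
console.log(line.toIstanbul()) // { start: { line: 2, column: 0 }, end: { line: 2, column: 11 } }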

View File

@@ -0,0 +1,35 @@
/**
* Something resembling a binary search: find the lowest line that intersects
* the [startCol, endCol] range, then stop expanding as soon as a line starts
* beyond the range.
*/
module.exports.sliceRange = (lines, startCol, endCol, inclusive = false) => {
let start = 0
let end = lines.length
if (inclusive) {
// I consider this a temporary solution until I find an alternative way to fix the "off by one" issue
--startCol
}
while (start < end) {
let mid = (start + end) >> 1
if (startCol >= lines[mid].endCol) {
start = mid + 1
} else if (endCol < lines[mid].startCol) {
end = mid - 1
} else {
end = mid
while (mid >= 0 && startCol < lines[mid].endCol && endCol >= lines[mid].startCol) {
--mid
}
start = mid + 1
break
}
}
while (end < lines.length && startCol < lines[end].endCol && endCol >= lines[end].startCol) {
++end
}
return lines.slice(start, end)
}
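
A small sketch of sliceRange over three synthetic lines (offsets invented; CovLine comes from the line module above):

const CovLine = require('./line')
const { sliceRange } = require('./range')
// Three lines of 'ab\n' at absolute offsets 0-2, 3-5 and 6-8.
const lines = [new CovLine(1, 0, 'ab\n'), new CovLine(2, 3, 'ab\n'), new CovLine(3, 6, 'ab\n')]
// A range covering offsets 4..7 touches lines 2 and 3.
console.log(sliceRange(lines, 4, 7).map(l => l.line)) // [ 2, 3 ]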

View File

@@ -0,0 +1,254 @@
const CovLine = require('./line')
const { sliceRange } = require('./range')
const { originalPositionFor, generatedPositionFor, GREATEST_LOWER_BOUND, LEAST_UPPER_BOUND } = require('@jridgewell/trace-mapping')
module.exports = class CovSource {
constructor (sourceRaw, wrapperLength) {
sourceRaw = sourceRaw ? sourceRaw.trimEnd() : ''
this.lines = []
this.eof = sourceRaw.length
this.shebangLength = getShebangLength(sourceRaw)
this.wrapperLength = wrapperLength - this.shebangLength
this._buildLines(sourceRaw)
}
_buildLines (source) {
let position = 0
let ignoreCount = 0
let ignoreAll = false
for (const [i, lineStr] of source.split(/(?<=\r?\n)/u).entries()) {
const line = new CovLine(i + 1, position, lineStr)
if (ignoreCount > 0) {
line.ignore = true
ignoreCount--
} else if (ignoreAll) {
line.ignore = true
}
this.lines.push(line)
position += lineStr.length
const ignoreToken = this._parseIgnore(lineStr)
if (!ignoreToken) continue
line.ignore = true
if (ignoreToken.count !== undefined) {
ignoreCount = ignoreToken.count
}
if (ignoreToken.start || ignoreToken.stop) {
ignoreAll = ignoreToken.start
ignoreCount = 0
}
}
}
/**
* Parses for comments:
* c8 ignore next
* c8 ignore next 3
* c8 ignore start
* c8 ignore stop
* And equivalent ones for v8, e.g. v8 ignore next.
* @param {string} lineStr
* @return {{count?: number, start?: boolean, stop?: boolean}|undefined}
*/
_parseIgnore (lineStr) {
const testIgnoreNextLines = lineStr.match(/^\W*\/\* (?:[cv]8|node:coverage) ignore next (?<count>[0-9]+)/)
if (testIgnoreNextLines) {
return { count: Number(testIgnoreNextLines.groups.count) }
}
// Check if comment is on its own line.
if (lineStr.match(/^\W*\/\* (?:[cv]8|node:coverage) ignore next/)) {
return { count: 1 }
}
if (lineStr.match(/\/\* ([cv]8|node:coverage) ignore next/)) {
// Won't ignore successive lines, but the current line will be ignored.
return { count: 0 }
}
const testIgnoreStartStop = lineStr.match(/\/\* [cv]8 ignore (?<mode>start|stop)/)
if (testIgnoreStartStop) {
if (testIgnoreStartStop.groups.mode === 'start') return { start: true }
if (testIgnoreStartStop.groups.mode === 'stop') return { stop: true }
}
const testNodeIgnoreStartStop = lineStr.match(/\/\* node:coverage (?<mode>enable|disable)/)
if (testNodeIgnoreStartStop) {
if (testNodeIgnoreStartStop.groups.mode === 'disable') return { start: true }
if (testNodeIgnoreStartStop.groups.mode === 'enable') return { stop: true }
}
}
// given a start column and end column in absolute offsets within
// a source file (0 - EOF), returns the relative line column positions.
offsetToOriginalRelative (sourceMap, startCol, endCol) {
const lines = sliceRange(this.lines, startCol, endCol, true)
if (!lines.length) return {}
const start = originalPositionTryBoth(
sourceMap,
lines[0].line,
Math.max(0, startCol - lines[0].startCol)
)
if (!(start && start.source)) {
return {}
}
let end = originalEndPositionFor(
sourceMap,
lines[lines.length - 1].line,
endCol - lines[lines.length - 1].startCol
)
if (!(end && end.source)) {
return {}
}
if (start.source !== end.source) {
return {}
}
if (start.line === end.line && start.column === end.column) {
end = originalPositionFor(sourceMap, {
line: lines[lines.length - 1].line,
column: endCol - lines[lines.length - 1].startCol,
bias: LEAST_UPPER_BOUND
})
end.column -= 1
}
return {
source: start.source,
startLine: start.line,
relStartCol: start.column,
endLine: end.line,
relEndCol: end.column
}
}
relativeToOffset (line, relCol) {
line = Math.max(line, 1)
if (this.lines[line - 1] === undefined) return this.eof
return Math.min(this.lines[line - 1].startCol + relCol, this.lines[line - 1].endCol)
}
}
// this implementation is pulled over from istanbul-lib-sourcemap:
// https://github.com/istanbuljs/istanbuljs/blob/master/packages/istanbul-lib-source-maps/lib/get-mapping.js
//
/**
* AST ranges are inclusive for start positions and exclusive for end positions.
* Source maps are also logically ranges over text, though interacting with
* them is generally achieved by working with explicit positions.
*
* When finding the _end_ location of an AST item, the range behavior is
* important because what we're asking for is the _end_ of whatever range
* corresponds to the end location we seek.
*
* This boils down to the following steps, conceptually, though the source-map
* library doesn't expose primitives to do this nicely:
*
* 1. Find the range on the generated file that ends at, or exclusively
* contains the end position of the AST node.
* 2. Find the range on the original file that corresponds to
* that generated range.
* 3. Find the _end_ location of that original range.
*/
function originalEndPositionFor (sourceMap, line, column) {
// Given the generated location, find the original location of the mapping
// that corresponds to a range on the generated file that overlaps the
// generated file end location. Note however that this position on its
// own is not useful because it is the position of the _start_ of the range
// on the original file, and we want the _end_ of the range.
const beforeEndMapping = originalPositionTryBoth(
sourceMap,
line,
Math.max(column - 1, 1)
)
if (beforeEndMapping.source === null) {
return null
}
// Convert that original position back to a generated one, with a bump
// to the right, and a rightward bias. Since 'generatedPositionFor' searches
// for mappings in the original-order sorted list, this will find the
// mapping that corresponds to the one immediately after the
// beforeEndMapping mapping.
const afterEndMapping = generatedPositionFor(sourceMap, {
source: beforeEndMapping.source,
line: beforeEndMapping.line,
column: beforeEndMapping.column + 1,
bias: LEAST_UPPER_BOUND
})
if (
// If this is null, it means that we've hit the end of the file,
// so we can use Infinity as the end column.
afterEndMapping.line === null ||
// If these don't match, it means that the call to
// 'generatedPositionFor' didn't find any other original mappings on
// the line we gave, so consider the binding to extend to infinity.
originalPositionFor(sourceMap, afterEndMapping).line !==
beforeEndMapping.line
) {
return {
source: beforeEndMapping.source,
line: beforeEndMapping.line,
column: Infinity
}
}
// Convert the end mapping into the real original position.
return originalPositionFor(sourceMap, afterEndMapping)
}
function originalPositionTryBoth (sourceMap, line, column) {
let original = originalPositionFor(sourceMap, {
line,
column,
bias: GREATEST_LOWER_BOUND
})
if (original.line === null) {
original = originalPositionFor(sourceMap, {
line,
column,
bias: LEAST_UPPER_BOUND
})
}
// The source maps generated by https://github.com/istanbuljs/istanbuljs
// (using @babel/core 7.7.5) behave such that a mapping mid-way through a
// line can map to an earlier line than a mapping at position 0.
// Using the line at position 0 seems to provide better reports:
//
// if (true) {
// cov_y5divc6zu().b[1][0]++;
// cov_y5divc6zu().s[3]++;
// console.info('reachable');
// } else { ... }
// ^ ^
// l5 l3
const min = originalPositionFor(sourceMap, {
line,
column: 0,
bias: GREATEST_LOWER_BOUND
})
if (min.line > original.line) {
original = min
}
return original
}
// Not required since Node 12, see: https://github.com/nodejs/node/pull/27375
const isPreNode12 = /^v1[0-1]\./u.test(process.version)
function getShebangLength (source) {
/* c8 ignore start - platform-specific */
if (isPreNode12 && source.indexOf('#!') === 0) {
const match = source.match(/(?<shebang>#!.*)/)
if (match) {
return match.groups.shebang.length
}
} else {
/* c8 ignore stop - platform-specific */
return 0
}
}
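
A sketch of how the ignore-comment parsing above plays out on a tiny synthetic source (wrapperLength 0; not part of this commit):

const CovSource = require('./source')
const src = new CovSource('const a = 1\n/* c8 ignore next */\nconst b = 2\n', 0)
console.log(src.lines.map(l => [l.line, l.ignore]))
// => [ [ 1, false ], [ 2, true ], [ 3, true ] ]
// Line 2 carries the comment itself; line 3 is the "next" line it ignores.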

View File

@@ -0,0 +1,323 @@
const assert = require('assert')
const convertSourceMap = require('convert-source-map')
const util = require('util')
const debuglog = util.debuglog('c8')
const { dirname, isAbsolute, join, resolve } = require('path')
const { fileURLToPath } = require('url')
const CovBranch = require('./branch')
const CovFunction = require('./function')
const CovSource = require('./source')
const { sliceRange } = require('./range')
const compatError = Error(`requires Node.js ${require('../package.json').engines.node}`)
const { readFileSync } = require('fs')
let readFile = () => { throw compatError }
try {
readFile = require('fs').promises.readFile
} catch (_err) {
// most likely we're on an older version of Node.js.
}
const { TraceMap } = require('@jridgewell/trace-mapping')
const isOlderNode10 = /^v10\.(([0-9]\.)|(1[0-5]\.))/u.test(process.version)
const isNode8 = /^v8\./.test(process.version)
// Injected when Node.js loads a script into the isolate, pre Node 10.16.x.
// see: https://github.com/nodejs/node/pull/21573.
const cjsWrapperLength = isOlderNode10 ? require('module').wrapper[0].length : 0
module.exports = class V8ToIstanbul {
constructor (scriptPath, wrapperLength, sources, excludePath) {
assert(typeof scriptPath === 'string', 'scriptPath must be a string')
assert(!isNode8, 'This module does not support node 8 or lower, please upgrade to node 10')
this.path = parsePath(scriptPath)
this.wrapperLength = wrapperLength === undefined ? cjsWrapperLength : wrapperLength
this.excludePath = excludePath || (() => false)
this.sources = sources || {}
this.generatedLines = []
this.branches = {}
this.functions = {}
this.covSources = []
this.rawSourceMap = undefined
this.sourceMap = undefined
this.sourceTranspiled = undefined
// Indicate that this report was generated with placeholder data from
// running --all:
this.all = false
}
async load () {
const rawSource = this.sources.source || await readFile(this.path, 'utf8')
this.rawSourceMap = this.sources.sourceMap ||
// if we find a source-map (either inline, or a .map file) we load
// both the transpiled and original source, both of which are used during
// the backflips we perform to remap absolute to relative positions.
convertSourceMap.fromSource(rawSource) || convertSourceMap.fromMapFileSource(rawSource, this._readFileFromDir.bind(this))
if (this.rawSourceMap) {
if (this.rawSourceMap.sourcemap.sources.length > 1) {
this.sourceMap = new TraceMap(this.rawSourceMap.sourcemap)
if (!this.sourceMap.sourcesContent) {
this.sourceMap.sourcesContent = await this.sourcesContentFromSources()
}
this.covSources = this.sourceMap.sourcesContent.map((rawSource, i) => ({ source: new CovSource(rawSource, this.wrapperLength), path: this.sourceMap.sources[i] }))
this.sourceTranspiled = new CovSource(rawSource, this.wrapperLength)
} else {
const candidatePath = this.rawSourceMap.sourcemap.sources.length >= 1 ? this.rawSourceMap.sourcemap.sources[0] : this.rawSourceMap.sourcemap.file
this.path = this._resolveSource(this.rawSourceMap, candidatePath || this.path)
this.sourceMap = new TraceMap(this.rawSourceMap.sourcemap)
let originalRawSource
if (this.sources.sourceMap && this.sources.sourceMap.sourcemap && this.sources.sourceMap.sourcemap.sourcesContent && this.sources.sourceMap.sourcemap.sourcesContent.length === 1) {
// If the sourcesContent field has been provided, return it rather than attempting
// to load the original source from disk.
// TODO: investigate whether there's ever a case where we hit this logic with 1:many sources.
originalRawSource = this.sources.sourceMap.sourcemap.sourcesContent[0]
} else if (this.sources.originalSource) {
// Original source may be populated on the sources object.
originalRawSource = this.sources.originalSource
} else if (this.sourceMap.sourcesContent && this.sourceMap.sourcesContent[0]) {
// perhaps sourcesContent was populated by an inline source map, or a .map file?
// TODO: investigate whether there's ever a case where we hit this logic with 1:many sources.
originalRawSource = this.sourceMap.sourcesContent[0]
} else {
// We fall back to reading the original source from disk.
originalRawSource = await readFile(this.path, 'utf8')
}
this.covSources = [{ source: new CovSource(originalRawSource, this.wrapperLength), path: this.path }]
this.sourceTranspiled = new CovSource(rawSource, this.wrapperLength)
}
} else {
this.covSources = [{ source: new CovSource(rawSource, this.wrapperLength), path: this.path }]
}
}
_readFileFromDir (filename) {
return readFileSync(resolve(dirname(this.path), filename), 'utf-8')
}
async sourcesContentFromSources () {
const fileList = this.sourceMap.sources.map(relativePath => {
const realPath = this._resolveSource(this.rawSourceMap, relativePath)
return readFile(realPath, 'utf-8')
.then(result => result)
.catch(err => {
debuglog(`failed to load ${realPath}: ${err.message}`)
})
})
return await Promise.all(fileList)
}
destroy () {
// no longer necessary, but preserved for backwards compatibility.
}
_resolveSource (rawSourceMap, sourcePath) {
if (sourcePath.startsWith('file://')) {
return fileURLToPath(sourcePath)
}
sourcePath = sourcePath.replace(/^webpack:\/\//, '')
const sourceRoot = rawSourceMap.sourcemap.sourceRoot ? rawSourceMap.sourcemap.sourceRoot.replace('file://', '') : ''
const candidatePath = join(sourceRoot, sourcePath)
if (isAbsolute(candidatePath)) {
return candidatePath
} else {
return resolve(dirname(this.path), candidatePath)
}
}
applyCoverage (blocks) {
blocks.forEach(block => {
block.ranges.forEach((range, i) => {
const isEmptyCoverage = block.functionName === '(empty-report)'
const { startCol, endCol, path, covSource } = this._maybeRemapStartColEndCol(range, isEmptyCoverage)
if (this.excludePath(path)) {
return
}
let lines
if (isEmptyCoverage) {
// (empty-report): this results in a report with all lines zeroed out.
lines = covSource.lines.filter((line) => {
line.count = 0
return true
})
this.all = lines.length > 0
} else {
lines = sliceRange(covSource.lines, startCol, endCol)
}
if (!lines.length) {
return
}
const startLineInstance = lines[0]
const endLineInstance = lines[lines.length - 1]
if (block.isBlockCoverage) {
this.branches[path] = this.branches[path] || []
// record branches.
this.branches[path].push(new CovBranch(
startLineInstance.line,
startCol - startLineInstance.startCol,
endLineInstance.line,
endCol - endLineInstance.startCol,
range.count
))
// if block-level granularity is enabled, we still create a single
// CovFunction tracking object for each set of ranges.
if (block.functionName && i === 0) {
this.functions[path] = this.functions[path] || []
this.functions[path].push(new CovFunction(
block.functionName,
startLineInstance.line,
startCol - startLineInstance.startCol,
endLineInstance.line,
endCol - endLineInstance.startCol,
range.count
))
}
} else if (block.functionName) {
this.functions[path] = this.functions[path] || []
// record functions.
this.functions[path].push(new CovFunction(
block.functionName,
startLineInstance.line,
startCol - startLineInstance.startCol,
endLineInstance.line,
endCol - endLineInstance.startCol,
range.count
))
}
// record the lines (we record these as statements, such that we're
// compatible with Istanbul 2.0).
lines.forEach(line => {
// make sure branch spans entire line; don't record 'goodbye'
// branch in `const foo = true ? 'hello' : 'goodbye'` as a
// 0 for line coverage.
//
// All lines start out with coverage of 1, and are later set to 0
// if they are not invoked; line.ignore prevents a line from being
// set to 0, and is set if the special comment /* c8 ignore next */
// is used.
if (startCol <= line.startCol && endCol >= line.endCol && !line.ignore) {
line.count = range.count
}
})
})
})
}
_maybeRemapStartColEndCol (range, isEmptyCoverage) {
let covSource = this.covSources[0].source
const covSourceWrapperLength = isEmptyCoverage ? 0 : covSource.wrapperLength
let startCol = Math.max(0, range.startOffset - covSourceWrapperLength)
let endCol = Math.min(covSource.eof, range.endOffset - covSourceWrapperLength)
let path = this.path
if (this.sourceMap) {
const sourceTranspiledWrapperLength = isEmptyCoverage ? 0 : this.sourceTranspiled.wrapperLength
startCol = Math.max(0, range.startOffset - sourceTranspiledWrapperLength)
endCol = Math.min(this.sourceTranspiled.eof, range.endOffset - sourceTranspiledWrapperLength)
const { startLine, relStartCol, endLine, relEndCol, source } = this.sourceTranspiled.offsetToOriginalRelative(
this.sourceMap,
startCol,
endCol
)
const matchingSource = this.covSources.find(covSource => covSource.path === source)
covSource = matchingSource ? matchingSource.source : this.covSources[0].source
path = matchingSource ? matchingSource.path : this.covSources[0].path
// next we convert these relative positions back to absolute positions
// in the original source (which is the format expected in the next step).
startCol = covSource.relativeToOffset(startLine, relStartCol)
endCol = covSource.relativeToOffset(endLine, relEndCol)
}
return {
path,
covSource,
startCol,
endCol
}
}
getInnerIstanbul (source, path) {
// We apply the "Resolving Sources" logic (as defined in
// sourcemaps.info/spec.html) as a final step for 1:many source maps.
// for 1:1 source maps, the resolve logic is applied while loading.
//
// TODO: could we move the resolving logic for 1:1 source maps to the final
// step as well? currently this breaks some tests in c8.
let resolvedPath = path
if (this.rawSourceMap && this.rawSourceMap.sourcemap.sources.length > 1) {
resolvedPath = this._resolveSource(this.rawSourceMap, path)
}
if (this.excludePath(resolvedPath)) {
return
}
return {
[resolvedPath]: {
path: resolvedPath,
all: this.all,
...this._statementsToIstanbul(source, path),
...this._branchesToIstanbul(source, path),
...this._functionsToIstanbul(source, path)
}
}
}
toIstanbul () {
return this.covSources.reduce((istanbulOuter, { source, path }) => Object.assign(istanbulOuter, this.getInnerIstanbul(source, path)), {})
}
_statementsToIstanbul (source, path) {
const statements = {
statementMap: {},
s: {}
}
source.lines.forEach((line, index) => {
statements.statementMap[`${index}`] = line.toIstanbul()
statements.s[`${index}`] = line.ignore ? 1 : line.count
})
return statements
}
_branchesToIstanbul (source, path) {
const branches = {
branchMap: {},
b: {}
}
this.branches[path] = this.branches[path] || []
this.branches[path].forEach((branch, index) => {
const srcLine = source.lines[branch.startLine - 1]
const ignore = srcLine === undefined ? true : srcLine.ignore
branches.branchMap[`${index}`] = branch.toIstanbul()
branches.b[`${index}`] = [ignore ? 1 : branch.count]
})
return branches
}
_functionsToIstanbul (source, path) {
const functions = {
fnMap: {},
f: {}
}
this.functions[path] = this.functions[path] || []
this.functions[path].forEach((fn, index) => {
const srcLine = source.lines[fn.startLine - 1]
const ignore = srcLine === undefined ? true : srcLine.ignore
functions.fnMap[`${index}`] = fn.toIstanbul()
functions.f[`${index}`] = ignore ? 1 : fn.count
})
return functions
}
}
function parsePath (scriptPath) {
return scriptPath.startsWith('file://') ? fileURLToPath(scriptPath) : scriptPath
}
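
A hedged end-to-end sketch of how this class is typically driven. The published package wraps it in a factory; the require path, the toIstanbulReport helper name, and the shape of v8Functions are assumptions based on V8's ScriptCoverage format, not part of this commit:

const V8ToIstanbul = require('./lib/v8-to-istanbul') // hypothetical path to the file above
async function toIstanbulReport (scriptPath, v8Functions) {
  // v8Functions is assumed to be the `functions` array of a V8 ScriptCoverage entry.
  const converter = new V8ToIstanbul(scriptPath, 0)
  await converter.load()               // reads the source (and source map, if any)
  converter.applyCoverage(v8Functions) // remaps V8 ranges onto lines/branches/functions
  return converter.toIstanbul()        // Istanbul-style object keyed by resolved source path
}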