refactor(ngcc): move sourcemaps into ngtsc (#37114)
The `SourceFile` class and its associated code are general and reusable in other projects (such as `@angular/localize`). Moving them into `ngtsc` makes them more easily shared. PR Close #37114
This commit is contained in:

committed by
Misko Hevery

parent
6de5a12a9d
commit
2b53b07c70
@ -25,6 +25,7 @@ ts_library(
|
||||
"//packages/compiler-cli/src/ngtsc/perf",
|
||||
"//packages/compiler-cli/src/ngtsc/reflection",
|
||||
"//packages/compiler-cli/src/ngtsc/scope",
|
||||
"//packages/compiler-cli/src/ngtsc/sourcemaps",
|
||||
"//packages/compiler-cli/src/ngtsc/transform",
|
||||
"//packages/compiler-cli/src/ngtsc/translator",
|
||||
"//packages/compiler-cli/src/ngtsc/util",
|
||||
@ -36,8 +37,6 @@ ts_library(
|
||||
"@npm//dependency-graph",
|
||||
"@npm//magic-string",
|
||||
"@npm//semver",
|
||||
"@npm//source-map",
|
||||
"@npm//sourcemap-codec",
|
||||
"@npm//typescript",
|
||||
],
|
||||
)
|
||||
|
@ -11,8 +11,7 @@ import * as ts from 'typescript';
|
||||
|
||||
import {absoluteFrom, absoluteFromSourceFile, basename, FileSystem} from '../../../src/ngtsc/file_system';
|
||||
import {Logger} from '../../../src/ngtsc/logging';
|
||||
import {RawSourceMap} from '../sourcemaps/raw_source_map';
|
||||
import {SourceFileLoader} from '../sourcemaps/source_file_loader';
|
||||
import {RawSourceMap, SourceFileLoader} from '../../../src/ngtsc/sourcemaps';
|
||||
|
||||
import {FileToWrite} from './utils';
|
||||
|
||||
|
@ -1,21 +0,0 @@
|
||||
/**
|
||||
* @license
|
||||
* Copyright Google LLC All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by an MIT-style license that can be
|
||||
* found in the LICENSE file at https://angular.io/license
|
||||
*/
|
||||
|
||||
/**
 * This interface is the basic structure of the JSON in a raw source map that one might load from
 * disk (i.e. the standard source-map v3 format).
 */
export interface RawSourceMap {
  /** The version of the source-map format (usually `3`). */
  version: number|string;
  /** The name of the generated file that this source map is associated with. */
  file?: string;
  /** A root path that is prepended to each of the `sources` entries when resolving them. */
  sourceRoot?: string;
  /** The paths of the original source files that the mappings refer into, by index. */
  sources: string[];
  /** The symbol names referenced by the mappings, by index. */
  names: string[];
  /** The contents of each of the `sources`, where available; `null` for missing entries. */
  sourcesContent?: (string|null)[];
  /** The VLQ-encoded mapping segments. */
  mappings: string;
}
|
@ -1,57 +0,0 @@
|
||||
/**
|
||||
* @license
|
||||
* Copyright Google LLC All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by an MIT-style license that can be
|
||||
* found in the LICENSE file at https://angular.io/license
|
||||
*/
|
||||
|
||||
|
||||
/**
 * A marker that indicates the start of a segment in a mapping.
 *
 * The end of a segment is indicated by the first segment-marker of another mapping whose start
 * is greater or equal to this one.
 */
export interface SegmentMarker {
  /** The (zero-based) line of this marker in its source file. */
  readonly line: number;
  /** The (zero-based) column of this marker on its line. */
  readonly column: number;
  /** The absolute character offset of this marker within the source file's contents. */
  readonly position: number;
  /** The next segment-marker in the same source file, in position order, if it has been linked. */
  next: SegmentMarker|undefined;
}
|
||||
|
||||
/**
|
||||
* Compare two segment-markers, for use in a search or sorting algorithm.
|
||||
*
|
||||
* @returns a positive number if `a` is after `b`, a negative number if `b` is after `a`
|
||||
* and zero if they are at the same position.
|
||||
*/
|
||||
export function compareSegments(a: SegmentMarker, b: SegmentMarker): number {
|
||||
return a.position - b.position;
|
||||
}
|
||||
|
||||
/**
|
||||
* Return a new segment-marker that is offset by the given number of characters.
|
||||
*
|
||||
* @param startOfLinePositions the position of the start of each line of content of the source file
|
||||
* whose segment-marker we are offsetting.
|
||||
* @param marker the segment to offset.
|
||||
* @param offset the number of character to offset by.
|
||||
*/
|
||||
export function offsetSegment(
|
||||
startOfLinePositions: number[], marker: SegmentMarker, offset: number): SegmentMarker {
|
||||
if (offset === 0) {
|
||||
return marker;
|
||||
}
|
||||
|
||||
let line = marker.line;
|
||||
const position = marker.position + offset;
|
||||
while (line < startOfLinePositions.length - 1 && startOfLinePositions[line + 1] <= position) {
|
||||
line++;
|
||||
}
|
||||
while (line > 0 && startOfLinePositions[line] > position) {
|
||||
line--;
|
||||
}
|
||||
const column = position - startOfLinePositions[line];
|
||||
return {line, column, position, next: undefined};
|
||||
}
|
@ -1,406 +0,0 @@
|
||||
/**
|
||||
* @license
|
||||
* Copyright Google LLC All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by an MIT-style license that can be
|
||||
* found in the LICENSE file at https://angular.io/license
|
||||
*/
|
||||
import {removeComments, removeMapFileComments} from 'convert-source-map';
|
||||
import {decode, encode, SourceMapMappings, SourceMapSegment} from 'sourcemap-codec';
|
||||
|
||||
import {AbsoluteFsPath, dirname, relative} from '../../../src/ngtsc/file_system';
|
||||
|
||||
import {RawSourceMap} from './raw_source_map';
|
||||
import {compareSegments, offsetSegment, SegmentMarker} from './segment_marker';
|
||||
|
||||
export function removeSourceMapComments(contents: string): string {
|
||||
return removeMapFileComments(removeComments(contents)).replace(/\n\n$/, '\n');
|
||||
}
|
||||
|
||||
/**
 * A source file, its contents, and the flattened mappings from its associated source map.
 */
export class SourceFile {
  /**
   * The parsed mappings that have been flattened so that any intermediate source mappings have been
   * flattened.
   *
   * The result is that any source file mentioned in the flattened mappings have no source map (are
   * pure original source files).
   */
  readonly flattenedMappings: Mapping[];
  /** The character offset of the start of each line of `contents`. */
  readonly startOfLinePositions: number[];

  constructor(
      /** The path to this source file. */
      readonly sourcePath: AbsoluteFsPath,
      /** The contents of this source file. */
      readonly contents: string,
      /** The raw source map (if any) associated with this source file. */
      readonly rawMap: RawSourceMap|null,
      /** Whether this source file's source map was inline or external. */
      readonly inline: boolean,
      /** Any source files referenced by the raw source map associated with this source file. */
      readonly sources: (SourceFile|null)[]) {
    // Strip source-map comments from the stored contents; a fresh map can be rendered later
    // from the flattened mappings via `renderFlattenedSourceMap()`.
    this.contents = removeSourceMapComments(contents);
    this.startOfLinePositions = computeStartOfLinePositions(this.contents);
    this.flattenedMappings = this.flattenMappings();
  }

  /**
   * Render the raw source map generated from the flattened mappings.
   */
  renderFlattenedSourceMap(): RawSourceMap {
    const sources: SourceFile[] = [];
    const names: string[] = [];

    const mappings: SourceMapMappings = [];

    for (const mapping of this.flattenedMappings) {
      const sourceIndex = findIndexOrAdd(sources, mapping.originalSource);
      // A raw segment is [generatedColumn, sourceIndex, originalLine, originalColumn]
      // with an optional trailing nameIndex.
      const mappingArray: SourceMapSegment = [
        mapping.generatedSegment.column,
        sourceIndex,
        mapping.originalSegment.line,
        mapping.originalSegment.column,
      ];
      if (mapping.name !== undefined) {
        const nameIndex = findIndexOrAdd(names, mapping.name);
        mappingArray.push(nameIndex);
      }

      // Ensure a mapping line array for this mapping.
      const line = mapping.generatedSegment.line;
      while (line >= mappings.length) {
        mappings.push([]);
      }
      // Add this mapping to the line
      mappings[line].push(mappingArray);
    }

    const sourcePathDir = dirname(this.sourcePath);
    const sourceMap: RawSourceMap = {
      version: 3,
      file: relative(sourcePathDir, this.sourcePath),
      sources: sources.map(sf => relative(sourcePathDir, sf.sourcePath)),
      names,
      mappings: encode(mappings),
      sourcesContent: sources.map(sf => sf.contents),
    };
    return sourceMap;
  }

  /**
   * Flatten the parsed mappings for this source file, so that all the mappings are to pure original
   * source files with no transitive source maps.
   */
  private flattenMappings(): Mapping[] {
    const mappings = parseMappings(this.rawMap, this.sources, this.startOfLinePositions);
    ensureOriginalSegmentLinks(mappings);
    const flattenedMappings: Mapping[] = [];
    for (let mappingIndex = 0; mappingIndex < mappings.length; mappingIndex++) {
      const aToBmapping = mappings[mappingIndex];
      const bSource = aToBmapping.originalSource;
      if (bSource.flattenedMappings.length === 0) {
        // The b source file has no mappings of its own (i.e. it is a pure original file)
        // so just use the mapping as-is.
        flattenedMappings.push(aToBmapping);
        continue;
      }

      // The `incomingStart` and `incomingEnd` are the `SegmentMarker`s in `B` that represent the
      // section of `B` source file that is being mapped to by the current `aToBmapping`.
      //
      // For example, consider the mappings from A to B:
      //
      // src A   src B   mapping
      //
      //   a ----- a     [0, 0]
      //   b       b
      //   f -  /- c     [4, 2]
      //   g  \ /  d
      //   c -/\   e
      //   d    \- f     [2, 5]
      //   e
      //
      // For mapping [0,0] the incoming start and end are 0 and 2 (i.e. the range a, b, c)
      // For mapping [4,2] the incoming start and end are 2 and 5 (i.e. the range c, d, e, f)
      //
      const incomingStart = aToBmapping.originalSegment;
      const incomingEnd = incomingStart.next;

      // The `outgoingStartIndex` and `outgoingEndIndex` are the indices of the range of mappings
      // that leave `b` that we are interested in merging with the aToBmapping.
      // We actually care about all the markers from the last bToCmapping directly before the
      // `incomingStart` to the last bToCmaping directly before the `incomingEnd`, inclusive.
      //
      // For example, if we consider the range 2 to 5 from above (i.e. c, d, e, f) with the
      // following mappings from B to C:
      //
      //   src B   src C   mapping
      //     a
      //     b ----- b     [1, 0]
      //   - c       c
      //  |  d       d
      //  |  e ----- 1     [4, 3]
      //   - f  \    2
      //         \   3
      //          \- e     [4, 6]
      //
      // The range with `incomingStart` at 2 and `incomingEnd` at 5 has outgoing start mapping of
      // [1,0] and outgoing end mapping of [4, 6], which also includes [4, 3].
      //
      let outgoingStartIndex =
          findLastMappingIndexBefore(bSource.flattenedMappings, incomingStart, false, 0);
      if (outgoingStartIndex < 0) {
        outgoingStartIndex = 0;
      }
      const outgoingEndIndex = incomingEnd !== undefined ?
          findLastMappingIndexBefore(
              bSource.flattenedMappings, incomingEnd, true, outgoingStartIndex) :
          bSource.flattenedMappings.length - 1;

      for (let bToCmappingIndex = outgoingStartIndex; bToCmappingIndex <= outgoingEndIndex;
           bToCmappingIndex++) {
        const bToCmapping: Mapping = bSource.flattenedMappings[bToCmappingIndex];
        flattenedMappings.push(mergeMappings(this, aToBmapping, bToCmapping));
      }
    }
    return flattenedMappings;
  }
}
|
||||
|
||||
/**
|
||||
*
|
||||
* @param mappings The collection of mappings whose segment-markers we are searching.
|
||||
* @param marker The segment-marker to match against those of the given `mappings`.
|
||||
* @param exclusive If exclusive then we must find a mapping with a segment-marker that is
|
||||
* exclusively earlier than the given `marker`.
|
||||
* If not exclusive then we can return the highest mappings with an equivalent segment-marker to the
|
||||
* given `marker`.
|
||||
* @param lowerIndex If provided, this is used as a hint that the marker we are searching for has an
|
||||
* index that is no lower than this.
|
||||
*/
|
||||
export function findLastMappingIndexBefore(
|
||||
mappings: Mapping[], marker: SegmentMarker, exclusive: boolean, lowerIndex: number): number {
|
||||
let upperIndex = mappings.length - 1;
|
||||
const test = exclusive ? -1 : 0;
|
||||
|
||||
if (compareSegments(mappings[lowerIndex].generatedSegment, marker) > test) {
|
||||
// Exit early since the marker is outside the allowed range of mappings.
|
||||
return -1;
|
||||
}
|
||||
|
||||
let matchingIndex = -1;
|
||||
while (lowerIndex <= upperIndex) {
|
||||
const index = (upperIndex + lowerIndex) >> 1;
|
||||
if (compareSegments(mappings[index].generatedSegment, marker) <= test) {
|
||||
matchingIndex = index;
|
||||
lowerIndex = index + 1;
|
||||
} else {
|
||||
upperIndex = index - 1;
|
||||
}
|
||||
}
|
||||
return matchingIndex;
|
||||
}
|
||||
|
||||
/**
 * A Mapping consists of two segment markers: one in the generated source and one in the original
 * source, which indicate the start of each segment. The end of a segment is indicated by the first
 * segment marker of another mapping whose start is greater or equal to this one.
 *
 * It may also include a name associated with the segment being mapped.
 */
export interface Mapping {
  /** The start of the mapped segment in the generated source file. */
  readonly generatedSegment: SegmentMarker;
  /** The original source file that this segment maps back to. */
  readonly originalSource: SourceFile;
  /** The start of the mapped segment in the original source file. */
  readonly originalSegment: SegmentMarker;
  /** The symbol name associated with this segment, if any. */
  readonly name?: string;
}
|
||||
|
||||
/**
|
||||
* Find the index of `item` in the `items` array.
|
||||
* If it is not found, then push `item` to the end of the array and return its new index.
|
||||
*
|
||||
* @param items the collection in which to look for `item`.
|
||||
* @param item the item to look for.
|
||||
* @returns the index of the `item` in the `items` array.
|
||||
*/
|
||||
function findIndexOrAdd<T>(items: T[], item: T): number {
|
||||
const itemIndex = items.indexOf(item);
|
||||
if (itemIndex > -1) {
|
||||
return itemIndex;
|
||||
} else {
|
||||
items.push(item);
|
||||
return items.length - 1;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
/**
 * Merge two mappings that go from A to B and B to C, to result in a mapping that goes from A to C.
 */
export function mergeMappings(generatedSource: SourceFile, ab: Mapping, bc: Mapping): Mapping {
  // Prefer the name from the more "original" B->C mapping; fall back to the A->B name.
  const name = bc.name || ab.name;

  // We need to modify the segment-markers of the new mapping to take into account the shifts that
  // occur due to the combination of the two mappings.
  // For example:

  // * Simple map where the B->C starts at the same place the A->B ends:
  //
  // ```
  // A: 1 2 b c d
  //        |        A->B [2,0]
  //        |              |
  // B:     b c d    A->C [2,1]
  //        |              |
  //        |        B->C [0,1]
  // C:   a b c d e
  // ```

  // * More complicated case where diffs of segment-markers is needed:
  //
  // ```
  // A: b 1 2 c d
  //      \
  //       |           A->B [0,1*]   [0,1*]
  //       |                  |        |+3
  // B: a b 1 2 c d    A->C [0,1]    [3,2]
  //    |    /          |+1        |
  //    |   /          B->C [0*,0] [4*,2]
  //    |  /
  // C: a b c d e
  // ```
  //
  // `[0,1]` mapping from A->C:
  // The difference between the "original segment-marker" of A->B (1*) and the "generated
  // segment-marker of B->C (0*): `1 - 0 = +1`.
  // Since it is positive we must increment the "original segment-marker" with `1` to give [0,1].
  //
  // `[3,2]` mapping from A->C:
  // The difference between the "original segment-marker" of A->B (1*) and the "generated
  // segment-marker" of B->C (4*): `1 - 4 = -3`.
  // Since it is negative we must increment the "generated segment-marker" with `3` to give [3,2].

  const diff = compareSegments(bc.generatedSegment, ab.originalSegment);
  if (diff > 0) {
    return {
      name,
      generatedSegment:
          offsetSegment(generatedSource.startOfLinePositions, ab.generatedSegment, diff),
      originalSource: bc.originalSource,
      originalSegment: bc.originalSegment,
    };
  } else {
    return {
      name,
      generatedSegment: ab.generatedSegment,
      originalSource: bc.originalSource,
      originalSegment:
          offsetSegment(bc.originalSource.startOfLinePositions, bc.originalSegment, -diff),
    };
  }
}
|
||||
|
||||
/**
 * Parse the `rawMappings` into an array of parsed mappings, which reference source-files provided
 * in the `sources` parameter.
 */
export function parseMappings(
    rawMap: RawSourceMap|null, sources: (SourceFile|null)[],
    generatedSourceStartOfLinePositions: number[]): Mapping[] {
  if (rawMap === null) {
    return [];
  }

  const rawMappings = decode(rawMap.mappings);
  if (rawMappings === null) {
    return [];
  }

  const mappings: Mapping[] = [];
  for (let generatedLine = 0; generatedLine < rawMappings.length; generatedLine++) {
    const generatedLineMappings = rawMappings[generatedLine];
    for (const rawMapping of generatedLineMappings) {
      // Only segments with at least four fields reference an original source; shorter
      // segments carry no source information and are skipped.
      if (rawMapping.length >= 4) {
        const originalSource = sources[rawMapping[1]!];
        if (originalSource === null || originalSource === undefined) {
          // the original source is missing so ignore this mapping
          continue;
        }
        const generatedColumn = rawMapping[0];
        // A fifth field, if present, is an index into the map's `names` array.
        const name = rawMapping.length === 5 ? rawMap.names[rawMapping[4]] : undefined;
        const line = rawMapping[2]!;
        const column = rawMapping[3]!;
        const generatedSegment: SegmentMarker = {
          line: generatedLine,
          column: generatedColumn,
          position: generatedSourceStartOfLinePositions[generatedLine] + generatedColumn,
          next: undefined,
        };
        const originalSegment: SegmentMarker = {
          line,
          column,
          position: originalSource.startOfLinePositions[line] + column,
          next: undefined,
        };
        mappings.push({name, generatedSegment, originalSegment, originalSource});
      }
    }
  }
  return mappings;
}
|
||||
|
||||
/**
|
||||
* Extract the segment markers from the original source files in each mapping of an array of
|
||||
* `mappings`.
|
||||
*
|
||||
* @param mappings The mappings whose original segments we want to extract
|
||||
* @returns Return a map from original source-files (referenced in the `mappings`) to arrays of
|
||||
* segment-markers sorted by their order in their source file.
|
||||
*/
|
||||
export function extractOriginalSegments(mappings: Mapping[]): Map<SourceFile, SegmentMarker[]> {
|
||||
const originalSegments = new Map<SourceFile, SegmentMarker[]>();
|
||||
for (const mapping of mappings) {
|
||||
const originalSource = mapping.originalSource;
|
||||
if (!originalSegments.has(originalSource)) {
|
||||
originalSegments.set(originalSource, []);
|
||||
}
|
||||
const segments = originalSegments.get(originalSource)!;
|
||||
segments.push(mapping.originalSegment);
|
||||
}
|
||||
originalSegments.forEach(segmentMarkers => segmentMarkers.sort(compareSegments));
|
||||
return originalSegments;
|
||||
}
|
||||
|
||||
/**
|
||||
* Update the original segments of each of the given `mappings` to include a link to the next
|
||||
* segment in the source file.
|
||||
*
|
||||
* @param mappings the mappings whose segments should be updated
|
||||
*/
|
||||
export function ensureOriginalSegmentLinks(mappings: Mapping[]): void {
|
||||
const segmentsBySource = extractOriginalSegments(mappings);
|
||||
segmentsBySource.forEach(markers => {
|
||||
for (let i = 0; i < markers.length - 1; i++) {
|
||||
markers[i].next = markers[i + 1];
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
export function computeStartOfLinePositions(str: string) {
|
||||
// The `1` is to indicate a newline character between the lines.
|
||||
// Note that in the actual contents there could be more than one character that indicates a
|
||||
// newline
|
||||
// - e.g. \r\n - but that is not important here since segment-markers are in line/column pairs and
|
||||
// so differences in length due to extra `\r` characters do not affect the algorithms.
|
||||
const NEWLINE_MARKER_OFFSET = 1;
|
||||
const lineLengths = computeLineLengths(str);
|
||||
const startPositions = [0]; // First line starts at position 0
|
||||
for (let i = 0; i < lineLengths.length - 1; i++) {
|
||||
startPositions.push(startPositions[i] + lineLengths[i] + NEWLINE_MARKER_OFFSET);
|
||||
}
|
||||
return startPositions;
|
||||
}
|
||||
|
||||
function computeLineLengths(str: string): number[] {
|
||||
return (str.split(/\r?\n/)).map(s => s.length);
|
||||
}
|
@ -1,179 +0,0 @@
|
||||
/**
|
||||
* @license
|
||||
* Copyright Google LLC All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by an MIT-style license that can be
|
||||
* found in the LICENSE file at https://angular.io/license
|
||||
*/
|
||||
import {commentRegex, fromComment, mapFileCommentRegex} from 'convert-source-map';
|
||||
|
||||
import {absoluteFrom, AbsoluteFsPath, FileSystem} from '../../../src/ngtsc/file_system';
|
||||
import {Logger} from '../../../src/ngtsc/logging';
|
||||
|
||||
import {RawSourceMap} from './raw_source_map';
|
||||
import {SourceFile} from './source_file';
|
||||
|
||||
/**
 * This class can be used to load a source file, its associated source map and any upstream sources.
 *
 * Since a source file might reference (or include) a source map, this class can load those too.
 * Since a source map might reference other source files, these are also loaded as needed.
 *
 * This is done recursively. The result is a "tree" of `SourceFile` objects, each containing
 * mappings to other `SourceFile` objects as necessary.
 */
export class SourceFileLoader {
  // The chain of paths currently being loaded, used to detect circular source-map references.
  private currentPaths: AbsoluteFsPath[] = [];

  constructor(private fs: FileSystem, private logger: Logger) {}

  /**
   * Load a source file, compute its source map, and recursively load any referenced source files.
   *
   * @param sourcePath The path to the source file to load.
   * @param contents The contents of the source file to load.
   * @param mapAndPath The raw source-map and the path to the source-map file.
   * @returns a SourceFile object created from the `contents` and provided source-map info.
   */
  loadSourceFile(sourcePath: AbsoluteFsPath, contents: string, mapAndPath: MapAndPath): SourceFile;
  /**
   * The overload used internally to load source files referenced in a source-map.
   *
   * In this case there is no guarantee that it will return a non-null SourceMap.
   *
   * @param sourcePath The path to the source file to load.
   * @param contents The contents of the source file to load, if provided inline.
   * If it is not known the contents will be read from the file at the `sourcePath`.
   * @param mapAndPath The raw source-map and the path to the source-map file.
   *
   * @returns a SourceFile if the content for one was provided or able to be loaded from disk,
   * `null` otherwise.
   */
  loadSourceFile(sourcePath: AbsoluteFsPath, contents?: string|null, mapAndPath?: null): SourceFile
      |null;
  loadSourceFile(
      sourcePath: AbsoluteFsPath, contents: string|null = null,
      mapAndPath: MapAndPath|null = null): SourceFile|null {
    // Snapshot the tracked paths so they can be restored when this recursion level finishes.
    const previousPaths = this.currentPaths.slice();
    try {
      if (contents === null) {
        if (!this.fs.exists(sourcePath)) {
          return null;
        }
        contents = this.readSourceFile(sourcePath);
      }

      // If not provided try to load the source map based on the source itself
      if (mapAndPath === null) {
        mapAndPath = this.loadSourceMap(sourcePath, contents);
      }

      let map: RawSourceMap|null = null;
      let inline = true;
      let sources: (SourceFile|null)[] = [];
      if (mapAndPath !== null) {
        // Referenced sources are resolved relative to the map file if it is external,
        // or relative to the source file itself if the map was inline.
        const basePath = mapAndPath.mapPath || sourcePath;
        sources = this.processSources(basePath, mapAndPath.map);
        map = mapAndPath.map;
        inline = mapAndPath.mapPath === null;
      }

      return new SourceFile(sourcePath, contents, map, inline, sources);
    } catch (e) {
      // Loading is best-effort: warn and return `null` rather than failing the whole operation.
      this.logger.warn(
          `Unable to fully load ${sourcePath} for source-map flattening: ${e.message}`);
      return null;
    } finally {
      // We are finished with this recursion so revert the paths being tracked
      this.currentPaths = previousPaths;
    }
  }

  /**
   * Find the source map associated with the source file whose `sourcePath` and `contents` are
   * provided.
   *
   * Source maps can be inline, as part of a base64 encoded comment, or external as a separate file
   * whose path is indicated in a comment or implied from the name of the source file itself.
   */
  private loadSourceMap(sourcePath: AbsoluteFsPath, contents: string): MapAndPath|null {
    // 1) An inline base64-encoded source-map comment.
    const inline = commentRegex.exec(contents);
    if (inline !== null) {
      return {map: fromComment(inline.pop()!).sourcemap, mapPath: null};
    }

    // 2) A comment pointing at an external source-map file.
    const external = mapFileCommentRegex.exec(contents);
    if (external) {
      try {
        const fileName = external[1] || external[2];
        const externalMapPath = this.fs.resolve(this.fs.dirname(sourcePath), fileName);
        return {map: this.readRawSourceMap(externalMapPath), mapPath: externalMapPath};
      } catch (e) {
        this.logger.warn(
            `Unable to fully load ${sourcePath} for source-map flattening: ${e.message}`);
        return null;
      }
    }

    // 3) No comment at all: fall back to the conventional `<sourcePath>.map` sibling file.
    const impliedMapPath = absoluteFrom(sourcePath + '.map');
    if (this.fs.exists(impliedMapPath)) {
      return {map: this.readRawSourceMap(impliedMapPath), mapPath: impliedMapPath};
    }

    return null;
  }

  /**
   * Iterate over each of the "sources" for this source file's source map, recursively loading each
   * source file and its associated source map.
   */
  private processSources(basePath: AbsoluteFsPath, map: RawSourceMap): (SourceFile|null)[] {
    const sourceRoot = this.fs.resolve(this.fs.dirname(basePath), map.sourceRoot || '');
    return map.sources.map((source, index) => {
      const path = this.fs.resolve(sourceRoot, source);
      // Prefer the embedded `sourcesContent` entry; fall back to reading from disk (inside
      // `loadSourceFile`) when it is absent.
      const content = map.sourcesContent && map.sourcesContent[index] || null;
      return this.loadSourceFile(path, content, null);
    });
  }

  /**
   * Load the contents of the source file from disk.
   *
   * @param sourcePath The path to the source file.
   */
  private readSourceFile(sourcePath: AbsoluteFsPath): string {
    this.trackPath(sourcePath);
    return this.fs.readFile(sourcePath);
  }

  /**
   * Load the source map from the file at `mapPath`, parsing its JSON contents into a `RawSourceMap`
   * object.
   *
   * @param mapPath The path to the source-map file.
   */
  private readRawSourceMap(mapPath: AbsoluteFsPath): RawSourceMap {
    this.trackPath(mapPath);
    return JSON.parse(this.fs.readFile(mapPath));
  }

  /**
   * Track source file paths if we have loaded them from disk so that we don't get into an infinite
   * recursion.
   */
  private trackPath(path: AbsoluteFsPath): void {
    if (this.currentPaths.includes(path)) {
      throw new Error(
          `Circular source file mapping dependency: ${this.currentPaths.join(' -> ')} -> ${path}`);
    }
    this.currentPaths.push(path);
  }
}
|
||||
|
||||
/** A small helper structure that is returned from `loadSourceMap()`. */
interface MapAndPath {
  /** The path to the source map if it was external or `null` if it was inline. */
  mapPath: AbsoluteFsPath|null;
  /** The raw source map itself, parsed from JSON or decoded from an inline comment. */
  map: RawSourceMap;
}
|
@ -1,109 +0,0 @@
|
||||
/**
|
||||
* @license
|
||||
* Copyright Google LLC All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by an MIT-style license that can be
|
||||
* found in the LICENSE file at https://angular.io/license
|
||||
*/
|
||||
import {compareSegments, offsetSegment} from '../../src/sourcemaps/segment_marker';
|
||||
import {computeStartOfLinePositions} from '../../src/sourcemaps/source_file';
|
||||
|
||||
// Unit tests for the segment-marker utilities. Markers are built as plain object literals;
// `next` is always `undefined` because segment linking is not under test here.
describe('SegmentMarker utils', () => {
  describe('compareSegments()', () => {
    it('should return 0 if the segments are the same', () => {
      expect(compareSegments(
                 {line: 0, column: 0, position: 0, next: undefined},
                 {line: 0, column: 0, position: 0, next: undefined}))
          .toEqual(0);
      expect(compareSegments(
                 {line: 123, column: 0, position: 200, next: undefined},
                 {line: 123, column: 0, position: 200, next: undefined}))
          .toEqual(0);
      expect(compareSegments(
                 {line: 0, column: 45, position: 45, next: undefined},
                 {line: 0, column: 45, position: 45, next: undefined}))
          .toEqual(0);
      expect(compareSegments(
                 {line: 123, column: 45, position: 245, next: undefined},
                 {line: 123, column: 45, position: 245, next: undefined}))
          .toEqual(0);
    });

    it('should return a negative number if the first segment is before the second segment', () => {
      expect(compareSegments(
                 {line: 0, column: 0, position: 0, next: undefined},
                 {line: 0, column: 45, position: 45, next: undefined}))
          .toBeLessThan(0);
      expect(compareSegments(
                 {line: 123, column: 0, position: 200, next: undefined},
                 {line: 123, column: 45, position: 245, next: undefined}))
          .toBeLessThan(0);
      expect(compareSegments(
                 {line: 13, column: 45, position: 75, next: undefined},
                 {line: 123, column: 45, position: 245, next: undefined}))
          .toBeLessThan(0);
      expect(compareSegments(
                 {line: 13, column: 45, position: 75, next: undefined},
                 {line: 123, column: 9, position: 209, next: undefined}))
          .toBeLessThan(0);
    });

    it('should return a positive number if the first segment is after the second segment', () => {
      expect(compareSegments(
                 {line: 0, column: 45, position: 45, next: undefined},
                 {line: 0, column: 0, position: 0, next: undefined}))
          .toBeGreaterThan(0);
      expect(compareSegments(
                 {line: 123, column: 45, position: 245, next: undefined},
                 {line: 123, column: 0, position: 200, next: undefined}))
          .toBeGreaterThan(0);
      expect(compareSegments(
                 {line: 123, column: 45, position: 245, next: undefined},
                 {line: 13, column: 45, position: 75, next: undefined}))
          .toBeGreaterThan(0);
      expect(compareSegments(
                 {line: 123, column: 9, position: 209, next: undefined},
                 {line: 13, column: 45, position: 75, next: undefined}))
          .toBeGreaterThan(0);
    });
  });

  describe('offsetSegment()', () => {
    it('should return an identical marker if offset is 0', () => {
      const startOfLinePositions =
          computeStartOfLinePositions('012345\n0123456789\r\n012*4567\n0123456');
      // NOTE(review): `position: 20` does not agree with line 2 / column 3 (which would be 21),
      // but that is irrelevant here: a zero offset must return the very same marker object.
      const marker = {line: 2, column: 3, position: 20, next: undefined};
      expect(offsetSegment(startOfLinePositions, marker, 0)).toBe(marker);
    });

    it('should return a new marker offset by the given chars', () => {
      // The line-start positions of the test content are [0, 7, 18, 27].
      const startOfLinePositions =
          computeStartOfLinePositions('012345\n0123456789\r\n012*4567\n0123456');
      const marker = {line: 2, column: 3, position: 21, next: undefined};
      expect(offsetSegment(startOfLinePositions, marker, 1))
          .toEqual({line: 2, column: 4, position: 22, next: undefined});
      expect(offsetSegment(startOfLinePositions, marker, 2))
          .toEqual({line: 2, column: 5, position: 23, next: undefined});
      expect(offsetSegment(startOfLinePositions, marker, 4))
          .toEqual({line: 2, column: 7, position: 25, next: undefined});
      expect(offsetSegment(startOfLinePositions, marker, 6))
          .toEqual({line: 3, column: 0, position: 27, next: undefined});
      expect(offsetSegment(startOfLinePositions, marker, 8))
          .toEqual({line: 3, column: 2, position: 29, next: undefined});
      expect(offsetSegment(startOfLinePositions, marker, 20))
          .toEqual({line: 3, column: 14, position: 41, next: undefined});
      expect(offsetSegment(startOfLinePositions, marker, -1))
          .toEqual({line: 2, column: 2, position: 20, next: undefined});
      expect(offsetSegment(startOfLinePositions, marker, -2))
          .toEqual({line: 2, column: 1, position: 19, next: undefined});
      expect(offsetSegment(startOfLinePositions, marker, -3))
          .toEqual({line: 2, column: 0, position: 18, next: undefined});
      expect(offsetSegment(startOfLinePositions, marker, -4))
          .toEqual({line: 1, column: 10, position: 17, next: undefined});
      expect(offsetSegment(startOfLinePositions, marker, -6))
          .toEqual({line: 1, column: 8, position: 15, next: undefined});
      expect(offsetSegment(startOfLinePositions, marker, -16))
          .toEqual({line: 0, column: 5, position: 5, next: undefined});
    });
  });
});
|
@ -1,296 +0,0 @@
|
||||
/**
|
||||
* @license
|
||||
* Copyright Google LLC All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by an MIT-style license that can be
|
||||
* found in the LICENSE file at https://angular.io/license
|
||||
*/
|
||||
import {absoluteFrom, FileSystem, getFileSystem} from '@angular/compiler-cli/src/ngtsc/file_system';
|
||||
import {fromObject} from 'convert-source-map';
|
||||
|
||||
import {runInEachFileSystem} from '../../../src/ngtsc/file_system/testing';
|
||||
import {MockLogger} from '../../../src/ngtsc/logging/testing';
|
||||
import {RawSourceMap} from '../../src/sourcemaps/raw_source_map';
|
||||
import {SourceFileLoader as SourceFileLoader} from '../../src/sourcemaps/source_file_loader';
|
||||
|
||||
runInEachFileSystem(() => {
|
||||
describe('SourceFileLoader', () => {
|
||||
let fs: FileSystem;
|
||||
let logger: MockLogger;
|
||||
let _: typeof absoluteFrom;
|
||||
let registry: SourceFileLoader;
|
||||
beforeEach(() => {
|
||||
fs = getFileSystem();
|
||||
logger = new MockLogger();
|
||||
_ = absoluteFrom;
|
||||
registry = new SourceFileLoader(fs, logger);
|
||||
});
|
||||
|
||||
describe('loadSourceFile', () => {
|
||||
it('should load a file with no source map and inline contents', () => {
|
||||
const sourceFile = registry.loadSourceFile(_('/foo/src/index.js'), 'some inline content');
|
||||
if (sourceFile === null) {
|
||||
return fail('Expected source file to be defined');
|
||||
}
|
||||
expect(sourceFile.contents).toEqual('some inline content');
|
||||
expect(sourceFile.sourcePath).toEqual(_('/foo/src/index.js'));
|
||||
expect(sourceFile.rawMap).toEqual(null);
|
||||
expect(sourceFile.sources).toEqual([]);
|
||||
});
|
||||
|
||||
it('should load a file with no source map and read its contents from disk', () => {
|
||||
fs.ensureDir(_('/foo/src'));
|
||||
fs.writeFile(_('/foo/src/index.js'), 'some external content');
|
||||
const sourceFile = registry.loadSourceFile(_('/foo/src/index.js'));
|
||||
if (sourceFile === null) {
|
||||
return fail('Expected source file to be defined');
|
||||
}
|
||||
expect(sourceFile.contents).toEqual('some external content');
|
||||
expect(sourceFile.sourcePath).toEqual(_('/foo/src/index.js'));
|
||||
expect(sourceFile.rawMap).toEqual(null);
|
||||
expect(sourceFile.sources).toEqual([]);
|
||||
});
|
||||
|
||||
it('should load a file with an external source map', () => {
|
||||
fs.ensureDir(_('/foo/src'));
|
||||
const sourceMap = createRawSourceMap({file: 'index.js'});
|
||||
fs.writeFile(_('/foo/src/external.js.map'), JSON.stringify(sourceMap));
|
||||
const sourceFile = registry.loadSourceFile(
|
||||
_('/foo/src/index.js'), 'some inline content\n//# sourceMappingURL=external.js.map');
|
||||
if (sourceFile === null) {
|
||||
return fail('Expected source file to be defined');
|
||||
}
|
||||
expect(sourceFile.rawMap).toEqual(sourceMap);
|
||||
});
|
||||
|
||||
it('should handle a missing external source map', () => {
|
||||
fs.ensureDir(_('/foo/src'));
|
||||
const sourceFile = registry.loadSourceFile(
|
||||
_('/foo/src/index.js'), 'some inline content\n//# sourceMappingURL=external.js.map');
|
||||
if (sourceFile === null) {
|
||||
return fail('Expected source file to be defined');
|
||||
}
|
||||
expect(sourceFile.rawMap).toBe(null);
|
||||
});
|
||||
|
||||
it('should load a file with an inline encoded source map', () => {
|
||||
const sourceMap = createRawSourceMap({file: 'index.js'});
|
||||
const encodedSourceMap = Buffer.from(JSON.stringify(sourceMap)).toString('base64');
|
||||
const sourceFile = registry.loadSourceFile(
|
||||
_('/foo/src/index.js'),
|
||||
`some inline content\n//# sourceMappingURL=data:application/json;charset=utf-8;base64,${
|
||||
encodedSourceMap}`);
|
||||
if (sourceFile === null) {
|
||||
return fail('Expected source file to be defined');
|
||||
}
|
||||
expect(sourceFile.rawMap).toEqual(sourceMap);
|
||||
});
|
||||
|
||||
it('should load a file with an implied source map', () => {
|
||||
const sourceMap = createRawSourceMap({file: 'index.js'});
|
||||
fs.ensureDir(_('/foo/src'));
|
||||
fs.writeFile(_('/foo/src/index.js.map'), JSON.stringify(sourceMap));
|
||||
const sourceFile = registry.loadSourceFile(_('/foo/src/index.js'), 'some inline content');
|
||||
if (sourceFile === null) {
|
||||
return fail('Expected source file to be defined');
|
||||
}
|
||||
expect(sourceFile.rawMap).toEqual(sourceMap);
|
||||
});
|
||||
|
||||
it('should handle missing implied source-map file', () => {
|
||||
fs.ensureDir(_('/foo/src'));
|
||||
const sourceFile = registry.loadSourceFile(_('/foo/src/index.js'), 'some inline content');
|
||||
if (sourceFile === null) {
|
||||
return fail('Expected source file to be defined');
|
||||
}
|
||||
expect(sourceFile.rawMap).toBe(null);
|
||||
});
|
||||
|
||||
it('should recurse into external original source files that are referenced from source maps',
|
||||
() => {
|
||||
// Setup a scenario where the generated files reference previous files:
|
||||
//
|
||||
// index.js
|
||||
// -> x.js
|
||||
// -> y.js
|
||||
// -> a.js
|
||||
// -> z.js (inline content)
|
||||
fs.ensureDir(_('/foo/src'));
|
||||
|
||||
const indexSourceMap = createRawSourceMap({
|
||||
file: 'index.js',
|
||||
sources: ['x.js', 'y.js', 'z.js'],
|
||||
'sourcesContent': [null, null, 'z content']
|
||||
});
|
||||
fs.writeFile(_('/foo/src/index.js.map'), JSON.stringify(indexSourceMap));
|
||||
|
||||
fs.writeFile(_('/foo/src/x.js'), 'x content');
|
||||
|
||||
const ySourceMap = createRawSourceMap({file: 'y.js', sources: ['a.js']});
|
||||
fs.writeFile(_('/foo/src/y.js'), 'y content');
|
||||
fs.writeFile(_('/foo/src/y.js.map'), JSON.stringify(ySourceMap));
|
||||
fs.writeFile(_('/foo/src/z.js'), 'z content');
|
||||
fs.writeFile(_('/foo/src/a.js'), 'a content');
|
||||
|
||||
const sourceFile = registry.loadSourceFile(_('/foo/src/index.js'), 'index content');
|
||||
if (sourceFile === null) {
|
||||
return fail('Expected source file to be defined');
|
||||
}
|
||||
|
||||
expect(sourceFile.contents).toEqual('index content');
|
||||
expect(sourceFile.sourcePath).toEqual(_('/foo/src/index.js'));
|
||||
expect(sourceFile.rawMap).toEqual(indexSourceMap);
|
||||
|
||||
expect(sourceFile.sources.length).toEqual(3);
|
||||
|
||||
expect(sourceFile.sources[0]!.contents).toEqual('x content');
|
||||
expect(sourceFile.sources[0]!.sourcePath).toEqual(_('/foo/src/x.js'));
|
||||
expect(sourceFile.sources[0]!.rawMap).toEqual(null);
|
||||
expect(sourceFile.sources[0]!.sources).toEqual([]);
|
||||
|
||||
|
||||
expect(sourceFile.sources[1]!.contents).toEqual('y content');
|
||||
expect(sourceFile.sources[1]!.sourcePath).toEqual(_('/foo/src/y.js'));
|
||||
expect(sourceFile.sources[1]!.rawMap).toEqual(ySourceMap);
|
||||
|
||||
expect(sourceFile.sources[1]!.sources.length).toEqual(1);
|
||||
expect(sourceFile.sources[1]!.sources[0]!.contents).toEqual('a content');
|
||||
expect(sourceFile.sources[1]!.sources[0]!.sourcePath).toEqual(_('/foo/src/a.js'));
|
||||
expect(sourceFile.sources[1]!.sources[0]!.rawMap).toEqual(null);
|
||||
expect(sourceFile.sources[1]!.sources[0]!.sources).toEqual([]);
|
||||
|
||||
expect(sourceFile.sources[2]!.contents).toEqual('z content');
|
||||
expect(sourceFile.sources[2]!.sourcePath).toEqual(_('/foo/src/z.js'));
|
||||
expect(sourceFile.sources[2]!.rawMap).toEqual(null);
|
||||
expect(sourceFile.sources[2]!.sources).toEqual([]);
|
||||
});
|
||||
|
||||
it('should handle a missing source file referenced from a source-map', () => {
|
||||
fs.ensureDir(_('/foo/src'));
|
||||
|
||||
const indexSourceMap =
|
||||
createRawSourceMap({file: 'index.js', sources: ['x.js'], 'sourcesContent': [null]});
|
||||
fs.writeFile(_('/foo/src/index.js.map'), JSON.stringify(indexSourceMap));
|
||||
|
||||
const sourceFile = registry.loadSourceFile(_('/foo/src/index.js'), 'index content');
|
||||
if (sourceFile === null) {
|
||||
return fail('Expected source file to be defined');
|
||||
}
|
||||
|
||||
expect(sourceFile.contents).toEqual('index content');
|
||||
expect(sourceFile.sourcePath).toEqual(_('/foo/src/index.js'));
|
||||
expect(sourceFile.rawMap).toEqual(indexSourceMap);
|
||||
expect(sourceFile.sources.length).toEqual(1);
|
||||
expect(sourceFile.sources[0]).toBe(null);
|
||||
});
|
||||
});
|
||||
|
||||
it('should log a warning if there is a cyclic dependency in source files loaded from disk',
|
||||
() => {
|
||||
fs.ensureDir(_('/foo/src'));
|
||||
|
||||
const aMap = createRawSourceMap({file: 'a.js', sources: ['b.js']});
|
||||
|
||||
const aPath = _('/foo/src/a.js');
|
||||
fs.writeFile(aPath, 'a content\n' + fromObject(aMap).toComment());
|
||||
|
||||
const bPath = _('/foo/src/b.js');
|
||||
fs.writeFile(
|
||||
bPath,
|
||||
'b content\n' +
|
||||
fromObject(createRawSourceMap({file: 'b.js', sources: ['c.js']})).toComment());
|
||||
|
||||
const cPath = _('/foo/src/c.js');
|
||||
fs.writeFile(
|
||||
cPath,
|
||||
'c content\n' +
|
||||
fromObject(createRawSourceMap({file: 'c.js', sources: ['a.js']})).toComment());
|
||||
|
||||
const sourceFile = registry.loadSourceFile(aPath)!;
|
||||
expect(sourceFile).not.toBe(null!);
|
||||
expect(sourceFile.contents).toEqual('a content\n');
|
||||
expect(sourceFile.sourcePath).toEqual(_('/foo/src/a.js'));
|
||||
expect(sourceFile.rawMap).toEqual(aMap);
|
||||
expect(sourceFile.sources.length).toEqual(1);
|
||||
|
||||
expect(logger.logs.warn[0][0])
|
||||
.toContain(
|
||||
`Circular source file mapping dependency: ` +
|
||||
`${aPath} -> ${bPath} -> ${cPath} -> ${aPath}`);
|
||||
});
|
||||
|
||||
it('should log a warning if there is a cyclic dependency in source maps loaded from disk',
|
||||
() => {
|
||||
fs.ensureDir(_('/foo/src'));
|
||||
|
||||
// Create a self-referencing source-map
|
||||
const aMap = createRawSourceMap({
|
||||
file: 'a.js',
|
||||
sources: ['a.js'],
|
||||
sourcesContent: ['inline a.js content\n//# sourceMappingURL=a.js.map']
|
||||
});
|
||||
const aMapPath = _('/foo/src/a.js.map');
|
||||
fs.writeFile(aMapPath, JSON.stringify(aMap));
|
||||
|
||||
const aPath = _('/foo/src/a.js');
|
||||
fs.writeFile(aPath, 'a.js content\n//# sourceMappingURL=a.js.map');
|
||||
|
||||
const sourceFile = registry.loadSourceFile(aPath)!;
|
||||
expect(sourceFile).not.toBe(null!);
|
||||
expect(sourceFile.contents).toEqual('a.js content\n');
|
||||
expect(sourceFile.sourcePath).toEqual(_('/foo/src/a.js'));
|
||||
expect(sourceFile.rawMap).toEqual(aMap);
|
||||
expect(sourceFile.sources.length).toEqual(1);
|
||||
|
||||
expect(logger.logs.warn[0][0])
|
||||
.toContain(
|
||||
`Circular source file mapping dependency: ` +
|
||||
`${aPath} -> ${aMapPath} -> ${aMapPath}`);
|
||||
|
||||
const innerSourceFile = sourceFile.sources[0]!;
|
||||
expect(innerSourceFile).not.toBe(null!);
|
||||
expect(innerSourceFile.contents).toEqual('inline a.js content\n');
|
||||
expect(innerSourceFile.sourcePath).toEqual(_('/foo/src/a.js'));
|
||||
expect(innerSourceFile.rawMap).toEqual(null);
|
||||
expect(innerSourceFile.sources.length).toEqual(0);
|
||||
});
|
||||
|
||||
it('should not fail if there is a cyclic dependency in filenames of inline sources', () => {
|
||||
fs.ensureDir(_('/foo/src'));
|
||||
|
||||
const aPath = _('/foo/src/a.js');
|
||||
fs.writeFile(
|
||||
aPath,
|
||||
'a content\n' +
|
||||
fromObject(createRawSourceMap({file: 'a.js', sources: ['b.js']})).toComment());
|
||||
|
||||
const bPath = _('/foo/src/b.js');
|
||||
fs.writeFile(bPath, 'b content');
|
||||
fs.writeFile(
|
||||
_('/foo/src/b.js.map'),
|
||||
JSON.stringify(createRawSourceMap({file: 'b.js', sources: ['c.js']})));
|
||||
|
||||
const cPath = _('/foo/src/c.js');
|
||||
fs.writeFile(cPath, 'c content');
|
||||
fs.writeFile(
|
||||
_('/foo/src/c.js.map'),
|
||||
JSON.stringify(createRawSourceMap(
|
||||
{file: 'c.js', sources: ['a.js'], sourcesContent: ['inline a.js content']})));
|
||||
|
||||
expect(() => registry.loadSourceFile(aPath)).not.toThrow();
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
|
||||
function createRawSourceMap(custom: Partial<RawSourceMap>): RawSourceMap {
|
||||
return {
|
||||
'version': 3,
|
||||
'sourceRoot': '',
|
||||
'sources': [],
|
||||
'sourcesContent': [],
|
||||
'names': [],
|
||||
'mappings': '',
|
||||
...custom
|
||||
};
|
||||
}
|
@ -1,537 +0,0 @@
|
||||
/**
|
||||
* @license
|
||||
* Copyright Google LLC All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by an MIT-style license that can be
|
||||
* found in the LICENSE file at https://angular.io/license
|
||||
*/
|
||||
import {encode} from 'sourcemap-codec';
|
||||
|
||||
import {absoluteFrom} from '../../../src/ngtsc/file_system';
|
||||
import {runInEachFileSystem} from '../../../src/ngtsc/file_system/testing';
|
||||
import {RawSourceMap} from '../../src/sourcemaps/raw_source_map';
|
||||
import {SegmentMarker} from '../../src/sourcemaps/segment_marker';
|
||||
import {computeStartOfLinePositions, ensureOriginalSegmentLinks, extractOriginalSegments, findLastMappingIndexBefore, Mapping, parseMappings, SourceFile} from '../../src/sourcemaps/source_file';
|
||||
|
||||
runInEachFileSystem(() => {
|
||||
describe('SourceFile and utilities', () => {
|
||||
let _: typeof absoluteFrom;
|
||||
|
||||
beforeEach(() => {
|
||||
_ = absoluteFrom;
|
||||
});
|
||||
|
||||
describe('parseMappings()', () => {
|
||||
it('should be an empty array for source files with no source map', () => {
|
||||
const mappings = parseMappings(null, [], []);
|
||||
expect(mappings).toEqual([]);
|
||||
});
|
||||
|
||||
it('should be empty array for source files with no source map mappings', () => {
|
||||
const rawSourceMap: RawSourceMap = {mappings: '', names: [], sources: [], version: 3};
|
||||
const mappings = parseMappings(rawSourceMap, [], []);
|
||||
expect(mappings).toEqual([]);
|
||||
});
|
||||
|
||||
it('should parse the mappings from the raw source map', () => {
|
||||
const rawSourceMap: RawSourceMap = {
|
||||
mappings: encode([[[0, 0, 0, 0], [6, 0, 0, 3]]]),
|
||||
names: [],
|
||||
sources: ['a.js'],
|
||||
version: 3
|
||||
};
|
||||
const originalSource = new SourceFile(_('/foo/src/a.js'), 'abcdefg', null, false, []);
|
||||
const mappings = parseMappings(rawSourceMap, [originalSource], [0, 8]);
|
||||
expect(mappings).toEqual([
|
||||
{
|
||||
generatedSegment: {line: 0, column: 0, position: 0, next: undefined},
|
||||
originalSource,
|
||||
originalSegment: {line: 0, column: 0, position: 0, next: undefined},
|
||||
name: undefined
|
||||
},
|
||||
{
|
||||
generatedSegment: {line: 0, column: 6, position: 6, next: undefined},
|
||||
originalSource,
|
||||
originalSegment: {line: 0, column: 3, position: 3, next: undefined},
|
||||
name: undefined
|
||||
},
|
||||
]);
|
||||
});
|
||||
});
|
||||
|
||||
describe('extractOriginalSegments()', () => {
|
||||
it('should return an empty Map for source files with no source map', () => {
|
||||
expect(extractOriginalSegments(parseMappings(null, [], []))).toEqual(new Map());
|
||||
});
|
||||
|
||||
it('should be empty Map for source files with no source map mappings', () => {
|
||||
const rawSourceMap: RawSourceMap = {mappings: '', names: [], sources: [], version: 3};
|
||||
expect(extractOriginalSegments(parseMappings(rawSourceMap, [], []))).toEqual(new Map());
|
||||
});
|
||||
|
||||
it('should parse the segments in ascending order of original position from the raw source map',
|
||||
() => {
|
||||
const originalSource = new SourceFile(_('/foo/src/a.js'), 'abcdefg', null, false, []);
|
||||
const rawSourceMap: RawSourceMap = {
|
||||
mappings: encode([[[0, 0, 0, 0], [2, 0, 0, 3], [4, 0, 0, 2]]]),
|
||||
names: [],
|
||||
sources: ['a.js'],
|
||||
version: 3
|
||||
};
|
||||
const originalSegments =
|
||||
extractOriginalSegments(parseMappings(rawSourceMap, [originalSource], [0, 8]));
|
||||
expect(originalSegments.get(originalSource)).toEqual([
|
||||
{line: 0, column: 0, position: 0, next: undefined},
|
||||
{line: 0, column: 2, position: 2, next: undefined},
|
||||
{line: 0, column: 3, position: 3, next: undefined},
|
||||
]);
|
||||
});
|
||||
|
||||
it('should create separate arrays for each original source file', () => {
|
||||
const sourceA = new SourceFile(_('/foo/src/a.js'), 'abcdefg', null, false, []);
|
||||
const sourceB = new SourceFile(_('/foo/src/b.js'), '1234567', null, false, []);
|
||||
const rawSourceMap: RawSourceMap = {
|
||||
mappings:
|
||||
encode([[[0, 0, 0, 0], [2, 1, 0, 3], [4, 0, 0, 2], [5, 1, 0, 5], [6, 1, 0, 2]]]),
|
||||
names: [],
|
||||
sources: ['a.js', 'b.js'],
|
||||
version: 3
|
||||
};
|
||||
const originalSegments =
|
||||
extractOriginalSegments(parseMappings(rawSourceMap, [sourceA, sourceB], [0, 8]));
|
||||
expect(originalSegments.get(sourceA)).toEqual([
|
||||
{line: 0, column: 0, position: 0, next: undefined},
|
||||
{line: 0, column: 2, position: 2, next: undefined},
|
||||
]);
|
||||
expect(originalSegments.get(sourceB)).toEqual([
|
||||
{line: 0, column: 2, position: 2, next: undefined},
|
||||
{line: 0, column: 3, position: 3, next: undefined},
|
||||
{line: 0, column: 5, position: 5, next: undefined},
|
||||
]);
|
||||
});
|
||||
});
|
||||
|
||||
describe('findLastMappingIndexBefore', () => {
|
||||
it('should find the highest mapping index that has a segment marker below the given one if there is not an exact match',
|
||||
() => {
|
||||
const marker5: SegmentMarker = {line: 0, column: 50, position: 50, next: undefined};
|
||||
const marker4: SegmentMarker = {line: 0, column: 40, position: 40, next: marker5};
|
||||
const marker3: SegmentMarker = {line: 0, column: 30, position: 30, next: marker4};
|
||||
const marker2: SegmentMarker = {line: 0, column: 20, position: 20, next: marker3};
|
||||
const marker1: SegmentMarker = {line: 0, column: 10, position: 10, next: marker2};
|
||||
const mappings: Mapping[] = [marker1, marker2, marker3, marker4, marker5].map(
|
||||
marker => ({generatedSegment: marker} as Mapping));
|
||||
|
||||
const marker: SegmentMarker = {line: 0, column: 35, position: 35, next: undefined};
|
||||
const index = findLastMappingIndexBefore(mappings, marker, /* exclusive */ false, 0);
|
||||
expect(index).toEqual(2);
|
||||
});
|
||||
|
||||
it('should find the highest mapping index that has a segment marker (when there are duplicates) below the given one if there is not an exact match',
|
||||
() => {
|
||||
const marker5: SegmentMarker = {line: 0, column: 50, position: 50, next: undefined};
|
||||
const marker4: SegmentMarker = {line: 0, column: 30, position: 30, next: marker5};
|
||||
const marker3: SegmentMarker = {line: 0, column: 30, position: 30, next: marker4};
|
||||
const marker2: SegmentMarker = {line: 0, column: 20, position: 20, next: marker3};
|
||||
const marker1: SegmentMarker = {line: 0, column: 10, position: 10, next: marker2};
|
||||
const mappings: Mapping[] = [marker1, marker2, marker3, marker4, marker5].map(
|
||||
marker => ({generatedSegment: marker} as Mapping));
|
||||
|
||||
const marker: SegmentMarker = {line: 0, column: 35, position: 35, next: undefined};
|
||||
const index = findLastMappingIndexBefore(mappings, marker, /* exclusive */ false, 0);
|
||||
expect(index).toEqual(3);
|
||||
});
|
||||
|
||||
it('should find the last mapping if the segment marker is higher than all of them', () => {
|
||||
const marker5: SegmentMarker = {line: 0, column: 50, position: 50, next: undefined};
|
||||
const marker4: SegmentMarker = {line: 0, column: 40, position: 40, next: marker5};
|
||||
const marker3: SegmentMarker = {line: 0, column: 30, position: 30, next: marker4};
|
||||
const marker2: SegmentMarker = {line: 0, column: 20, position: 20, next: marker3};
|
||||
const marker1: SegmentMarker = {line: 0, column: 10, position: 10, next: marker2};
|
||||
const mappings: Mapping[] = [marker1, marker2, marker3, marker4, marker5].map(
|
||||
marker => ({generatedSegment: marker} as Mapping));
|
||||
|
||||
const marker: SegmentMarker = {line: 0, column: 60, position: 60, next: undefined};
|
||||
|
||||
const index = findLastMappingIndexBefore(mappings, marker, /* exclusive */ false, 0);
|
||||
expect(index).toEqual(4);
|
||||
});
|
||||
|
||||
it('should return -1 if the segment marker is lower than all of them', () => {
|
||||
const marker5: SegmentMarker = {line: 0, column: 50, position: 50, next: undefined};
|
||||
const marker4: SegmentMarker = {line: 0, column: 40, position: 40, next: marker5};
|
||||
const marker3: SegmentMarker = {line: 0, column: 30, position: 30, next: marker4};
|
||||
const marker2: SegmentMarker = {line: 0, column: 20, position: 20, next: marker3};
|
||||
const marker1: SegmentMarker = {line: 0, column: 10, position: 10, next: marker2};
|
||||
const mappings: Mapping[] = [marker1, marker2, marker3, marker4, marker5].map(
|
||||
marker => ({generatedSegment: marker} as Mapping));
|
||||
|
||||
const marker: SegmentMarker = {line: 0, column: 5, position: 5, next: undefined};
|
||||
|
||||
const index = findLastMappingIndexBefore(mappings, marker, /* exclusive */ false, 0);
|
||||
expect(index).toEqual(-1);
|
||||
});
|
||||
|
||||
describe('[exact match inclusive]', () => {
|
||||
it('should find the matching segment marker mapping index if there is only one of them',
|
||||
() => {
|
||||
const marker5: SegmentMarker = {line: 0, column: 50, position: 50, next: undefined};
|
||||
const marker4: SegmentMarker = {line: 0, column: 40, position: 40, next: marker5};
|
||||
const marker3: SegmentMarker = {line: 0, column: 30, position: 30, next: marker4};
|
||||
const marker2: SegmentMarker = {line: 0, column: 20, position: 20, next: marker3};
|
||||
const marker1: SegmentMarker = {line: 0, column: 10, position: 10, next: marker2};
|
||||
|
||||
const mappings: Mapping[] = [marker1, marker2, marker3, marker4, marker5].map(
|
||||
marker => ({generatedSegment: marker} as Mapping));
|
||||
const index = findLastMappingIndexBefore(mappings, marker3, /* exclusive */ false, 0);
|
||||
expect(index).toEqual(2);
|
||||
});
|
||||
|
||||
it('should find the highest matching segment marker mapping index if there is more than one of them',
|
||||
() => {
|
||||
const marker5: SegmentMarker = {line: 0, column: 50, position: 50, next: undefined};
|
||||
const marker4: SegmentMarker = {line: 0, column: 30, position: 30, next: marker5};
|
||||
const marker3: SegmentMarker = {line: 0, column: 30, position: 30, next: marker4};
|
||||
const marker2: SegmentMarker = {line: 0, column: 20, position: 20, next: marker3};
|
||||
const marker1: SegmentMarker = {line: 0, column: 10, position: 10, next: marker2};
|
||||
|
||||
const mappings: Mapping[] = [marker1, marker2, marker3, marker4, marker5].map(
|
||||
marker => ({generatedSegment: marker} as Mapping));
|
||||
const index = findLastMappingIndexBefore(mappings, marker3, /* exclusive */ false, 0);
|
||||
expect(index).toEqual(3);
|
||||
});
|
||||
});
|
||||
|
||||
describe('[exact match exclusive]', () => {
|
||||
it('should find the preceding mapping index if there is a matching segment marker', () => {
|
||||
const marker5: SegmentMarker = {line: 0, column: 50, position: 50, next: undefined};
|
||||
const marker4: SegmentMarker = {line: 0, column: 40, position: 40, next: marker5};
|
||||
const marker3: SegmentMarker = {line: 0, column: 30, position: 30, next: marker4};
|
||||
const marker2: SegmentMarker = {line: 0, column: 20, position: 20, next: marker3};
|
||||
const marker1: SegmentMarker = {line: 0, column: 10, position: 10, next: marker2};
|
||||
|
||||
const mappings: Mapping[] = [marker1, marker2, marker3, marker4, marker5].map(
|
||||
marker => ({generatedSegment: marker} as Mapping));
|
||||
const index = findLastMappingIndexBefore(mappings, marker3, /* exclusive */ true, 0);
|
||||
expect(index).toEqual(1);
|
||||
});
|
||||
|
||||
it('should find the highest preceding mapping index if there is more than one matching segment marker',
|
||||
() => {
|
||||
const marker5: SegmentMarker = {line: 0, column: 50, position: 50, next: undefined};
|
||||
const marker4: SegmentMarker = {line: 0, column: 30, position: 30, next: marker5};
|
||||
const marker3: SegmentMarker = {line: 0, column: 30, position: 30, next: marker4};
|
||||
const marker2: SegmentMarker = {line: 0, column: 20, position: 20, next: marker3};
|
||||
const marker1: SegmentMarker = {line: 0, column: 10, position: 10, next: marker2};
|
||||
|
||||
const mappings: Mapping[] = [marker1, marker2, marker3, marker4, marker5].map(
|
||||
marker => ({generatedSegment: marker} as Mapping));
|
||||
const index = findLastMappingIndexBefore(mappings, marker3, /* exclusive */ false, 0);
|
||||
expect(index).toEqual(3);
|
||||
});
|
||||
});
|
||||
|
||||
describe('[with lowerIndex hint', () => {
|
||||
it('should find the highest mapping index above the lowerIndex hint that has a segment marker below the given one if there is not an exact match',
|
||||
() => {
|
||||
const marker5: SegmentMarker = {line: 0, column: 50, position: 50, next: undefined};
|
||||
const marker4: SegmentMarker = {line: 0, column: 40, position: 40, next: marker5};
|
||||
const marker3: SegmentMarker = {line: 0, column: 30, position: 30, next: marker4};
|
||||
const marker2: SegmentMarker = {line: 0, column: 20, position: 20, next: marker3};
|
||||
const marker1: SegmentMarker = {line: 0, column: 10, position: 10, next: marker2};
|
||||
const mappings: Mapping[] = [marker1, marker2, marker3, marker4, marker5].map(
|
||||
marker => ({generatedSegment: marker} as Mapping));
|
||||
|
||||
const marker: SegmentMarker = {line: 0, column: 35, position: 35, next: undefined};
|
||||
const index = findLastMappingIndexBefore(mappings, marker, /* exclusive */ false, 1);
|
||||
expect(index).toEqual(2);
|
||||
});
|
||||
|
||||
it('should return the lowerIndex mapping index if there is a single exact match and we are not exclusive',
|
||||
() => {
|
||||
const marker5: SegmentMarker = {line: 0, column: 50, position: 50, next: undefined};
|
||||
const marker4: SegmentMarker = {line: 0, column: 40, position: 40, next: marker5};
|
||||
const marker3: SegmentMarker = {line: 0, column: 30, position: 30, next: marker4};
|
||||
const marker2: SegmentMarker = {line: 0, column: 20, position: 20, next: marker3};
|
||||
const marker1: SegmentMarker = {line: 0, column: 10, position: 10, next: marker2};
|
||||
const mappings: Mapping[] = [marker1, marker2, marker3, marker4, marker5].map(
|
||||
marker => ({generatedSegment: marker} as Mapping));
|
||||
|
||||
const marker: SegmentMarker = {line: 0, column: 30, position: 30, next: undefined};
|
||||
const index = findLastMappingIndexBefore(mappings, marker, /* exclusive */ false, 2);
|
||||
expect(index).toEqual(2);
|
||||
});
|
||||
|
||||
it('should return the lowerIndex mapping index if there are multiple exact matches and we are not exclusive',
|
||||
() => {
|
||||
const marker5: SegmentMarker = {line: 0, column: 50, position: 50, next: undefined};
|
||||
const marker4: SegmentMarker = {line: 0, column: 30, position: 30, next: marker5};
|
||||
const marker3: SegmentMarker = {line: 0, column: 30, position: 30, next: marker4};
|
||||
const marker2: SegmentMarker = {line: 0, column: 20, position: 20, next: marker3};
|
||||
const marker1: SegmentMarker = {line: 0, column: 10, position: 10, next: marker2};
|
||||
const mappings: Mapping[] = [marker1, marker2, marker3, marker4, marker5].map(
|
||||
marker => ({generatedSegment: marker} as Mapping));
|
||||
|
||||
const marker: SegmentMarker = {line: 0, column: 30, position: 30, next: undefined};
|
||||
const index = findLastMappingIndexBefore(mappings, marker, /* exclusive */ false, 3);
|
||||
expect(index).toEqual(3);
|
||||
});
|
||||
|
||||
it('should return -1 if the segment marker is lower than the lowerIndex hint', () => {
|
||||
const marker5: SegmentMarker = {line: 0, column: 50, position: 50, next: undefined};
|
||||
const marker4: SegmentMarker = {line: 0, column: 40, position: 40, next: marker5};
|
||||
const marker3: SegmentMarker = {line: 0, column: 30, position: 30, next: marker4};
|
||||
const marker2: SegmentMarker = {line: 0, column: 20, position: 20, next: marker3};
|
||||
const marker1: SegmentMarker = {line: 0, column: 10, position: 10, next: marker2};
|
||||
const mappings: Mapping[] = [marker1, marker2, marker3, marker4, marker5].map(
|
||||
marker => ({generatedSegment: marker} as Mapping));
|
||||
|
||||
const marker: SegmentMarker = {line: 0, column: 25, position: 25, next: undefined};
|
||||
|
||||
const index = findLastMappingIndexBefore(mappings, marker, /* exclusive */ false, 2);
|
||||
expect(index).toEqual(-1);
|
||||
});
|
||||
|
||||
it('should return -1 if the segment marker is equal to the lowerIndex hint and we are exclusive',
|
||||
() => {
|
||||
const marker5: SegmentMarker = {line: 0, column: 50, position: 50, next: undefined};
|
||||
const marker4: SegmentMarker = {line: 0, column: 40, position: 40, next: marker5};
|
||||
const marker3: SegmentMarker = {line: 0, column: 30, position: 30, next: marker4};
|
||||
const marker2: SegmentMarker = {line: 0, column: 20, position: 20, next: marker3};
|
||||
const marker1: SegmentMarker = {line: 0, column: 10, position: 10, next: marker2};
|
||||
const mappings: Mapping[] = [marker1, marker2, marker3, marker4, marker5].map(
|
||||
marker => ({generatedSegment: marker} as Mapping));
|
||||
|
||||
const marker: SegmentMarker = {line: 0, column: 30, position: 30, next: undefined};
|
||||
|
||||
const index = findLastMappingIndexBefore(mappings, marker, /* exclusive */ true, 2);
|
||||
expect(index).toEqual(-1);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('ensureOriginalSegmentLinks', () => {
|
||||
it('should add `next` properties to each segment that point to the next segment in the same source file',
|
||||
() => {
|
||||
const sourceA = new SourceFile(_('/foo/src/a.js'), 'abcdefg', null, false, []);
|
||||
const sourceB = new SourceFile(_('/foo/src/b.js'), '1234567', null, false, []);
|
||||
const rawSourceMap: RawSourceMap = {
|
||||
mappings:
|
||||
encode([[[0, 0, 0, 0], [2, 1, 0, 3], [4, 0, 0, 2], [5, 1, 0, 5], [6, 1, 0, 2]]]),
|
||||
names: [],
|
||||
sources: ['a.js', 'b.js'],
|
||||
version: 3
|
||||
};
|
||||
const mappings = parseMappings(rawSourceMap, [sourceA, sourceB], [0, 8]);
|
||||
ensureOriginalSegmentLinks(mappings);
|
||||
expect(mappings[0].originalSegment.next).toBe(mappings[2].originalSegment);
|
||||
expect(mappings[1].originalSegment.next).toBe(mappings[3].originalSegment);
|
||||
expect(mappings[2].originalSegment.next).toBeUndefined();
|
||||
expect(mappings[3].originalSegment.next).toBeUndefined();
|
||||
expect(mappings[4].originalSegment.next).toBe(mappings[1].originalSegment);
|
||||
});
|
||||
});
|
||||
|
||||
describe('SourceFile', () => {
  describe('flattenedMappings', () => {
    // No source map at all: nothing to flatten.
    it('should be an empty array for source files with no source map', () => {
      const sourceFile =
          new SourceFile(_('/foo/src/index.js'), 'index contents', null, false, []);
      expect(sourceFile.flattenedMappings).toEqual([]);
    });

    // A source map whose `mappings` string is empty yields no segments.
    it('should be empty array for source files with no source map mappings', () => {
      const rawSourceMap: RawSourceMap = {mappings: '', names: [], sources: [], version: 3};
      const sourceFile =
          new SourceFile(_('/foo/src/index.js'), 'index contents', rawSourceMap, false, []);
      expect(sourceFile.flattenedMappings).toEqual([]);
    });

    it('should be the same as non-flat mappings if there is only one level of source map',
       () => {
         // Single-level map: index.js -> a.js. Flattening has nothing to merge,
         // so the result should match a direct parse of the same raw map.
         const rawSourceMap: RawSourceMap = {
           mappings: encode([[[0, 0, 0, 0], [6, 0, 0, 3]]]),
           names: [],
           sources: ['a.js'],
           version: 3
         };
         const originalSource = new SourceFile(_('/foo/src/a.js'), 'abcdefg', null, false, []);
         const sourceFile = new SourceFile(
             _('/foo/src/index.js'), 'abc123defg', rawSourceMap, false, [originalSource]);
         expect(removeOriginalSegmentLinks(sourceFile.flattenedMappings))
             .toEqual(parseMappings(rawSourceMap, [originalSource], [0, 11]));
       });

    it('should merge mappings from flattened original source files', () => {
      // Two-level chain: a.js -> b.js -> (c.js, d.js). The flattened mappings
      // of `aSource` should map straight through b.js to c.js/d.js.
      const cSource = new SourceFile(_('/foo/src/c.js'), 'bcd123', null, false, []);
      const dSource = new SourceFile(_('/foo/src/d.js'), 'aef', null, false, []);

      const bSourceMap: RawSourceMap = {
        mappings: encode([[[0, 1, 0, 0], [1, 0, 0, 0], [4, 1, 0, 1]]]),
        names: [],
        sources: ['c.js', 'd.js'],
        version: 3
      };
      const bSource =
          new SourceFile(_('/foo/src/b.js'), 'abcdef', bSourceMap, false, [cSource, dSource]);

      const aSourceMap: RawSourceMap = {
        mappings: encode([[[0, 0, 0, 0], [2, 0, 0, 3], [4, 0, 0, 2], [5, 0, 0, 5]]]),
        names: [],
        sources: ['b.js'],
        version: 3
      };
      const aSource =
          new SourceFile(_('/foo/src/a.js'), 'abdecf', aSourceMap, false, [bSource]);

      // Expected merged segments: each generated position in a.js resolves to
      // a position in c.js or d.js, never the intermediate b.js.
      expect(removeOriginalSegmentLinks(aSource.flattenedMappings)).toEqual([
        {
          generatedSegment: {line: 0, column: 0, position: 0, next: undefined},
          originalSource: dSource,
          originalSegment: {line: 0, column: 0, position: 0, next: undefined},
          name: undefined
        },
        {
          generatedSegment: {line: 0, column: 1, position: 1, next: undefined},
          originalSource: cSource,
          originalSegment: {line: 0, column: 0, position: 0, next: undefined},
          name: undefined
        },
        {
          generatedSegment: {line: 0, column: 2, position: 2, next: undefined},
          originalSource: cSource,
          originalSegment: {line: 0, column: 2, position: 2, next: undefined},
          name: undefined
        },
        {
          generatedSegment: {line: 0, column: 3, position: 3, next: undefined},
          originalSource: dSource,
          originalSegment: {line: 0, column: 1, position: 1, next: undefined},
          name: undefined
        },
        {
          generatedSegment: {line: 0, column: 4, position: 4, next: undefined},
          originalSource: cSource,
          originalSegment: {line: 0, column: 1, position: 1, next: undefined},
          name: undefined
        },
        {
          generatedSegment: {line: 0, column: 5, position: 5, next: undefined},
          originalSource: dSource,
          originalSegment: {line: 0, column: 2, position: 2, next: undefined},
          name: undefined
        },
      ]);
    });

    it('should ignore mappings to missing source files', () => {
      // b.js maps into c.js, but c.js is supplied as `null` (missing), so
      // only the a.js -> b.js level should survive flattening.
      const bSourceMap: RawSourceMap = {
        mappings: encode([[[1, 0, 0, 0], [4, 0, 0, 3], [4, 0, 0, 6], [5, 0, 0, 7]]]),
        names: [],
        sources: ['c.js'],
        version: 3
      };
      const bSource = new SourceFile(_('/foo/src/b.js'), 'abcdef', bSourceMap, false, [null]);
      const aSourceMap: RawSourceMap = {
        mappings: encode([[[0, 0, 0, 0], [2, 0, 0, 3], [4, 0, 0, 2], [5, 0, 0, 5]]]),
        names: [],
        sources: ['b.js'],
        version: 3
      };
      const aSource =
          new SourceFile(_('/foo/src/a.js'), 'abdecf', aSourceMap, false, [bSource]);

      // These flattened mappings are just the mappings from a to b.
      // (The mappings to c are dropped since there is no source file to map to.)
      expect(removeOriginalSegmentLinks(aSource.flattenedMappings))
          .toEqual(parseMappings(aSourceMap, [bSource], [0, 7]));
    });

    /**
     * Clean out the links between original segments of each of the given `mappings`.
     *
     * This lets the tests above compare flattened mappings with `toEqual()`
     * without the `next` links (which are an implementation detail) getting
     * in the way.
     *
     * @param mappings the mappings whose segments are to be cleaned.
     */
    function removeOriginalSegmentLinks(mappings: Mapping[]) {
      for (const mapping of mappings) {
        mapping.originalSegment.next = undefined;
      }
      return mappings;
    }
  });

  describe('renderFlattenedSourceMap()', () => {
    it('should convert the flattenedMappings into a raw source-map object', () => {
      // Chain a.js -> b.js -> c.js; rendering should produce a map straight
      // from a.js to c.js with b.js elided.
      const cSource = new SourceFile(_('/foo/src/c.js'), 'bcd123e', null, false, []);
      const bToCSourceMap: RawSourceMap = {
        mappings: encode([[[1, 0, 0, 0], [4, 0, 0, 3], [4, 0, 0, 6], [5, 0, 0, 7]]]),
        names: [],
        sources: ['c.js'],
        version: 3
      };
      const bSource =
          new SourceFile(_('/foo/src/b.js'), 'abcdef', bToCSourceMap, false, [cSource]);
      const aToBSourceMap: RawSourceMap = {
        mappings: encode([[[0, 0, 0, 0], [2, 0, 0, 3], [4, 0, 0, 2], [5, 0, 0, 5]]]),
        names: [],
        sources: ['b.js'],
        version: 3
      };
      const aSource =
          new SourceFile(_('/foo/src/a.js'), 'abdecf', aToBSourceMap, false, [bSource]);

      const aTocSourceMap = aSource.renderFlattenedSourceMap();
      expect(aTocSourceMap.version).toEqual(3);
      expect(aTocSourceMap.file).toEqual('a.js');
      expect(aTocSourceMap.names).toEqual([]);
      expect(aTocSourceMap.sourceRoot).toBeUndefined();
      // Only the deepest source (c.js) appears in the rendered map.
      expect(aTocSourceMap.sources).toEqual(['c.js']);
      expect(aTocSourceMap.sourcesContent).toEqual(['bcd123e']);
      expect(aTocSourceMap.mappings).toEqual(encode([
        [[1, 0, 0, 0], [2, 0, 0, 2], [3, 0, 0, 3], [3, 0, 0, 6], [4, 0, 0, 1], [5, 0, 0, 7]]
      ]));
    });

    it('should handle mappings that map from lines outside of the actual content lines', () => {
      const bSource = new SourceFile(_('/foo/src/b.js'), 'abcdef', null, false, []);
      const aToBSourceMap: RawSourceMap = {
        mappings: encode([
          [[0, 0, 0, 0], [2, 0, 0, 3], [4, 0, 0, 2], [5, 0, 0, 5]],
          [
            [0, 0, 0, 0],  // Extra mapping from a non-existent line
          ]
        ]),
        names: [],
        sources: ['b.js'],
        version: 3
      };
      const aSource =
          new SourceFile(_('/foo/src/a.js'), 'abdecf', aToBSourceMap, false, [bSource]);

      // The out-of-range mapping must not corrupt the rendered output; the
      // mappings round-trip unchanged.
      const aTocSourceMap = aSource.renderFlattenedSourceMap();
      expect(aTocSourceMap.version).toEqual(3);
      expect(aTocSourceMap.file).toEqual('a.js');
      expect(aTocSourceMap.names).toEqual([]);
      expect(aTocSourceMap.sourceRoot).toBeUndefined();
      expect(aTocSourceMap.sources).toEqual(['b.js']);
      expect(aTocSourceMap.sourcesContent).toEqual(['abcdef']);
      expect(aTocSourceMap.mappings).toEqual(aToBSourceMap.mappings);
    });
  });
});
|
||||
|
||||
describe('computeStartOfLinePositions()', () => {
|
||||
it('should compute the cumulative length of each line in the given string', () => {
|
||||
expect(computeStartOfLinePositions('')).toEqual([0]);
|
||||
expect(computeStartOfLinePositions('abc')).toEqual([0]);
|
||||
expect(computeStartOfLinePositions('\n')).toEqual([0, 1]);
|
||||
expect(computeStartOfLinePositions('\n\n')).toEqual([0, 1, 2]);
|
||||
expect(computeStartOfLinePositions('abc\n')).toEqual([0, 4]);
|
||||
expect(computeStartOfLinePositions('\nabc')).toEqual([0, 1]);
|
||||
expect(computeStartOfLinePositions('abc\ndefg')).toEqual([0, 4]);
|
||||
expect(computeStartOfLinePositions('abc\r\n')).toEqual([0, 4]);
|
||||
expect(computeStartOfLinePositions('abc\r\ndefg')).toEqual([0, 4]);
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
Reference in New Issue
Block a user