feat(ngcc): implement source-map flattening (#35132)
The library used by ngcc to update the source files (MagicString) is able to generate a source-map but it is not able to account for any previous source-map that the input text is already associated with.

There have been various attempts to fix this but none have been very successful, since it is not a trivial problem to solve.

This commit contains a novel approach that is able to load up a tree of source-files connected by source-maps and flatten them down into a single source-map that maps directly from the final generated file to the original sources referenced by the intermediate source-maps.

PR Close #35132
This commit is contained in:
parent 2a8dd4758c
commit df816c9c80
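In outline: load the final generated file, follow its source-map to the files it was generated from, recurse until reaching files that have no maps of their own, then compose each chain of mappings into one direct mapping. A minimal sketch of the composition step (illustrative only, not the ngcc API; the real implementation below additionally corrects column offsets via `segmentDiff()`/`offsetSegment()`):

```ts
// Illustrative sketch of flattening chained source maps; not the ngcc API.
interface Pos { line: number; column: number; }
interface Mapping { generated: Pos; original: Pos; }

// Order two positions within a file.
function compare(a: Pos, b: Pos): number {
  return a.line === b.line ? a.column - b.column : a.line - b.line;
}

// Find the last mapping in `map` whose generated position starts at or before `pos`.
function findEnclosing(map: Mapping[], pos: Pos): Mapping|null {
  let found: Mapping|null = null;
  for (const m of map) {
    if (compare(m.generated, pos) <= 0) found = m;
  }
  return found;
}

// Compose `upper` (intermediate -> generated) with `lower` (original -> intermediate)
// into mappings that go straight from original to generated.
function flatten(lower: Mapping[], upper: Mapping[]): Mapping[] {
  const result: Mapping[] = [];
  for (const u of upper) {
    const l = findEnclosing(lower, u.original);
    if (l !== null) result.push({generated: u.generated, original: l.original});
  }
  return result;
}
```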
@@ -122,6 +122,7 @@
     "shelljs": "^0.8.3",
     "source-map": "^0.6.1",
     "source-map-support": "0.5.9",
+    "sourcemap-codec": "^1.4.8",
     "systemjs": "0.18.10",
     "terser": "^4.4.0",
     "tsickle": "0.38.0",
@@ -35,6 +35,7 @@ ts_library(
         "@npm//magic-string",
         "@npm//semver",
         "@npm//source-map",
+        "@npm//sourcemap-codec",
         "@npm//typescript",
     ],
 )
@@ -20,7 +20,7 @@ import {EntryPointBundle} from '../packages/entry_point_bundle';
 import {Logger} from '../logging/logger';
 import {FileToWrite, getImportRewriter} from './utils';
 import {RenderingFormatter} from './rendering_formatter';
-import {extractSourceMap, renderSourceAndMap} from './source_maps';
+import {renderSourceAndMap} from './source_maps';

 /**
  * A structure that captures information about what needs to be rendered
@@ -81,8 +81,7 @@ export class DtsRenderer {
   }

   renderDtsFile(dtsFile: ts.SourceFile, renderInfo: DtsRenderInfo): FileToWrite[] {
-    const input = extractSourceMap(this.fs, this.logger, dtsFile);
-    const outputText = new MagicString(input.source);
+    const outputText = new MagicString(dtsFile.text);
     const printer = ts.createPrinter();
     const importManager = new ImportManager(
         getImportRewriter(this.bundle.dts !.r3SymbolsFile, this.bundle.isCore, false),
@@ -112,7 +111,7 @@ export class DtsRenderer {
     this.dtsFormatter.addImports(
         outputText, importManager.getAllImports(dtsFile.fileName), dtsFile);

-    return renderSourceAndMap(dtsFile, input, outputText);
+    return renderSourceAndMap(this.fs, dtsFile, outputText);
   }

   private getTypingsFilesToRender(
@@ -18,7 +18,7 @@ import {NgccReflectionHost} from '../host/ngcc_host';
 import {Logger} from '../logging/logger';
 import {EntryPointBundle} from '../packages/entry_point_bundle';
 import {RenderingFormatter, RedundantDecoratorMap} from './rendering_formatter';
-import {extractSourceMap, renderSourceAndMap} from './source_maps';
+import {renderSourceAndMap} from './source_maps';
 import {FileToWrite, getImportRewriter, stripExtension} from './utils';

 /**
@@ -61,8 +61,7 @@ export class Renderer {
       switchMarkerAnalysis: SwitchMarkerAnalysis|undefined,
       privateDeclarationsAnalyses: PrivateDeclarationsAnalyses): FileToWrite[] {
     const isEntryPoint = sourceFile === this.bundle.src.file;
-    const input = extractSourceMap(this.fs, this.logger, sourceFile);
-    const outputText = new MagicString(input.source);
+    const outputText = new MagicString(sourceFile.text);

     if (switchMarkerAnalysis) {
       this.srcFormatter.rewriteSwitchableDeclarations(
@@ -115,7 +114,7 @@ export class Renderer {
     }

     if (compiledFile || switchMarkerAnalysis || isEntryPoint) {
-      return renderSourceAndMap(sourceFile, input, outputText);
+      return renderSourceAndMap(this.fs, sourceFile, outputText);
     } else {
       return [];
     }
@@ -5,13 +5,13 @@
  * Use of this source code is governed by an MIT-style license that can be
  * found in the LICENSE file at https://angular.io/license
  */
-import {SourceMapConverter, commentRegex, fromJSON, fromObject, fromSource, generateMapFileComment, mapFileCommentRegex, removeComments, removeMapFileComments} from 'convert-source-map';
+import {SourceMapConverter, fromObject, generateMapFileComment} from 'convert-source-map';
 import MagicString from 'magic-string';
-import {RawSourceMap, SourceMapConsumer, SourceMapGenerator} from 'source-map';
 import * as ts from 'typescript';
-import {resolve, FileSystem, absoluteFromSourceFile, dirname, basename, absoluteFrom} from '../../../src/ngtsc/file_system';
-import {Logger} from '../logging/logger';
+import {FileSystem, absoluteFromSourceFile, basename, absoluteFrom} from '../../../src/ngtsc/file_system';
 import {FileToWrite} from './utils';
+import {SourceFileLoader} from '../sourcemaps/source_file_loader';
+import {RawSourceMap} from '../sourcemaps/raw_source_map';

 export interface SourceMapInfo {
   source: string;
@@ -19,117 +19,33 @@ export interface SourceMapInfo {
   isInline: boolean;
 }

-/**
- * Get the map from the source (note whether it is inline or external)
- */
-export function extractSourceMap(
-    fs: FileSystem, logger: Logger, file: ts.SourceFile): SourceMapInfo {
-  const inline = commentRegex.test(file.text);
-  const external = mapFileCommentRegex.exec(file.text);
-
-  if (inline) {
-    const inlineSourceMap = fromSource(file.text);
-    return {
-      source: removeComments(file.text).replace(/\n\n$/, '\n'),
-      map: inlineSourceMap,
-      isInline: true,
-    };
-  } else if (external) {
-    let externalSourceMap: SourceMapConverter|null = null;
-    try {
-      const fileName = external[1] || external[2];
-      const filePath = resolve(dirname(absoluteFromSourceFile(file)), fileName);
-      const mappingFile = fs.readFile(filePath);
-      externalSourceMap = fromJSON(mappingFile);
-    } catch (e) {
-      if (e.code === 'ENOENT') {
-        logger.warn(
-            `The external map file specified in the source code comment "${e.path}" was not found on the file system.`);
-        const mapPath = absoluteFrom(file.fileName + '.map');
-        if (basename(e.path) !== basename(mapPath) && fs.exists(mapPath) &&
-            fs.stat(mapPath).isFile()) {
-          logger.warn(
-              `Guessing the map file name from the source file name: "${basename(mapPath)}"`);
-          try {
-            externalSourceMap = fromObject(JSON.parse(fs.readFile(mapPath)));
-          } catch (e) {
-            logger.error(e);
-          }
-        }
-      }
-    }
-    return {
-      source: removeMapFileComments(file.text).replace(/\n\n$/, '\n'),
-      map: externalSourceMap,
-      isInline: false,
-    };
-  } else {
-    return {source: file.text, map: null, isInline: false};
-  }
-}
-
 /**
  * Merge the input and output source-maps, replacing the source-map comment in the output file
  * with an appropriate source-map comment pointing to the merged source-map.
  */
 export function renderSourceAndMap(
-    sourceFile: ts.SourceFile, input: SourceMapInfo, output: MagicString): FileToWrite[] {
-  const outputPath = absoluteFromSourceFile(sourceFile);
-  const outputMapPath = absoluteFrom(`${outputPath}.map`);
-  const relativeSourcePath = basename(outputPath);
-  const relativeMapPath = `${relativeSourcePath}.map`;
-
-  const outputMap = output.generateMap({
-    source: outputPath,
-    includeContent: true,
-    // hires: true // TODO: This results in accurate but huge sourcemaps. Instead we should fix
-    // the merge algorithm.
-  });
-
-  // we must set this after generation as magic string does "manipulation" on the path
-  outputMap.file = relativeSourcePath;
-
-  const mergedMap =
-      mergeSourceMaps(input.map && input.map.toObject(), JSON.parse(outputMap.toString()));
-
-  const result: FileToWrite[] = [];
-  if (input.isInline) {
-    result.push({path: outputPath, contents: `${output.toString()}\n${mergedMap.toComment()}`});
-  } else {
-    result.push({
-      path: outputPath,
-      contents: `${output.toString()}\n${generateMapFileComment(relativeMapPath)}`
-    });
-    result.push({path: outputMapPath, contents: mergedMap.toJSON()});
-  }
-  return result;
-}
-
-
-/**
- * Merge the two specified source-maps into a single source-map that hides the intermediate
- * source-map.
- * E.g. Consider these mappings:
- *
- * ```
- * OLD_SRC -> OLD_MAP -> INTERMEDIATE_SRC -> NEW_MAP -> NEW_SRC
- * ```
- *
- * this will be replaced with:
- *
- * ```
- * OLD_SRC -> MERGED_MAP -> NEW_SRC
- * ```
- */
-export function mergeSourceMaps(
-    oldMap: RawSourceMap | null, newMap: RawSourceMap): SourceMapConverter {
-  if (!oldMap) {
-    return fromObject(newMap);
-  }
-  const oldMapConsumer = new SourceMapConsumer(oldMap);
-  const newMapConsumer = new SourceMapConsumer(newMap);
-  const mergedMapGenerator = SourceMapGenerator.fromSourceMap(newMapConsumer);
-  mergedMapGenerator.applySourceMap(oldMapConsumer);
-  const merged = fromJSON(mergedMapGenerator.toString());
-  return merged;
-}
+    fs: FileSystem, sourceFile: ts.SourceFile, generatedMagicString: MagicString): FileToWrite[] {
+  const generatedPath = absoluteFromSourceFile(sourceFile);
+  const generatedMapPath = absoluteFrom(`${generatedPath}.map`);
+  const generatedContent = generatedMagicString.toString();
+  const generatedMap: RawSourceMap = generatedMagicString.generateMap(
+      {file: generatedPath, source: generatedPath, includeContent: true});
+
+  const loader = new SourceFileLoader(fs);
+  const generatedFile = loader.loadSourceFile(
+      generatedPath, generatedContent, {map: generatedMap, mapPath: generatedMapPath});
+
+  const rawMergedMap: RawSourceMap = generatedFile.renderFlattenedSourceMap();
+  const mergedMap = fromObject(rawMergedMap);
+
+  if (generatedFile.sources[0]?.inline) {
+    // The input source-map was inline so make the output one inline too.
+    return [{path: generatedPath, contents: `${generatedFile.contents}\n${mergedMap.toComment()}`}];
+  } else {
+    const sourceMapComment = generateMapFileComment(`${basename(generatedPath)}.map`);
+    return [
+      {path: generatedPath, contents: `${generatedFile.contents}\n${sourceMapComment}`},
+      {path: generatedMapPath, contents: mergedMap.toJSON()}
+    ];
+  }
+}
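The effect of the rewritten `renderSourceAndMap()` above: MagicString can only describe its own edits, so its map is now handed to the new loader, which links it to any pre-existing maps and renders a single flattened map. A rough sketch of that flow (the path and contents are made up; only the MagicString calls are real API):

```ts
import MagicString from 'magic-string';

// ngcc edits the file text via MagicString...
const magic = new MagicString('export class A {}\n');
magic.prepend('// ADD IMPORTS\n');

// ...and MagicString can only produce a map for *its own* edits:
const editMap = magic.generateMap({source: '/pkg/file.js', includeContent: true});

// Previously this map was merged with at most one pre-existing map via the
// `source-map` library. Now it is handed to SourceFileLoader.loadSourceFile(),
// which follows sourceMappingURL comments and `sourcesContent` recursively,
// and SourceFile.renderFlattenedSourceMap() emits a single map from the final
// text all the way back to the earliest original sources.
console.log(editMap.mappings);
```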
@@ -0,0 +1,21 @@
+/**
+ * @license
+ * Copyright Google Inc. All Rights Reserved.
+ *
+ * Use of this source code is governed by an MIT-style license that can be
+ * found in the LICENSE file at https://angular.io/license
+ */
+
+/**
+ * This interface is the basic structure of the JSON in a raw source map that one might load from
+ * disk.
+ */
+export interface RawSourceMap {
+  version: number|string;
+  file?: string;
+  sourceRoot?: string;
+  sources: string[];
+  names: string[];
+  sourcesContent?: (string|null)[];
+  mappings: string;
+}
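For reference, a hand-written value conforming to the interface above (the `./raw_source_map` import path assumes this file's location; `'AAAA'` is the base64-VLQ encoding of a single segment `[0, 0, 0, 0]`):

```ts
import {RawSourceMap} from './raw_source_map';

// Maps line 0/column 0 of the generated `file.js` to line 0/column 0 of
// source index 0 (`file.ts`).
const exampleMap: RawSourceMap = {
  version: 3,
  file: 'file.js',
  sourceRoot: '',
  sources: ['file.ts'],
  names: [],
  sourcesContent: ['export class A {}\n'],
  mappings: 'AAAA',
};
```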
@@ -0,0 +1,88 @@
+/**
+ * @license
+ * Copyright Google Inc. All Rights Reserved.
+ *
+ * Use of this source code is governed by an MIT-style license that can be
+ * found in the LICENSE file at https://angular.io/license
+ */
+
+
+/**
+ * A marker that indicates the start of a segment in a mapping.
+ *
+ * The end of a segment is indicated by the first segment-marker of another mapping whose start
+ * is greater or equal to this one.
+ */
+export interface SegmentMarker {
+  readonly line: number;
+  readonly column: number;
+}
+
+/**
+ * Compare two segment-markers, for use in a search or sorting algorithm.
+ *
+ * @returns a positive number if `a` is after `b`, a negative number if `b` is after `a`
+ * and zero if they are at the same position.
+ */
+export function compareSegments(a: SegmentMarker, b: SegmentMarker): number {
+  return a.line === b.line ? a.column - b.column : a.line - b.line;
+}
+
+// The `1` is to indicate a newline character between the lines.
+// Note that in the actual contents there could be more than one character that indicates a newline
+// - e.g. \r\n - but that is not important here since segment-markers are in line/column pairs and
+// so differences in length due to extra `\r` characters do not affect the algorithms.
+const NEWLINE_MARKER_OFFSET = 1;
+
+/**
+ * Compute the difference between two segment markers in a source file.
+ *
+ * @param lineLengths the lengths of each line of content of the source file where we are computing
+ * the difference
+ * @param a the start marker
+ * @param b the end marker
+ * @returns the number of characters between the two segments `a` and `b`
+ */
+export function segmentDiff(lineLengths: number[], a: SegmentMarker, b: SegmentMarker) {
+  let diff = b.column - a.column;
+
+  // Deal with `a` being before `b`
+  for (let lineIndex = a.line; lineIndex < b.line; lineIndex++) {
+    diff += lineLengths[lineIndex] + NEWLINE_MARKER_OFFSET;
+  }
+
+  // Deal with `a` being after `b`
+  for (let lineIndex = a.line - 1; lineIndex >= b.line; lineIndex--) {
+    // The `+ 1` is the newline character between the lines
+    diff -= lineLengths[lineIndex] + NEWLINE_MARKER_OFFSET;
+  }
+  return diff;
+}
+
+/**
+ * Return a new segment-marker that is offset by the given number of characters.
+ *
+ * @param lineLengths The length of each line in the source file whose segment-marker we are
+ * offsetting.
+ * @param marker The segment to offset.
+ * @param offset The number of characters to offset by.
+ */
+export function offsetSegment(lineLengths: number[], marker: SegmentMarker, offset: number) {
+  if (offset === 0) {
+    return marker;
+  }
+
+  let line = marker.line;
+  let column = marker.column + offset;
+
+  while (line < lineLengths.length - 1 && column > lineLengths[line]) {
+    column -= lineLengths[line] + NEWLINE_MARKER_OFFSET;
+    line++;
+  }
+  while (line > 0 && column < 0) {
+    line--;
+    column += lineLengths[line] + NEWLINE_MARKER_OFFSET;
+  }
+
+  return {line, column};
+}
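To make the line/column arithmetic concrete (the relative import path is indicative; the values follow directly from the functions above):

```ts
import {compareSegments, offsetSegment, segmentDiff} from './segment_marker';

// 'ab\ncdef' has line lengths [2, 4]; each newline counts as one extra character.
const lineLengths = [2, 4];

// Negative: (0,1) comes before (1,0).
compareSegments({line: 0, column: 1}, {line: 1, column: 0});

// b.column - a.column (1) + line 0 length (2) + newline (1) = 4.
segmentDiff(lineLengths, {line: 0, column: 1}, {line: 1, column: 2});  // => 4

// And offsetting by those 4 characters gets back to the same place.
offsetSegment(lineLengths, {line: 0, column: 1}, 4);  // => {line: 1, column: 2}
```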
@@ -0,0 +1,313 @@
+/**
+ * @license
+ * Copyright Google Inc. All Rights Reserved.
+ *
+ * Use of this source code is governed by an MIT-style license that can be
+ * found in the LICENSE file at https://angular.io/license
+ */
+import {removeComments, removeMapFileComments} from 'convert-source-map';
+import {SourceMapMappings, SourceMapSegment, decode, encode} from 'sourcemap-codec';
+import {AbsoluteFsPath, dirname, relative} from '../../../src/ngtsc/file_system';
+import {RawSourceMap} from './raw_source_map';
+import {SegmentMarker, compareSegments, offsetSegment, segmentDiff} from './segment_marker';
+
+export function removeSourceMapComments(contents: string): string {
+  return removeMapFileComments(removeComments(contents)).replace(/\n\n$/, '\n');
+}
+
+export class SourceFile {
+  /**
+   * The parsed mappings, flattened so that any intermediate source mappings have been merged in.
+   *
+   * The result is that any source file mentioned in the flattened mappings has no source map
+   * (it is a pure original source file).
+   */
+  readonly flattenedMappings: Mapping[];
+  readonly lineLengths: number[];
+
+  constructor(
+      /** The path to this source file. */
+      readonly sourcePath: AbsoluteFsPath,
+      /** The contents of this source file. */
+      readonly contents: string,
+      /** The raw source map (if any) associated with this source file. */
+      readonly rawMap: RawSourceMap|null,
+      /** Whether this source file's source map was inline or external. */
+      readonly inline: boolean,
+      /** Any source files referenced by the raw source map associated with this source file. */
+      readonly sources: (SourceFile|null)[]) {
+    this.contents = removeSourceMapComments(contents);
+    this.lineLengths = computeLineLengths(this.contents);
+    this.flattenedMappings = this.flattenMappings();
+  }
+
+  /**
+   * Render the raw source map generated from the flattened mappings.
+   */
+  renderFlattenedSourceMap(): RawSourceMap {
+    const sources: SourceFile[] = [];
+    const names: string[] = [];
+
+    // Ensure a mapping line array for each line in the generated source.
+    const mappings: SourceMapMappings = this.lineLengths.map(() => []);
+
+    for (const mapping of this.flattenedMappings) {
+      const mappingLine = mappings[mapping.generatedSegment.line];
+      const sourceIndex = findIndexOrAdd(sources, mapping.originalSource);
+      const mappingArray: SourceMapSegment = [
+        mapping.generatedSegment.column,
+        sourceIndex,
+        mapping.originalSegment.line,
+        mapping.originalSegment.column,
+      ];
+      if (mapping.name !== undefined) {
+        const nameIndex = findIndexOrAdd(names, mapping.name);
+        mappingArray.push(nameIndex);
+      }
+      mappingLine.push(mappingArray);
+    }
+
+    const sourcePathDir = dirname(this.sourcePath);
+    const sourceMap: RawSourceMap = {
+      version: 3,
+      file: relative(sourcePathDir, this.sourcePath),
+      sources: sources.map(sf => relative(sourcePathDir, sf.sourcePath)), names,
+      mappings: encode(mappings),
+      sourcesContent: sources.map(sf => sf.contents),
+    };
+    return sourceMap;
+  }
+
+  /**
+   * Flatten the parsed mappings for this source file, so that all the mappings are to pure
+   * original source files with no transitive source maps.
+   */
+  private flattenMappings(): Mapping[] {
+    const mappings = parseMappings(this.rawMap, this.sources);
+    const originalSegments = extractOriginalSegments(mappings);
+    const flattenedMappings: Mapping[] = [];
+    for (let mappingIndex = 0; mappingIndex < mappings.length; mappingIndex++) {
+      const aToBmapping = mappings[mappingIndex];
+      const bSource = aToBmapping.originalSource;
+      if (bSource.flattenedMappings.length === 0) {
+        // The b source file has no mappings of its own (i.e. it is a pure original file)
+        // so just use the mapping as-is.
+        flattenedMappings.push(aToBmapping);
+        continue;
+      }
+
+      // The `incomingStart` and `incomingEnd` are the `SegmentMarker`s in `B` that represent the
+      // section of the `B` source file that is being mapped to by the current `aToBmapping`.
+      //
+      // For example, consider the mappings from A to B:
+      //
+      // src A   src B   mapping
+      //
+      //   a ----- a     [0, 0]
+      //   b       b
+      //   f -  /- c     [4, 2]
+      //   g  \ /  d
+      //   c -/\   e
+      //   d    \- f     [2, 5]
+      //   e
+      //
+      // For mapping [0,0] the incoming start and end are 0 and 2 (i.e. the range a, b, c)
+      // For mapping [4,2] the incoming start and end are 2 and 5 (i.e. the range c, d, e, f)
+      //
+      const incomingStart = aToBmapping.originalSegment;
+      const incomingEndIndex = originalSegments.indexOf(incomingStart) + 1;
+      const incomingEnd = incomingEndIndex < originalSegments.length ?
+          originalSegments[incomingEndIndex] :
+          undefined;
+
+      // The `outgoingStartIndex` and `outgoingEndIndex` are the indices of the range of mappings
+      // that leave `b` that we are interested in merging with the aToBmapping.
+      // We actually care about all the markers from the last bToCmapping directly before the
+      // `incomingStart` to the last bToCmapping directly before the `incomingEnd`, inclusive.
+      //
+      // For example, if we consider the range 2 to 5 from above (i.e. c, d, e, f) with the
+      // following mappings from B to C:
+      //
+      //   src B   src C   mapping
+      //     a
+      //     b ----- b     [1, 0]
+      //   - c       c
+      //  |  d       d
+      //  |  e ----- 1     [4, 3]
+      //   - f  \    2
+      //         \   3
+      //          \- e     [4, 6]
+      //
+      // The range with `incomingStart` at 2 and `incomingEnd` at 5 has outgoing start mapping of
+      // [1,0] and outgoing end mapping of [4, 6], which also includes [4, 3].
+      //
+      let outgoingStartIndex = findLastIndex(
+          bSource.flattenedMappings,
+          mapping => compareSegments(mapping.generatedSegment, incomingStart) <= 0);
+      if (outgoingStartIndex < 0) {
+        outgoingStartIndex = 0;
+      }
+      const outgoingEndIndex = incomingEnd !== undefined ?
+          findLastIndex(
+              bSource.flattenedMappings,
+              mapping => compareSegments(mapping.generatedSegment, incomingEnd) < 0) :
+          bSource.flattenedMappings.length - 1;
+
+      for (let bToCmappingIndex = outgoingStartIndex; bToCmappingIndex <= outgoingEndIndex;
+           bToCmappingIndex++) {
+        const bToCmapping: Mapping = bSource.flattenedMappings[bToCmappingIndex];
+        flattenedMappings.push(mergeMappings(this, aToBmapping, bToCmapping));
+      }
+    }
+    return flattenedMappings;
+  }
+}
+
+function findLastIndex<T>(items: T[], predicate: (item: T) => boolean): number {
+  for (let index = items.length - 1; index >= 0; index--) {
+    if (predicate(items[index])) {
+      return index;
+    }
+  }
+  return -1;
+}
+
+/**
+ * A Mapping consists of two segment markers: one in the generated source and one in the original
+ * source, which indicate the start of each segment. The end of a segment is indicated by the first
+ * segment marker of another mapping whose start is greater or equal to this one.
+ *
+ * It may also include a name associated with the segment being mapped.
+ */
+export interface Mapping {
+  readonly generatedSegment: SegmentMarker;
+  readonly originalSource: SourceFile;
+  readonly originalSegment: SegmentMarker;
+  readonly name?: string;
+}
+
+/**
+ * Find the index of `item` in the `items` array.
+ * If it is not found, then push `item` to the end of the array and return its new index.
+ *
+ * @param items the collection in which to look for `item`.
+ * @param item the item to look for.
+ * @returns the index of the `item` in the `items` array.
+ */
+function findIndexOrAdd<T>(items: T[], item: T): number {
+  const itemIndex = items.indexOf(item);
+  if (itemIndex > -1) {
+    return itemIndex;
+  } else {
+    items.push(item);
+    return items.length - 1;
+  }
+}
+
+
+/**
+ * Merge two mappings that go from A to B and B to C, to result in a mapping that goes from A to C.
+ */
+export function mergeMappings(generatedSource: SourceFile, ab: Mapping, bc: Mapping): Mapping {
+  const name = bc.name || ab.name;
+
+  // We need to modify the segment-markers of the new mapping to take into account the shifts that
+  // occur due to the combination of the two mappings.
+  // For example:
+
+  // * Simple map where the B->C starts at the same place the A->B ends:
+  //
+  // ```
+  // A: 1 2 b c d
+  //        |        A->B [2,0]
+  //        |          |
+  // B:     b c d    A->C [2,1]
+  //        |          |
+  //        |        B->C [0,1]
+  // C:   a b c d e
+  // ```
+
+  // * More complicated case where diffs of segment-markers is needed:
+  //
+  // ```
+  // A: b 1 2 c d
+  //     \
+  //      |          A->B [0,1*]    [0,1*]
+  //      |                 |+3       |+3
+  // B: a b 1 2 c d  A->C [0,1]     [3,2]
+  //    |    /             |+1        |
+  //    |   /        B->C [0*,0]    [4*,2]
+  //    |  /
+  // C: a b c d e
+  // ```
+  //
+  // `[0,1]` mapping from A->C:
+  // The difference between the "original segment-marker" of A->B (1*) and the "generated
+  // segment-marker" of B->C (0*): `1 - 0 = +1`.
+  // Since it is positive we must increment the "original segment-marker" with `1` to give [0,1].
+  //
+  // `[3,2]` mapping from A->C:
+  // The difference between the "original segment-marker" of A->B (1*) and the "generated
+  // segment-marker" of B->C (4*): `1 - 4 = -3`.
+  // Since it is negative we must increment the "generated segment-marker" with `3` to give [3,2].
+
+  const diff = segmentDiff(ab.originalSource.lineLengths, ab.originalSegment, bc.generatedSegment);
+  if (diff > 0) {
+    return {
+      name,
+      generatedSegment: offsetSegment(generatedSource.lineLengths, ab.generatedSegment, diff),
+      originalSource: bc.originalSource,
+      originalSegment: bc.originalSegment,
+    };
+  } else {
+    return {
+      name,
+      generatedSegment: ab.generatedSegment,
+      originalSource: bc.originalSource,
+      originalSegment: offsetSegment(bc.originalSource.lineLengths, bc.originalSegment, -diff),
+    };
+  }
+}
+
+/**
+ * Parse the `rawMappings` into an array of parsed mappings, which reference source-files provided
+ * in the `sources` parameter.
+ */
+export function parseMappings(
+    rawMap: RawSourceMap | null, sources: (SourceFile | null)[]): Mapping[] {
+  if (rawMap === null) {
+    return [];
+  }
+
+  const rawMappings = decode(rawMap.mappings);
+  if (rawMappings === null) {
+    return [];
+  }
+
+  const mappings: Mapping[] = [];
+  for (let generatedLine = 0; generatedLine < rawMappings.length; generatedLine++) {
+    const generatedLineMappings = rawMappings[generatedLine];
+    for (const rawMapping of generatedLineMappings) {
+      if (rawMapping.length >= 4) {
+        const generatedColumn = rawMapping[0];
+        const name = rawMapping.length === 5 ? rawMap.names[rawMapping[4]] : undefined;
+        const mapping: Mapping = {
+          generatedSegment: {line: generatedLine, column: generatedColumn},
+          originalSource: sources[rawMapping[1] !] !,
+          originalSegment: {line: rawMapping[2] !, column: rawMapping[3] !}, name
+        };
+        mappings.push(mapping);
+      }
+    }
+  }
+  return mappings;
+}
+
+export function extractOriginalSegments(mappings: Mapping[]): SegmentMarker[] {
+  return mappings.map(mapping => mapping.originalSegment).sort(compareSegments);
+}
+
+export function computeLineLengths(str: string): number[] {
+  return (str.split(/\r?\n/)).map(s => s.length);
+}
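A minimal end-to-end illustration of the flattening (hypothetical paths; the `as any` casts stand in for ngcc's branded `AbsoluteFsPath`, and rendering assumes a file-system has been initialised as in the specs):

```ts
import {SourceFile} from './source_file';

// A pure original file: no map, no upstream sources.
const oTs = new SourceFile('/pkg/o.ts' as any, 'original', null, false, []);

// An intermediate file whose map points at o.ts ('AAAA' = segment [0, 0, 0, 0]).
const midMap = {version: 3, file: 'mid.js', sources: ['o.ts'], names: [], mappings: 'AAAA'};
const mid = new SourceFile('/pkg/mid.js' as any, 'intermediate', midMap, false, [oTs]);

// The final generated file, whose map points only at mid.js.
const genMap = {version: 3, file: 'gen.js', sources: ['mid.js'], names: [], mappings: 'AAAA'};
const gen = new SourceFile('/pkg/gen.js' as any, 'generated', genMap, true, [mid]);

// The flattened map skips mid.js entirely and maps straight to o.ts.
console.log(gen.renderFlattenedSourceMap().sources);  // ['o.ts']
```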
@@ -0,0 +1,142 @@
+/**
+ * @license
+ * Copyright Google Inc. All Rights Reserved.
+ *
+ * Use of this source code is governed by an MIT-style license that can be
+ * found in the LICENSE file at https://angular.io/license
+ */
+import {commentRegex, fromComment, mapFileCommentRegex} from 'convert-source-map';
+import {AbsoluteFsPath, FileSystem, absoluteFrom} from '../../../src/ngtsc/file_system';
+import {RawSourceMap} from './raw_source_map';
+import {SourceFile} from './source_file';
+
+/**
+ * This class can be used to load a source file, its associated source map and any upstream sources.
+ *
+ * Since a source file might reference (or include) a source map, this class can load those too.
+ * Since a source map might reference other source files, these are also loaded as needed.
+ *
+ * This is done recursively. The result is a "tree" of `SourceFile` objects, each containing
+ * mappings to other `SourceFile` objects as necessary.
+ */
+export class SourceFileLoader {
+  constructor(private fs: FileSystem) {}
+
+  /**
+   * Load a source file, compute its source map, and recursively load any referenced source files.
+   *
+   * @param sourcePath The path to the source file to load.
+   * @param contents The contents of the source file to load (if known).
+   * The contents may be known because the source file was inlined into a source map.
+   * If it is not known the contents will be read from the file at the `sourcePath`.
+   * @param mapAndPath The raw source-map and the path to the source-map file, if known.
+   * @param previousPaths An internal parameter used for cyclic dependency tracking.
+   * @returns a SourceFile if the content for one was provided or able to be loaded from disk,
+   * `null` otherwise.
+   */
+  loadSourceFile(sourcePath: AbsoluteFsPath, contents: string, mapAndPath: MapAndPath): SourceFile;
+  loadSourceFile(sourcePath: AbsoluteFsPath, contents: string|null): SourceFile|null;
+  loadSourceFile(sourcePath: AbsoluteFsPath): SourceFile|null;
+  loadSourceFile(
+      sourcePath: AbsoluteFsPath, contents: string|null, mapAndPath: null,
+      previousPaths: AbsoluteFsPath[]): SourceFile|null;
+  loadSourceFile(
+      sourcePath: AbsoluteFsPath, contents: string|null = null, mapAndPath: MapAndPath|null = null,
+      previousPaths: AbsoluteFsPath[] = []): SourceFile|null {
+    if (contents === null) {
+      if (!this.fs.exists(sourcePath)) {
+        return null;
+      }
+
+      // Track source file paths if we have loaded them from disk so that we don't get into an
+      // infinite recursion
+      if (previousPaths.includes(sourcePath)) {
+        throw new Error(
+            `Circular source file mapping dependency: ${previousPaths.join(' -> ')} -> ${sourcePath}`);
+      }
+      previousPaths = previousPaths.concat([sourcePath]);
+
+      contents = this.fs.readFile(sourcePath);
+    }
+
+    // If not provided try to load the source map based on the source itself
+    if (mapAndPath === null) {
+      mapAndPath = this.loadSourceMap(sourcePath, contents);
+    }
+
+    let map: RawSourceMap|null = null;
+    let inline = true;
+    let sources: (SourceFile | null)[] = [];
+    if (mapAndPath !== null) {
+      const basePath = mapAndPath.mapPath || sourcePath;
+      sources = this.processSources(basePath, mapAndPath.map, previousPaths);
+      map = mapAndPath.map;
+      inline = mapAndPath.mapPath === null;
+    }
+
+    return new SourceFile(sourcePath, contents, map, inline, sources);
+  }
+
+  /**
+   * Find the source map associated with the source file whose `sourcePath` and `contents` are
+   * provided.
+   *
+   * Source maps can be inline, as part of a base64 encoded comment, or external as a separate file
+   * whose path is indicated in a comment or implied from the name of the source file itself.
+   */
+  private loadSourceMap(sourcePath: AbsoluteFsPath, contents: string): MapAndPath|null {
+    const inline = commentRegex.exec(contents);
+    if (inline !== null) {
+      return {map: fromComment(inline.pop() !).sourcemap, mapPath: null};
+    }
+
+    const external = mapFileCommentRegex.exec(contents);
+    if (external) {
+      try {
+        const fileName = external[1] || external[2];
+        const externalMapPath = this.fs.resolve(this.fs.dirname(sourcePath), fileName);
+        return {map: this.loadRawSourceMap(externalMapPath), mapPath: externalMapPath};
+      } catch {
+        return null;
+      }
+    }
+
+    const impliedMapPath = absoluteFrom(sourcePath + '.map');
+    if (this.fs.exists(impliedMapPath)) {
+      return {map: this.loadRawSourceMap(impliedMapPath), mapPath: impliedMapPath};
+    }
+
+    return null;
+  }
+
+  /**
+   * Iterate over each of the "sources" for this source file's source map, recursively loading each
+   * source file and its associated source map.
+   */
+  private processSources(
+      basePath: AbsoluteFsPath, map: RawSourceMap,
+      previousPaths: AbsoluteFsPath[]): (SourceFile|null)[] {
+    const sourceRoot = this.fs.resolve(this.fs.dirname(basePath), map.sourceRoot || '');
+    return map.sources.map((source, index) => {
+      const path = this.fs.resolve(sourceRoot, source);
+      const content = map.sourcesContent && map.sourcesContent[index] || null;
+      return this.loadSourceFile(path, content, null, previousPaths);
+    });
+  }
+
+  /**
+   * Load the source map from the file at `mapPath`, parsing its JSON contents into a `RawSourceMap`
+   * object.
+   */
+  private loadRawSourceMap(mapPath: AbsoluteFsPath): RawSourceMap {
+    return JSON.parse(this.fs.readFile(mapPath));
+  }
+}
+
+/** A small helper structure that is returned from `loadSourceMap()`. */
+interface MapAndPath {
+  /** The path to the source map if it was external or `null` if it was inline. */
+  mapPath: AbsoluteFsPath|null;
+  /** The raw source map itself. */
+  map: RawSourceMap;
+}
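Indicative usage of the loader, mirroring the specs further down (the relative import path and file layout are assumptions):

```ts
import {absoluteFrom, getFileSystem} from '@angular/compiler-cli/src/ngtsc/file_system';
import {SourceFileLoader} from './source_file_loader';

const loader = new SourceFileLoader(getFileSystem());

// Loads /app/index.js; finds its map via an inline comment, an external
// `sourceMappingURL` file, or the implied /app/index.js.map; then recursively
// loads every file in the map's `sources`, preferring `sourcesContent` over disk.
const sourceFile = loader.loadSourceFile(absoluteFrom('/app/index.js'));
if (sourceFile !== null) {
  console.log(sourceFile.sources.map(s => s && s.sourcePath));
}
```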
@@ -27,6 +27,7 @@ ts_library(
         "@npm//convert-source-map",
         "@npm//dependency-graph",
         "@npm//magic-string",
+        "@npm//sourcemap-codec",
         "@npm//typescript",
     ],
 )
@@ -6,6 +6,7 @@
  * found in the LICENSE file at https://angular.io/license
  */
 import {Statement} from '@angular/compiler';
+import {SourceMapMappings, encode} from 'sourcemap-codec';
 import MagicString from 'magic-string';
 import * as ts from 'typescript';
 import {fromObject, generateMapFileComment, SourceMapConverter} from 'convert-source-map';
@@ -34,7 +35,7 @@ class TestRenderingFormatter implements RenderingFormatter {
     output.prepend('\n// ADD IMPORTS\n');
   }
   addExports(output: MagicString, baseEntryPointPath: string, exports: ExportInfo[]) {
-    output.prepend('\n// ADD EXPORTS\n');
+    output.prepend('\n// ADD EXPORTS\r\n');
   }
   addDirectExports(output: MagicString, exports: Reexport[]): void {
     output.prepend('\n// ADD DIRECT EXPORTS\n');
@@ -116,10 +117,11 @@ function createTestRenderer(
 runInEachFileSystem(() => {
   describe('Renderer', () => {
     let _: typeof absoluteFrom;
-    let INPUT_PROGRAM: TestFile;
+    let TS_CONTENT: TestFile;
+    let JS_CONTENT: TestFile;
     let COMPONENT_PROGRAM: TestFile;
     let NGMODULE_PROGRAM: TestFile;
-    let INPUT_PROGRAM_MAP: SourceMapConverter;
+    let JS_CONTENT_MAP: SourceMapConverter;
     let RENDERED_CONTENTS: string;
     let OUTPUT_PROGRAM_MAP: SourceMapConverter;
     let MERGED_OUTPUT_PROGRAM_MAP: SourceMapConverter;
@@ -127,10 +129,16 @@ runInEachFileSystem(() => {
     beforeEach(() => {
       _ = absoluteFrom;

-      INPUT_PROGRAM = {
+      TS_CONTENT = {
+        name: _('/node_modules/test-package/src/file.ts'),
+        contents:
+            `import {Directive} from '@angular/core';\n@Directive({selector: '[a]'})\nexport class A {\n foo(x: number): number { return x; }\n}`
+      };
+
+      JS_CONTENT = {
         name: _('/node_modules/test-package/src/file.js'),
         contents:
-            `import { Directive } from '@angular/core';\nexport class A {\n foo(x) {\n return x;\n }\n}\nA.decorators = [\n { type: Directive, args: [{ selector: '[a]' }] }\n];\n`
+            `import { Directive } from '@angular/core';\nexport class A {\n foo(x) {\r\n return x;\n }\r\n}\nA.decorators = [\n { type: Directive, args: [{ selector: '[a]' }] }\r\n];\n`
       };

       COMPONENT_PROGRAM = {
@@ -145,62 +153,80 @@ runInEachFileSystem(() => {
             `import { NgModule } from '@angular/core';\nexport class A {}\nA.decorators = [\n { type: NgModule, args: [{}] }\n];\n`
       };

-      INPUT_PROGRAM_MAP = fromObject({
+      const JS_CONTENT_MAPPINGS: SourceMapMappings = [
+        [
+          [0, 0, 0, 0], [7, 0, 0, 7], [9, 0, 0, 8], [18, 0, 0, 17], [20, 0, 0, 18], [26, 0, 0, 24],
+          [41, 0, 0, 39], [42, 0, 0, 40]
+        ],
+        [[0, 0, 2, 0], [4, 0, 2, 13], [5, 0, 2, 14], [8, 0, 2, 0], [14, 0, 2, 13], [15, 0, 2, 14]],
+        [[4, 0, 3, 2], [7, 0, 3, 5], [8, 0, 3, 6], [9, 0, 3, 15]],
+        [
+          [0, 0, 3, 27], [7, 0, 3, 34], [8, 0, 3, 35], [9, 0, 3, 36], [10, 0, 3, 37],
+          [11, 0, 3, 38], [1, 0, 4, 1], [2, 0, 4, 1]
+        ],
+        [[0, 0, 2, 13], [1, 0, 2, 14]],
+        [],
+        [
+          [2, 0, 1, 1], [11, 0, 1, 10], [12, 0, 1, 11], [14, 0, 1, 12], [3, 0, 2, 13],
+          [4, 0, 2, 14], [5, 0, 4, 1]
+        ],
+        [
+          [5, 0, 1, 20], [7, 0, 1, 22], [12, 0, 1, 27], [14, 0, 1, 28], [15, 0, 1, 29],
+          [9, 0, 2, 13], [10, 0, 2, 14]
+        ],
+        [],
+      ];
+
+      JS_CONTENT_MAP = fromObject({
         'version': 3,
-        'file': _('/node_modules/test-package/src/file.js'),
+        'file': 'file.js',
         'sourceRoot': '',
-        'sources': [_('/node_modules/test-package/src/file.ts')],
+        'sources': ['file.ts'],
+        'sourcesContent': [TS_CONTENT.contents],
         'names': [],
-        'mappings':
-            'AAAA,OAAO,EAAE,SAAS,EAAE,MAAM,eAAe,CAAC;AAC1C,MAAM;IACF,GAAG,CAAC,CAAS;QACT,OAAO,CAAC,CAAC;IACb,CAAC;;AACM,YAAU,GAAG;IAChB,EAAE,IAAI,EAAE,SAAS,EAAE,IAAI,EAAE,CAAC,EAAE,QAAQ,EAAE,KAAK,EAAE,CAAC,EAAE;CACnD,CAAC',
-        'sourcesContent': [INPUT_PROGRAM.contents]
+        'mappings': encode(JS_CONTENT_MAPPINGS),
       });

-      RENDERED_CONTENTS = `
-// ADD IMPORTS
-
-// ADD EXPORTS
-
-// ADD CONSTANTS
-
-// ADD ADJACENT STATEMENTS
-
-// ADD DEFINITIONS
-
-// REMOVE DECORATORS
-` + INPUT_PROGRAM.contents;
+      RENDERED_CONTENTS =
+          `\n// ADD IMPORTS\n\n// ADD EXPORTS\r\n\n// ADD CONSTANTS\n\n// ADD ADJACENT STATEMENTS\n\n// ADD DEFINITIONS\n\n// REMOVE DECORATORS\n` +
+          JS_CONTENT.contents;

       OUTPUT_PROGRAM_MAP = fromObject({
         'version': 3,
         'file': 'file.js',
-        'sources': [_('/node_modules/test-package/src/file.js')],
-        'sourcesContent': [INPUT_PROGRAM.contents],
+        'sources': ['file.js'],
         'names': [],
-        'mappings': ';;;;;;;;;;;;AAAA;;;;;;;;;'
+        'mappings': encode([
+          [], [], [], [], [], [], [], [], [], [], [], [], [[0, 0, 0, 0]],
+          [], [], [], [], [], [], [], [], []
+        ]),
+        'sourcesContent': [JS_CONTENT.contents],
       });

+      const MERGED_OUTPUT_PROGRAM_MAPPINGS: SourceMapMappings =
+          [[], [], [], [], [], [], [], [], [], [], [], [], ...JS_CONTENT_MAPPINGS, []];
+
       MERGED_OUTPUT_PROGRAM_MAP = fromObject({
         'version': 3,
-        'sources': [_('/node_modules/test-package/src/file.ts')],
-        'names': [],
-        'mappings': ';;;;;;;;;;;;AAAA',
         'file': 'file.js',
-        'sourcesContent': [INPUT_PROGRAM.contents]
+        'sources': ['file.ts'],
+        'names': [],
+        'mappings': encode(MERGED_OUTPUT_PROGRAM_MAPPINGS),
+        'sourcesContent': [TS_CONTENT.contents],
       });
     });

     describe('renderProgram()', () => {
-      it('should render the modified contents; and a new map file, if the original provided no map file.',
+      it('should render the modified contents; with an inline map file, if the original provided no map file.',
         () => {
           const {renderer, decorationAnalyses, switchMarkerAnalyses, privateDeclarationsAnalyses} =
-              createTestRenderer('test-package', [INPUT_PROGRAM]);
-          const result = renderer.renderProgram(
+              createTestRenderer('test-package', [JS_CONTENT]);
+          const [sourceFile, mapFile] = renderer.renderProgram(
              decorationAnalyses, switchMarkerAnalyses, privateDeclarationsAnalyses);
-          expect(result[0].path).toEqual(_('/node_modules/test-package/src/file.js'));
-          expect(result[0].contents)
-              .toEqual(RENDERED_CONTENTS + '\n' + generateMapFileComment('file.js.map'));
-          expect(result[1].path).toEqual(_('/node_modules/test-package/src/file.js.map'));
-          expect(result[1].contents).toEqual(OUTPUT_PROGRAM_MAP.toJSON());
+          expect(sourceFile.path).toEqual(_('/node_modules/test-package/src/file.js'));
+          expect(sourceFile.contents)
+              .toEqual(RENDERED_CONTENTS + '\n' + OUTPUT_PROGRAM_MAP.toComment());
+          expect(mapFile).toBeUndefined();
         });
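Why twelve leading empty mapping lines appear in OUTPUT_PROGRAM_MAP and MERGED_OUTPUT_PROGRAM_MAPPINGS above: the test formatter's banner contains twelve newlines, so line 0 of the original content becomes line 12 of the rendered output. A quick check using the exact string from the test:

```ts
const banner =
    '\n// ADD IMPORTS\n\n// ADD EXPORTS\r\n\n// ADD CONSTANTS\n\n// ADD ADJACENT STATEMENTS\n\n// ADD DEFINITIONS\n\n// REMOVE DECORATORS\n';

// 12 line breaks (splitting on \r?\n yields 13 parts), so the first mapped
// segment [0, 0, 0, 0] sits on generated line 12.
console.log(banner.split(/\r?\n/).length - 1);  // => 12
```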
@@ -232,7 +258,7 @@ A.ɵcmp = ɵngcc0.ɵɵdefineComponent({ type: A, selectors: [["a"]], decls: 1, v
       it('should call addImports with the source code and info about the core Angular library.',
          () => {
            const {renderer, decorationAnalyses, switchMarkerAnalyses, privateDeclarationsAnalyses,
-                  testFormatter} = createTestRenderer('test-package', [INPUT_PROGRAM]);
+                  testFormatter} = createTestRenderer('test-package', [JS_CONTENT]);
            const result = renderer.renderProgram(
                decorationAnalyses, switchMarkerAnalyses, privateDeclarationsAnalyses);
            const addImportsSpy = testFormatter.addImports as jasmine.Spy;
@@ -245,7 +271,7 @@
       it('should call addDefinitions with the source code, the analyzed class and the rendered definitions.',
          () => {
            const {renderer, decorationAnalyses, switchMarkerAnalyses, privateDeclarationsAnalyses,
-                  testFormatter} = createTestRenderer('test-package', [INPUT_PROGRAM]);
+                  testFormatter} = createTestRenderer('test-package', [JS_CONTENT]);
            renderer.renderProgram(
                decorationAnalyses, switchMarkerAnalyses, privateDeclarationsAnalyses);
            const addDefinitionsSpy = testFormatter.addDefinitions as jasmine.Spy;
@@ -263,7 +289,7 @@ A.ɵdir = ɵngcc0.ɵɵdefineDirective({ type: A, selectors: [["", "a", ""]] });`
       it('should call addAdjacentStatements with the source code, the analyzed class and the rendered statements',
          () => {
            const {renderer, decorationAnalyses, switchMarkerAnalyses, privateDeclarationsAnalyses,
-                  testFormatter} = createTestRenderer('test-package', [INPUT_PROGRAM]);
+                  testFormatter} = createTestRenderer('test-package', [JS_CONTENT]);
            renderer.renderProgram(
                decorationAnalyses, switchMarkerAnalyses, privateDeclarationsAnalyses);
            const addAdjacentStatementsSpy = testFormatter.addAdjacentStatements as jasmine.Spy;
@@ -282,7 +308,7 @@ A.ɵdir = ɵngcc0.ɵɵdefineDirective({ type: A, selectors: [["", "a", ""]] });`
       it('should call removeDecorators with the source code, a map of class decorators that have been analyzed',
          () => {
            const {renderer, decorationAnalyses, switchMarkerAnalyses, privateDeclarationsAnalyses,
-                  testFormatter} = createTestRenderer('test-package', [INPUT_PROGRAM]);
+                  testFormatter} = createTestRenderer('test-package', [JS_CONTENT]);
            renderer.renderProgram(
                decorationAnalyses, switchMarkerAnalyses, privateDeclarationsAnalyses);
            const removeDecoratorsSpy = testFormatter.removeDecorators as jasmine.Spy;
@@ -295,7 +321,7 @@ A.ɵdir = ɵngcc0.ɵɵdefineDirective({ type: A, selectors: [["", "a", ""]] });`
            const keys = Array.from(map.keys());
            expect(keys.length).toEqual(1);
            expect(keys[0].getText())
-               .toEqual(`[\n { type: Directive, args: [{ selector: '[a]' }] }\n]`);
+               .toEqual(`[\n { type: Directive, args: [{ selector: '[a]' }] }\r\n]`);
            const values = Array.from(map.values());
            expect(values.length).toEqual(1);
            expect(values[0].length).toEqual(1);
@@ -493,7 +519,7 @@ UndecoratedBase.ɵdir = ɵngcc0.ɵɵdefineDirective({ type: UndecoratedBase, vie
       it('should call renderImports after other abstract methods', () => {
         // This allows the other methods to add additional imports if necessary
         const {renderer, decorationAnalyses, switchMarkerAnalyses, privateDeclarationsAnalyses,
-               testFormatter} = createTestRenderer('test-package', [INPUT_PROGRAM]);
+               testFormatter} = createTestRenderer('test-package', [JS_CONTENT]);
         const addExportsSpy = testFormatter.addExports as jasmine.Spy;
         const addDefinitionsSpy = testFormatter.addDefinitions as jasmine.Spy;
         const addAdjacentStatementsSpy = testFormatter.addAdjacentStatements as jasmine.Spy;
@@ -511,39 +537,38 @@ UndecoratedBase.ɵdir = ɵngcc0.ɵɵdefineDirective({ type: UndecoratedBase, vie
     describe('source map merging', () => {
       it('should merge any inline source map from the original file and write the output as an inline source map',
          () => {
+           const sourceFiles: TestFile[] = [{
+             name: JS_CONTENT.name,
+             contents: JS_CONTENT.contents + '\n' + JS_CONTENT_MAP.toComment()
+           }];
            const {decorationAnalyses, renderer, switchMarkerAnalyses,
-                  privateDeclarationsAnalyses} =
-               createTestRenderer(
-                   'test-package', [{
-                     ...INPUT_PROGRAM,
-                     contents: INPUT_PROGRAM.contents + '\n' + INPUT_PROGRAM_MAP.toComment()
-                   }]);
-           const result = renderer.renderProgram(
+                  privateDeclarationsAnalyses} = createTestRenderer('test-package', sourceFiles);
+           const [sourceFile, mapFile] = renderer.renderProgram(
                decorationAnalyses, switchMarkerAnalyses, privateDeclarationsAnalyses);
-           expect(result[0].path).toEqual(_('/node_modules/test-package/src/file.js'));
-           expect(result[0].contents)
+           expect(sourceFile.path).toEqual(_('/node_modules/test-package/src/file.js'));
+           expect(sourceFile.contents)
                .toEqual(RENDERED_CONTENTS + '\n' + MERGED_OUTPUT_PROGRAM_MAP.toComment());
-           expect(result[1]).toBeUndefined();
+           expect(mapFile).toBeUndefined();
          });

       it('should merge any external source map from the original file and write the output to an external source map',
          () => {
            const sourceFiles: TestFile[] = [{
-             ...INPUT_PROGRAM,
-             contents: INPUT_PROGRAM.contents + '\n//# sourceMappingURL=file.js.map'
+             name: JS_CONTENT.name,
+             contents: JS_CONTENT.contents + '\n//# sourceMappingURL=file.js.map'
            }];
            const mappingFiles: TestFile[] =
-               [{name: _(INPUT_PROGRAM.name + '.map'), contents: INPUT_PROGRAM_MAP.toJSON()}];
+               [{name: _(JS_CONTENT.name + '.map'), contents: JS_CONTENT_MAP.toJSON()}];
            const {decorationAnalyses, renderer, switchMarkerAnalyses,
                   privateDeclarationsAnalyses} =
                createTestRenderer('test-package', sourceFiles, undefined, mappingFiles);
-           const result = renderer.renderProgram(
+           const [sourceFile, mapFile] = renderer.renderProgram(
                decorationAnalyses, switchMarkerAnalyses, privateDeclarationsAnalyses);
-           expect(result[0].path).toEqual(_('/node_modules/test-package/src/file.js'));
-           expect(result[0].contents)
+           expect(sourceFile.path).toEqual(_('/node_modules/test-package/src/file.js'));
+           expect(sourceFile.contents)
                .toEqual(RENDERED_CONTENTS + '\n' + generateMapFileComment('file.js.map'));
-           expect(result[1].path).toEqual(_('/node_modules/test-package/src/file.js.map'));
-           expect(JSON.parse(result[1].contents)).toEqual(MERGED_OUTPUT_PROGRAM_MAP.toObject());
+           expect(mapFile.path).toEqual(_('/node_modules/test-package/src/file.js.map'));
+           expect(JSON.parse(mapFile.contents)).toEqual(MERGED_OUTPUT_PROGRAM_MAP.toObject());
          });
     });
@@ -0,0 +1,110 @@
+/**
+ * @license
+ * Copyright Google Inc. All Rights Reserved.
+ *
+ * Use of this source code is governed by an MIT-style license that can be
+ * found in the LICENSE file at https://angular.io/license
+ */
+import {compareSegments, offsetSegment, segmentDiff} from '../../src/sourcemaps/segment_marker';
+import {computeLineLengths} from '../../src/sourcemaps/source_file';
+
+describe('SegmentMarker utils', () => {
+  describe('compareSegments()', () => {
+    it('should return 0 if the segments are the same', () => {
+      expect(compareSegments({line: 0, column: 0}, {line: 0, column: 0})).toEqual(0);
+      expect(compareSegments({line: 123, column: 0}, {line: 123, column: 0})).toEqual(0);
+      expect(compareSegments({line: 0, column: 45}, {line: 0, column: 45})).toEqual(0);
+      expect(compareSegments({line: 123, column: 45}, {line: 123, column: 45})).toEqual(0);
+    });
+
+    it('should return a negative number if the first segment is before the second segment', () => {
+      expect(compareSegments({line: 0, column: 0}, {line: 0, column: 45})).toBeLessThan(0);
+      expect(compareSegments({line: 123, column: 0}, {line: 123, column: 45})).toBeLessThan(0);
+      expect(compareSegments({line: 13, column: 45}, {line: 123, column: 45})).toBeLessThan(0);
+      expect(compareSegments({line: 13, column: 45}, {line: 123, column: 9})).toBeLessThan(0);
+    });
+
+    it('should return a positive number if the first segment is after the second segment', () => {
+      expect(compareSegments({line: 0, column: 45}, {line: 0, column: 0})).toBeGreaterThan(0);
+      expect(compareSegments({line: 123, column: 45}, {line: 123, column: 0})).toBeGreaterThan(0);
+      expect(compareSegments({line: 123, column: 45}, {line: 13, column: 45})).toBeGreaterThan(0);
+      expect(compareSegments({line: 123, column: 9}, {line: 13, column: 45})).toBeGreaterThan(0);
+    });
+  });
+
+  describe('segmentDiff()', () => {
+    it('should return 0 if the segments are the same', () => {
+      const lineLengths = computeLineLengths('abcdef\nabcdefghj\nabcdefghijklm\nabcdef');
+      expect(segmentDiff(lineLengths, {line: 0, column: 0}, {line: 0, column: 0})).toEqual(0);
+      expect(segmentDiff(lineLengths, {line: 3, column: 0}, {line: 3, column: 0})).toEqual(0);
+      expect(segmentDiff(lineLengths, {line: 0, column: 5}, {line: 0, column: 5})).toEqual(0);
+      expect(segmentDiff(lineLengths, {line: 3, column: 5}, {line: 3, column: 5})).toEqual(0);
+    });
+
+    it('should return the column difference if the markers are on the same line', () => {
+      const lineLengths = computeLineLengths('abcdef\nabcdefghj\nabcdefghijklm\nabcdef');
+      expect(segmentDiff(lineLengths, {line: 0, column: 0}, {line: 0, column: 3})).toEqual(3);
+      expect(segmentDiff(lineLengths, {line: 1, column: 1}, {line: 1, column: 5})).toEqual(4);
+      expect(segmentDiff(lineLengths, {line: 2, column: 5}, {line: 2, column: 1})).toEqual(-4);
+      expect(segmentDiff(lineLengths, {line: 3, column: 3}, {line: 3, column: 0})).toEqual(-3);
+    });
+
+    it('should return the number of actual characters difference (including newline markers) if not on the same line',
+       () => {
+         let lineLengths: number[];
+
+         lineLengths = computeLineLengths('A12345\nB123456789');
+         expect(segmentDiff(lineLengths, {line: 0, column: 0}, {line: 1, column: 0}))
+             .toEqual(6 + 1);
+
+         lineLengths = computeLineLengths('012A45\n01234B6789');
+         expect(segmentDiff(lineLengths, {line: 0, column: 3}, {line: 1, column: 5}))
+             .toEqual(3 + 1 + 5);
+
+         lineLengths = computeLineLengths('012345\n012345A789\n01234567\nB123456');
+         expect(segmentDiff(lineLengths, {line: 1, column: 6}, {line: 3, column: 0}))
+             .toEqual(4 + 1 + 8 + 1 + 0);
+
+         lineLengths = computeLineLengths('012345\nA123456789\n01234567\n012B456');
+         expect(segmentDiff(lineLengths, {line: 1, column: 0}, {line: 3, column: 3}))
+             .toEqual(10 + 1 + 8 + 1 + 3);
+
+         lineLengths = computeLineLengths('012345\nB123456789\nA1234567\n0123456');
+         expect(segmentDiff(lineLengths, {line: 2, column: 0}, {line: 1, column: 0}))
+             .toEqual(0 - 1 - 10 + 0);
+
+         lineLengths = computeLineLengths('012345\n0123B56789\n01234567\n012A456');
+         expect(segmentDiff(lineLengths, {line: 3, column: 3}, {line: 1, column: 4}))
+             .toEqual(-3 - 1 - 8 - 1 - 10 + 4);
+
+         lineLengths = computeLineLengths('B12345\n0123456789\n0123A567\n0123456');
+         expect(segmentDiff(lineLengths, {line: 2, column: 4}, {line: 0, column: 0}))
+             .toEqual(-4 - 1 - 10 - 1 - 6 + 0);
+
+         lineLengths = computeLineLengths('0123B5\n0123456789\nA1234567\n0123456');
+         expect(segmentDiff(lineLengths, {line: 2, column: 0}, {line: 0, column: 4}))
+             .toEqual(0 - 1 - 10 - 1 - 6 + 4);
+       });
+  });
+
+  describe('offsetSegment()', () => {
+    it('should return an identical marker if offset is 0', () => {
+      const lineLengths = computeLineLengths('012345\n0123456789\n01234567\n0123456');
+      const marker = {line: 2, column: 3};
+      expect(offsetSegment(lineLengths, marker, 0)).toBe(marker);
+    });
+
+    it('should return a new marker offset by the given chars', () => {
+      const lineLengths = computeLineLengths('012345\n0123456789\n012*4567\n0123456');
+      const marker = {line: 2, column: 3};
+      expect(offsetSegment(lineLengths, marker, 1)).toEqual({line: 2, column: 4});
+      expect(offsetSegment(lineLengths, marker, 2)).toEqual({line: 2, column: 5});
+      expect(offsetSegment(lineLengths, marker, 4)).toEqual({line: 2, column: 7});
+      expect(offsetSegment(lineLengths, marker, 8)).toEqual({line: 3, column: 2});
+      expect(offsetSegment(lineLengths, marker, -1)).toEqual({line: 2, column: 2});
+      expect(offsetSegment(lineLengths, marker, -2)).toEqual({line: 2, column: 1});
+      expect(offsetSegment(lineLengths, marker, -4)).toEqual({line: 1, column: 10});
+      expect(offsetSegment(lineLengths, marker, -6)).toEqual({line: 1, column: 8});
+    });
+  });
+});
@ -0,0 +1,243 @@
|
|||
/**
|
||||
* @license
|
||||
* Copyright Google Inc. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by an MIT-style license that can be
|
||||
* found in the LICENSE file at https://angular.io/license
|
||||
*/
|
||||
import {FileSystem, absoluteFrom, getFileSystem} from '@angular/compiler-cli/src/ngtsc/file_system';
|
||||
import {fromObject} from 'convert-source-map';
|
||||
|
||||
import {runInEachFileSystem} from '../../../src/ngtsc/file_system/testing';
|
||||
import {RawSourceMap} from '../../src/sourcemaps/raw_source_map';
|
||||
import {SourceFileLoader as SourceFileLoader} from '../../src/sourcemaps/source_file_loader';

runInEachFileSystem(() => {
  describe('SourceFileLoader', () => {
    let fs: FileSystem;
    let _: typeof absoluteFrom;
    let registry: SourceFileLoader;
    beforeEach(() => {
      fs = getFileSystem();
      _ = absoluteFrom;
      registry = new SourceFileLoader(fs);
    });

    describe('loadSourceFile', () => {
      it('should load a file with no source map and inline contents', () => {
        const sourceFile = registry.loadSourceFile(_('/foo/src/index.js'), 'some inline content');
        if (sourceFile === null) {
          return fail('Expected source file to be defined');
        }
        expect(sourceFile.contents).toEqual('some inline content');
        expect(sourceFile.sourcePath).toEqual(_('/foo/src/index.js'));
        expect(sourceFile.rawMap).toEqual(null);
        expect(sourceFile.sources).toEqual([]);
      });

      it('should load a file with no source map and read its contents from disk', () => {
        fs.ensureDir(_('/foo/src'));
        fs.writeFile(_('/foo/src/index.js'), 'some external content');
        const sourceFile = registry.loadSourceFile(_('/foo/src/index.js'));
        if (sourceFile === null) {
          return fail('Expected source file to be defined');
        }
        expect(sourceFile.contents).toEqual('some external content');
        expect(sourceFile.sourcePath).toEqual(_('/foo/src/index.js'));
        expect(sourceFile.rawMap).toEqual(null);
        expect(sourceFile.sources).toEqual([]);
      });

      it('should load a file with an external source map', () => {
        fs.ensureDir(_('/foo/src'));
        const sourceMap = createRawSourceMap({file: 'index.js'});
        fs.writeFile(_('/foo/src/external.js.map'), JSON.stringify(sourceMap));
        const sourceFile = registry.loadSourceFile(
            _('/foo/src/index.js'), 'some inline content\n//# sourceMappingURL=external.js.map');
        if (sourceFile === null) {
          return fail('Expected source file to be defined');
        }
        expect(sourceFile.rawMap).toEqual(sourceMap);
      });

      it('should handle a missing external source map', () => {
        fs.ensureDir(_('/foo/src'));
        const sourceFile = registry.loadSourceFile(
            _('/foo/src/index.js'), 'some inline content\n//# sourceMappingURL=external.js.map');
        if (sourceFile === null) {
          return fail('Expected source file to be defined');
        }
        expect(sourceFile.rawMap).toBe(null);
      });

      it('should load a file with an inline encoded source map', () => {
        const sourceMap = createRawSourceMap({file: 'index.js'});
        const encodedSourceMap = Buffer.from(JSON.stringify(sourceMap)).toString('base64');
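        // The map is attached inline as a base64-encoded `data:` URL in the
        // sourceMappingURL comment, rather than via a separate .map file.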
        const sourceFile = registry.loadSourceFile(
            _('/foo/src/index.js'),
            `some inline content\n//# sourceMappingURL=data:application/json;charset=utf-8;base64,${encodedSourceMap}`);
        if (sourceFile === null) {
          return fail('Expected source file to be defined');
        }
        expect(sourceFile.rawMap).toEqual(sourceMap);
      });

      it('should load a file with an implied source map', () => {
        const sourceMap = createRawSourceMap({file: 'index.js'});
        fs.ensureDir(_('/foo/src'));
        fs.writeFile(_('/foo/src/index.js.map'), JSON.stringify(sourceMap));
        const sourceFile = registry.loadSourceFile(_('/foo/src/index.js'), 'some inline content');
        if (sourceFile === null) {
          return fail('Expected source file to be defined');
        }
        expect(sourceFile.rawMap).toEqual(sourceMap);
      });

      it('should handle a missing implied source-map file', () => {
        fs.ensureDir(_('/foo/src'));
        const sourceFile = registry.loadSourceFile(_('/foo/src/index.js'), 'some inline content');
        if (sourceFile === null) {
          return fail('Expected source file to be defined');
        }
        expect(sourceFile.rawMap).toBe(null);
      });

      it('should recurse into external original source files that are referenced from source maps',
         () => {
           // Set up a scenario where the generated files reference previous files:
           //
           // index.js
           //  -> x.js
           //  -> y.js
           //      -> a.js
           //  -> z.js (inline content)
           fs.ensureDir(_('/foo/src'));

           const indexSourceMap = createRawSourceMap({
             file: 'index.js',
             sources: ['x.js', 'y.js', 'z.js'],
             'sourcesContent': [null, null, 'z content']
           });
           fs.writeFile(_('/foo/src/index.js.map'), JSON.stringify(indexSourceMap));

           fs.writeFile(_('/foo/src/x.js'), 'x content');

           const ySourceMap = createRawSourceMap({file: 'y.js', sources: ['a.js']});
           fs.writeFile(_('/foo/src/y.js'), 'y content');
           fs.writeFile(_('/foo/src/y.js.map'), JSON.stringify(ySourceMap));
           fs.writeFile(_('/foo/src/z.js'), 'z content');
           fs.writeFile(_('/foo/src/a.js'), 'a content');

           const sourceFile = registry.loadSourceFile(_('/foo/src/index.js'), 'index content');
           if (sourceFile === null) {
             return fail('Expected source file to be defined');
           }

           expect(sourceFile.contents).toEqual('index content');
           expect(sourceFile.sourcePath).toEqual(_('/foo/src/index.js'));
           expect(sourceFile.rawMap).toEqual(indexSourceMap);

           expect(sourceFile.sources.length).toEqual(3);

           expect(sourceFile.sources[0] !.contents).toEqual('x content');
           expect(sourceFile.sources[0] !.sourcePath).toEqual(_('/foo/src/x.js'));
           expect(sourceFile.sources[0] !.rawMap).toEqual(null);
           expect(sourceFile.sources[0] !.sources).toEqual([]);

           expect(sourceFile.sources[1] !.contents).toEqual('y content');
           expect(sourceFile.sources[1] !.sourcePath).toEqual(_('/foo/src/y.js'));
           expect(sourceFile.sources[1] !.rawMap).toEqual(ySourceMap);

           expect(sourceFile.sources[1] !.sources.length).toEqual(1);
           expect(sourceFile.sources[1] !.sources[0] !.contents).toEqual('a content');
           expect(sourceFile.sources[1] !.sources[0] !.sourcePath).toEqual(_('/foo/src/a.js'));
           expect(sourceFile.sources[1] !.sources[0] !.rawMap).toEqual(null);
           expect(sourceFile.sources[1] !.sources[0] !.sources).toEqual([]);

           expect(sourceFile.sources[2] !.contents).toEqual('z content');
           expect(sourceFile.sources[2] !.sourcePath).toEqual(_('/foo/src/z.js'));
           expect(sourceFile.sources[2] !.rawMap).toEqual(null);
           expect(sourceFile.sources[2] !.sources).toEqual([]);
         });

      it('should handle a missing source file referenced from a source-map', () => {
        fs.ensureDir(_('/foo/src'));

        const indexSourceMap =
            createRawSourceMap({file: 'index.js', sources: ['x.js'], 'sourcesContent': [null]});
        fs.writeFile(_('/foo/src/index.js.map'), JSON.stringify(indexSourceMap));

        const sourceFile = registry.loadSourceFile(_('/foo/src/index.js'), 'index content');
        if (sourceFile === null) {
          return fail('Expected source file to be defined');
        }

        expect(sourceFile.contents).toEqual('index content');
        expect(sourceFile.sourcePath).toEqual(_('/foo/src/index.js'));
        expect(sourceFile.rawMap).toEqual(indexSourceMap);
        expect(sourceFile.sources.length).toEqual(1);
        expect(sourceFile.sources[0]).toBe(null);
      });
    });

    it('should fail if there is a cyclic dependency in files loaded from disk', () => {
      fs.ensureDir(_('/foo/src'));
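
      // Create a cycle of external source files, a.js -> b.js -> c.js -> a.js,
      // each one pointing at the next via its appended source-map comment.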
      const aPath = _('/foo/src/a.js');
      fs.writeFile(
          aPath, 'a content\n' +
              fromObject(createRawSourceMap({file: 'a.js', sources: ['b.js']})).toComment());

      const bPath = _('/foo/src/b.js');
      fs.writeFile(
          bPath, 'b content\n' +
              fromObject(createRawSourceMap({file: 'b.js', sources: ['c.js']})).toComment());

      const cPath = _('/foo/src/c.js');
      fs.writeFile(
          cPath, 'c content\n' +
              fromObject(createRawSourceMap({file: 'c.js', sources: ['a.js']})).toComment());

      expect(() => registry.loadSourceFile(aPath))
          .toThrowError(
              `Circular source file mapping dependency: ${aPath} -> ${bPath} -> ${cPath} -> ${aPath}`);
    });

    it('should not fail if there is a cyclic dependency in filenames of inline sources', () => {
      fs.ensureDir(_('/foo/src'));
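
      // The same a.js -> b.js -> c.js -> a.js chain of filenames, but c.js.map supplies
      // a.js as inline sourcesContent, so the apparent cycle ends at the inline content
      // and loading should not throw.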
      const aPath = _('/foo/src/a.js');
      fs.writeFile(
          aPath, 'a content\n' +
              fromObject(createRawSourceMap({file: 'a.js', sources: ['b.js']})).toComment());

      const bPath = _('/foo/src/b.js');
      fs.writeFile(bPath, 'b content');
      fs.writeFile(
          _('/foo/src/b.js.map'),
          JSON.stringify(createRawSourceMap({file: 'b.js', sources: ['c.js']})));

      const cPath = _('/foo/src/c.js');
      fs.writeFile(cPath, 'c content');
      fs.writeFile(
          _('/foo/src/c.js.map'),
          JSON.stringify(createRawSourceMap(
              {file: 'c.js', sources: ['a.js'], sourcesContent: ['inline a.js content']})));

      expect(() => registry.loadSourceFile(aPath)).not.toThrow();
    });
  });
});

function createRawSourceMap(custom: Partial<RawSourceMap>): RawSourceMap {
  return {
    'version': 3,
    'sourceRoot': '',
    'sources': [],
    'sourcesContent': [],
    'names': [],
    'mappings': '', ...custom
  };
}

@ -0,0 +1,227 @@
/**
 * @license
 * Copyright Google Inc. All Rights Reserved.
 *
 * Use of this source code is governed by an MIT-style license that can be
 * found in the LICENSE file at https://angular.io/license
 */
import {encode} from 'sourcemap-codec';

import {FileSystem, absoluteFrom, getFileSystem} from '../../../src/ngtsc/file_system';
import {runInEachFileSystem} from '../../../src/ngtsc/file_system/testing';
import {RawSourceMap} from '../../src/sourcemaps/raw_source_map';
import {SourceFile, computeLineLengths, extractOriginalSegments, parseMappings} from '../../src/sourcemaps/source_file';

runInEachFileSystem(() => {
  describe('SourceFile and utilities', () => {
    let fs: FileSystem;
    let _: typeof absoluteFrom;

    beforeEach(() => {
      fs = getFileSystem();
      _ = absoluteFrom;
    });

    describe('parseMappings()', () => {
      it('should be an empty array for source files with no source map', () => {
        const mappings = parseMappings(null, []);
        expect(mappings).toEqual([]);
      });

      it('should be an empty array for source files with no source map mappings', () => {
        const rawSourceMap: RawSourceMap = {mappings: '', names: [], sources: [], version: 3};
        const mappings = parseMappings(rawSourceMap, []);
        expect(mappings).toEqual([]);
      });

      it('should parse the mappings from the raw source map', () => {
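        // In sourcemap-codec's decoded form, each segment is
        // [generatedColumn, sourceIndex, sourceLine, sourceColumn] (plus an optional
        // trailing name index, unused here).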
        const rawSourceMap: RawSourceMap = {
          mappings: encode([[[0, 0, 0, 0], [6, 0, 0, 3]]]),
          names: [],
          sources: ['a.js'],
          version: 3
        };
        const originalSource = new SourceFile(_('/foo/src/a.js'), 'abcdefg', null, false, []);
        const mappings = parseMappings(rawSourceMap, [originalSource]);
        expect(mappings).toEqual([
          {
            generatedSegment: {line: 0, column: 0},
            originalSource,
            originalSegment: {line: 0, column: 0},
            name: undefined
          },
          {
            generatedSegment: {line: 0, column: 6},
            originalSource,
            originalSegment: {line: 0, column: 3},
            name: undefined
          },
        ]);
      });
    });

    describe('extractOriginalSegments()', () => {
      it('should return an empty array for source files with no source map',
         () => { expect(extractOriginalSegments(parseMappings(null, []))).toEqual([]); });

      it('should be an empty array for source files with no source map mappings', () => {
        const rawSourceMap: RawSourceMap = {mappings: '', names: [], sources: [], version: 3};
        expect(extractOriginalSegments(parseMappings(rawSourceMap, []))).toEqual([]);
      });

      it('should parse the segments in ascending order of original position from the raw source map',
         () => {
           const originalSource = new SourceFile(_('/foo/src/a.js'), 'abcdefg', null, false, []);
           const rawSourceMap: RawSourceMap = {
             mappings: encode([[[0, 0, 0, 0], [2, 0, 0, 3], [4, 0, 0, 2]]]),
             names: [],
             sources: ['a.js'],
             version: 3
           };
           expect(extractOriginalSegments(parseMappings(rawSourceMap, [originalSource]))).toEqual([
             {line: 0, column: 0},
             {line: 0, column: 2},
             {line: 0, column: 3},
           ]);
         });
    });

    describe('SourceFile', () => {
      describe('flattenedMappings', () => {
        it('should be an empty array for source files with no source map', () => {
          const sourceFile =
              new SourceFile(_('/foo/src/index.js'), 'index contents', null, false, []);
          expect(sourceFile.flattenedMappings).toEqual([]);
        });

        it('should be an empty array for source files with no source map mappings', () => {
          const rawSourceMap: RawSourceMap = {mappings: '', names: [], sources: [], version: 3};
          const sourceFile =
              new SourceFile(_('/foo/src/index.js'), 'index contents', rawSourceMap, false, []);
          expect(sourceFile.flattenedMappings).toEqual([]);
        });

        it('should be the same as non-flat mappings if there is only one level of source map',
           () => {
             const rawSourceMap: RawSourceMap = {
               mappings: encode([[[0, 0, 0, 0], [6, 0, 0, 3]]]),
               names: [],
               sources: ['a.js'],
               version: 3
             };
             const originalSource = new SourceFile(_('/foo/src/a.js'), 'abcdefg', null, false, []);
             const sourceFile = new SourceFile(
                 _('/foo/src/index.js'), 'abc123defg', rawSourceMap, false, [originalSource]);
             expect(sourceFile.flattenedMappings)
                 .toEqual(parseMappings(rawSourceMap, [originalSource]));
           });

        it('should merge mappings from flattened original source files', () => {
          const cSource = new SourceFile(_('/foo/src/c.js'), 'bcd123e', null, false, []);
          const bSourceMap: RawSourceMap = {
            mappings: encode([[[1, 0, 0, 0], [4, 0, 0, 3], [4, 0, 0, 6], [5, 0, 0, 7]]]),
            names: [],
            sources: ['c.js'],
            version: 3
          };
          const bSource =
              new SourceFile(_('/foo/src/b.js'), 'abcdef', bSourceMap, false, [cSource]);
          const aSourceMap: RawSourceMap = {
            mappings: encode([[[0, 0, 0, 0], [2, 0, 0, 3], [4, 0, 0, 2], [5, 0, 0, 5]]]),
            names: [],
            sources: ['b.js'],
            version: 3
          };
          const aSource =
              new SourceFile(_('/foo/src/a.js'), 'abdecf', aSourceMap, false, [bSource]);
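
          // The a->b mappings compose with b's already-flattened b->c mappings, so the
          // flattened result maps positions in a.js directly onto positions in c.js.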
          expect(aSource.flattenedMappings).toEqual([
            {
              generatedSegment: {line: 0, column: 1},
              originalSource: cSource,
              originalSegment: {line: 0, column: 0},
              name: undefined
            },
            {
              generatedSegment: {line: 0, column: 2},
              originalSource: cSource,
              originalSegment: {line: 0, column: 2},
              name: undefined
            },
            {
              generatedSegment: {line: 0, column: 3},
              originalSource: cSource,
              originalSegment: {line: 0, column: 3},
              name: undefined
            },
            {
              generatedSegment: {line: 0, column: 3},
              originalSource: cSource,
              originalSegment: {line: 0, column: 6},
              name: undefined
            },
            {
              generatedSegment: {line: 0, column: 4},
              originalSource: cSource,
              originalSegment: {line: 0, column: 1},
              name: undefined
            },
            {
              generatedSegment: {line: 0, column: 5},
              originalSource: cSource,
              originalSegment: {line: 0, column: 7},
              name: undefined
            },
          ]);
        });
      });

      describe('renderFlattenedSourceMap()', () => {
        it('should convert the flattenedMappings into a raw source-map object', () => {
          const cSource = new SourceFile(_('/foo/src/c.js'), 'bcd123e', null, false, []);
          const bToCSourceMap: RawSourceMap = {
            mappings: encode([[[1, 0, 0, 0], [4, 0, 0, 3], [4, 0, 0, 6], [5, 0, 0, 7]]]),
            names: [],
            sources: ['c.js'],
            version: 3
          };
          const bSource =
              new SourceFile(_('/foo/src/b.js'), 'abcdef', bToCSourceMap, false, [cSource]);
          const aToBSourceMap: RawSourceMap = {
            mappings: encode([[[0, 0, 0, 0], [2, 0, 0, 3], [4, 0, 0, 2], [5, 0, 0, 5]]]),
            names: [],
            sources: ['b.js'],
            version: 3
          };
          const aSource =
              new SourceFile(_('/foo/src/a.js'), 'abdecf', aToBSourceMap, false, [bSource]);
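
          // Rendering flattens the a->b and b->c maps into a single a->c source map;
          // the intermediate b.js no longer appears in the rendered sources.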
          const aToCSourceMap = aSource.renderFlattenedSourceMap();
          expect(aToCSourceMap.version).toEqual(3);
          expect(aToCSourceMap.file).toEqual('a.js');
          expect(aToCSourceMap.names).toEqual([]);
          expect(aToCSourceMap.sourceRoot).toBeUndefined();
          expect(aToCSourceMap.sources).toEqual(['c.js']);
          expect(aToCSourceMap.sourcesContent).toEqual(['bcd123e']);
          expect(aToCSourceMap.mappings).toEqual(encode([
            [[1, 0, 0, 0], [2, 0, 0, 2], [3, 0, 0, 3], [3, 0, 0, 6], [4, 0, 0, 1], [5, 0, 0, 7]]
          ]));
        });
      });
    });

    describe('computeLineLengths()', () => {
      it('should compute the length of each line in the given string', () => {
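        // Note that a Windows line ending (\r\n) is treated as a single line break;
        // the \r is not counted in the preceding line's length.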
        expect(computeLineLengths('')).toEqual([0]);
        expect(computeLineLengths('abc')).toEqual([3]);
        expect(computeLineLengths('\n')).toEqual([0, 0]);
        expect(computeLineLengths('\n\n')).toEqual([0, 0, 0]);
        expect(computeLineLengths('abc\n')).toEqual([3, 0]);
        expect(computeLineLengths('\nabc')).toEqual([0, 3]);
        expect(computeLineLengths('abc\ndefg')).toEqual([3, 4]);
        expect(computeLineLengths('abc\r\n')).toEqual([3, 0]);
        expect(computeLineLengths('abc\r\ndefg')).toEqual([3, 4]);
      });
    });
  });
});

@ -21,6 +21,7 @@
"magic-string": "^0.25.0",
"semver": "^6.3.0",
"source-map": "^0.6.1",
"sourcemap-codec": "^1.4.8",
"yargs": "13.1.0"
},
"peerDependencies": {
@ -48,7 +49,7 @@
"ng-update": {
"packageGroup": "NG_UPDATE_PACKAGE_GROUP"
},
-"publishConfig":{
-"registry":"https://wombat-dressing-room.appspot.com"
+"publishConfig": {
+"registry": "https://wombat-dressing-room.appspot.com"
}
}
@ -13508,6 +13508,11 @@ sourcemap-codec@^1.4.4:
  resolved "https://registry.yarnpkg.com/sourcemap-codec/-/sourcemap-codec-1.4.4.tgz#c63ea927c029dd6bd9a2b7fa03b3fec02ad56e9f"
  integrity sha512-CYAPYdBu34781kLHkaW3m6b/uUSyMOC2R61gcYMWooeuaGtjof86ZA/8T+qVPPt7np1085CR9hmMGrySwEc8Xg==

sourcemap-codec@^1.4.8:
  version "1.4.8"
  resolved "https://registry.yarnpkg.com/sourcemap-codec/-/sourcemap-codec-1.4.8.tgz#ea804bd94857402e6992d05a38ef1ae35a9ab4c4"
  integrity sha512-9NykojV5Uih4lgo5So5dtw+f0JgJX30KCNI8gwhz2J9A15wD0Ml6tjHKwf6fTSa6fAdVBdZeNOs9eJ71qCk8vA==

sparkles@^1.0.0:
  version "1.0.1"
  resolved "https://registry.yarnpkg.com/sparkles/-/sparkles-1.0.1.tgz#008db65edce6c50eec0c5e228e1945061dd0437c"