fix(compiler-cli): handle pseudo cycles in inline source-maps (#40435)
When a source-map has an inline source, any source-map linked from that source should only be loaded if it is itself also inline; it should not attempt to load a source-map from the file-system. Otherwise we can find ourselves with inadvertent infinite cyclic dependencies. For example, if a transpiler takes a file (e.g. index.js) and generates a new file overwriting the original file - capturing the original source inline in the new source-map (index.js.map) - the source file loader might read the inline original file (also index.js) and then try to load the `index.js.map` file from disk - ad infinitum. Note that the first call to `loadSourceFile()` is special, since you can pass in the source-file and source-map contents directly as in-memory strings. This is common if the transpiler has just generated these and has not yet written them to disk. When the contents are passed into `loadSourceFile()` directly, they are not treated as "inline" for the purposes described above since there is no chance of these "in-memory" source and source-map contents being caught up in a cyclic dependency. Fixes #40408 PR Close #40435
This commit is contained in:
parent
d482f5cdd3
commit
dc06873c72
|
@ -11,7 +11,7 @@ import * as ts from 'typescript';
|
|||
|
||||
import {absoluteFrom, absoluteFromSourceFile, ReadonlyFileSystem} from '../../../src/ngtsc/file_system';
|
||||
import {Logger} from '../../../src/ngtsc/logging';
|
||||
import {RawSourceMap, SourceFileLoader} from '../../../src/ngtsc/sourcemaps';
|
||||
import {ContentOrigin, RawSourceMap, SourceFileLoader} from '../../../src/ngtsc/sourcemaps';
|
||||
|
||||
import {FileToWrite} from './utils';
|
||||
|
||||
|
@ -28,22 +28,22 @@ export interface SourceMapInfo {
|
|||
export function renderSourceAndMap(
|
||||
logger: Logger, fs: ReadonlyFileSystem, sourceFile: ts.SourceFile,
|
||||
generatedMagicString: MagicString): FileToWrite[] {
|
||||
const generatedPath = absoluteFromSourceFile(sourceFile);
|
||||
const generatedMapPath = absoluteFrom(`${generatedPath}.map`);
|
||||
const sourceFilePath = absoluteFromSourceFile(sourceFile);
|
||||
const sourceMapPath = absoluteFrom(`${sourceFilePath}.map`);
|
||||
const generatedContent = generatedMagicString.toString();
|
||||
const generatedMap: RawSourceMap = generatedMagicString.generateMap(
|
||||
{file: generatedPath, source: generatedPath, includeContent: true});
|
||||
{file: sourceFilePath, source: sourceFilePath, includeContent: true});
|
||||
|
||||
try {
|
||||
const loader = new SourceFileLoader(fs, logger, {});
|
||||
const generatedFile = loader.loadSourceFile(
|
||||
generatedPath, generatedContent, {map: generatedMap, mapPath: generatedMapPath});
|
||||
sourceFilePath, generatedContent, {map: generatedMap, mapPath: sourceMapPath});
|
||||
|
||||
const rawMergedMap: RawSourceMap = generatedFile.renderFlattenedSourceMap();
|
||||
const mergedMap = fromObject(rawMergedMap);
|
||||
const firstSource = generatedFile.sources[0];
|
||||
if (firstSource && (firstSource.rawMap !== null || !sourceFile.isDeclarationFile) &&
|
||||
firstSource.inline) {
|
||||
const originalFile = loader.loadSourceFile(sourceFilePath, generatedMagicString.original);
|
||||
if (originalFile.rawMap === null && !sourceFile.isDeclarationFile ||
|
||||
originalFile.rawMap?.origin === ContentOrigin.Inline) {
|
||||
// We render an inline source map if one of:
|
||||
// * there was no input source map and this is not a typings file;
|
||||
// * the input source map exists and was inline.
|
||||
|
@ -52,21 +52,21 @@ export function renderSourceAndMap(
|
|||
// the input file because these inline source maps can be very large and it impacts on the
|
||||
// performance of IDEs that need to read them to provide intellisense etc.
|
||||
return [
|
||||
{path: generatedPath, contents: `${generatedFile.contents}\n${mergedMap.toComment()}`}
|
||||
];
|
||||
} else {
|
||||
const sourceMapComment = generateMapFileComment(`${fs.basename(generatedPath)}.map`);
|
||||
return [
|
||||
{path: generatedPath, contents: `${generatedFile.contents}\n${sourceMapComment}`},
|
||||
{path: generatedMapPath, contents: mergedMap.toJSON()}
|
||||
{path: sourceFilePath, contents: `${generatedFile.contents}\n${mergedMap.toComment()}`}
|
||||
];
|
||||
}
|
||||
} catch (e) {
|
||||
logger.error(`Error when flattening the source-map "${generatedMapPath}" for "${
|
||||
generatedPath}": ${e.toString()}`);
|
||||
|
||||
const sourceMapComment = generateMapFileComment(`${fs.basename(sourceFilePath)}.map`);
|
||||
return [
|
||||
{path: generatedPath, contents: generatedContent},
|
||||
{path: generatedMapPath, contents: fromObject(generatedMap).toJSON()},
|
||||
{path: sourceFilePath, contents: `${generatedFile.contents}\n${sourceMapComment}`},
|
||||
{path: sourceMapPath, contents: mergedMap.toJSON()}
|
||||
];
|
||||
} catch (e) {
|
||||
logger.error(`Error when flattening the source-map "${sourceMapPath}" for "${
|
||||
sourceFilePath}": ${e.toString()}`);
|
||||
return [
|
||||
{path: sourceFilePath, contents: generatedContent},
|
||||
{path: sourceMapPath, contents: fromObject(generatedMap).toJSON()},
|
||||
];
|
||||
}
|
||||
}
|
||||
|
|
|
@ -5,6 +5,7 @@
|
|||
* Use of this source code is governed by an MIT-style license that can be
|
||||
* found in the LICENSE file at https://angular.io/license
|
||||
*/
|
||||
export {RawSourceMap} from './src/raw_source_map';
|
||||
export {ContentOrigin} from './src/content_origin';
|
||||
export {MapAndPath, RawSourceMap} from './src/raw_source_map';
|
||||
export {Mapping, SourceFile} from './src/source_file';
|
||||
export {MapAndPath, SourceFileLoader} from './src/source_file_loader';
|
||||
export {SourceFileLoader} from './src/source_file_loader';
|
||||
|
|
|
@ -0,0 +1,34 @@
|
|||
/**
|
||||
* @license
|
||||
* Copyright Google LLC All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by an MIT-style license that can be
|
||||
* found in the LICENSE file at https://angular.io/license
|
||||
*/
|
||||
|
||||
/**
|
||||
* From where the content for a source file or source-map came.
|
||||
*
|
||||
* - Source files can be linked to source-maps by:
|
||||
* - providing the content inline via a base64 encoded data comment,
|
||||
* - providing a URL to the file path in a comment,
|
||||
* - the loader inferring the source-map path from the source file path.
|
||||
* - Source-maps can link to source files by:
|
||||
* - providing the content inline in the `sourcesContent` property
|
||||
* - providing the path to the file in the `sources` property
|
||||
*/
|
||||
export enum ContentOrigin {
|
||||
/**
|
||||
* The contents were provided programmatically when calling `loadSourceFile()`.
|
||||
*/
|
||||
Provided,
|
||||
/**
|
||||
* The contents were extracted directly form the contents of the referring file.
|
||||
*/
|
||||
Inline,
|
||||
/**
|
||||
* The contents were loaded from the file-system, after being explicitly referenced or inferred
|
||||
* from the referring file.
|
||||
*/
|
||||
FileSystem,
|
||||
}
|
|
@ -5,6 +5,8 @@
|
|||
* Use of this source code is governed by an MIT-style license that can be
|
||||
* found in the LICENSE file at https://angular.io/license
|
||||
*/
|
||||
import {AbsoluteFsPath} from '../../file_system';
|
||||
import {ContentOrigin} from './content_origin';
|
||||
|
||||
/**
|
||||
* This interface is the basic structure of the JSON in a raw source map that one might load from
|
||||
|
@ -19,3 +21,22 @@ export interface RawSourceMap {
|
|||
sourcesContent?: (string|null)[];
|
||||
mappings: string;
|
||||
}
|
||||
|
||||
|
||||
/**
 * The path and content of a source-map.
 */
export interface MapAndPath {
  /** The path to the source map if it was external, or `null` if it was inline. */
  mapPath: AbsoluteFsPath|null;
  /** The raw (parsed JSON) source map itself. */
  map: RawSourceMap;
}

/**
 * Information about a loaded source-map.
 *
 * Extends `MapAndPath` with a record of how the source-map content was obtained, which the
 * loader uses to decide whether further file-system lookups are safe (e.g. to avoid cycles
 * when the content was inline).
 */
export interface SourceMapInfo extends MapAndPath {
  /** From where the content for this source-map came (provided, inline, or file-system). */
  origin: ContentOrigin;
}
|
||||
|
|
|
@ -10,7 +10,7 @@ import {decode, encode, SourceMapMappings, SourceMapSegment} from 'sourcemap-cod
|
|||
|
||||
import {AbsoluteFsPath, PathManipulation} from '../../file_system';
|
||||
|
||||
import {RawSourceMap} from './raw_source_map';
|
||||
import {RawSourceMap, SourceMapInfo} from './raw_source_map';
|
||||
import {compareSegments, offsetSegment, SegmentMarker} from './segment_marker';
|
||||
|
||||
export function removeSourceMapComments(contents: string): string {
|
||||
|
@ -33,10 +33,8 @@ export class SourceFile {
|
|||
readonly sourcePath: AbsoluteFsPath,
|
||||
/** The contents of this source file. */
|
||||
readonly contents: string,
|
||||
/** The raw source map (if any) associated with this source file. */
|
||||
readonly rawMap: RawSourceMap|null,
|
||||
/** Whether this source file's source map was inline or external. */
|
||||
readonly inline: boolean,
|
||||
/** The raw source map (if any) referenced by this source file. */
|
||||
readonly rawMap: SourceMapInfo|null,
|
||||
/** Any source files referenced by the raw source map associated with this source file. */
|
||||
readonly sources: (SourceFile|null)[],
|
||||
private fs: PathManipulation,
|
||||
|
@ -141,7 +139,8 @@ export class SourceFile {
|
|||
* source files with no transitive source maps.
|
||||
*/
|
||||
private flattenMappings(): Mapping[] {
|
||||
const mappings = parseMappings(this.rawMap, this.sources, this.startOfLinePositions);
|
||||
const mappings =
|
||||
parseMappings(this.rawMap && this.rawMap.map, this.sources, this.startOfLinePositions);
|
||||
ensureOriginalSegmentLinks(mappings);
|
||||
const flattenedMappings: Mapping[] = [];
|
||||
for (let mappingIndex = 0; mappingIndex < mappings.length; mappingIndex++) {
|
||||
|
|
|
@ -10,7 +10,8 @@ import {commentRegex, fromComment, mapFileCommentRegex} from 'convert-source-map
|
|||
import {AbsoluteFsPath, ReadonlyFileSystem} from '../../file_system';
|
||||
import {Logger} from '../../logging';
|
||||
|
||||
import {RawSourceMap} from './raw_source_map';
|
||||
import {ContentOrigin} from './content_origin';
|
||||
import {MapAndPath, RawSourceMap, SourceMapInfo} from './raw_source_map';
|
||||
import {SourceFile} from './source_file';
|
||||
|
||||
const SCHEME_MATCHER = /^([a-z][a-z0-9.-]*):\/\//i;
|
||||
|
@ -33,7 +34,8 @@ export class SourceFileLoader {
|
|||
private schemeMap: Record<string, AbsoluteFsPath>) {}
|
||||
|
||||
/**
|
||||
* Load a source file, compute its source map, and recursively load any referenced source files.
|
||||
* Load a source file from the provided content and source map, and recursively load any
|
||||
* referenced source files.
|
||||
*
|
||||
* @param sourcePath The path to the source file to load.
|
||||
* @param contents The contents of the source file to load.
|
||||
|
@ -41,24 +43,51 @@ export class SourceFileLoader {
|
|||
* @returns a SourceFile object created from the `contents` and provided source-map info.
|
||||
*/
|
||||
loadSourceFile(sourcePath: AbsoluteFsPath, contents: string, mapAndPath: MapAndPath): SourceFile;
|
||||
/**
|
||||
* Load a source file from the provided content, compute its source map, and recursively load any
|
||||
* referenced source files.
|
||||
*
|
||||
* @param sourcePath The path to the source file to load.
|
||||
* @param contents The contents of the source file to load.
|
||||
* @returns a SourceFile object created from the `contents` and computed source-map info.
|
||||
*/
|
||||
loadSourceFile(sourcePath: AbsoluteFsPath, contents: string): SourceFile;
|
||||
/**
|
||||
* Load a source file from the file-system, compute its source map, and recursively load any
|
||||
* referenced source files.
|
||||
*
|
||||
* @param sourcePath The path to the source file to load.
|
||||
* @returns a SourceFile object if its contents could be loaded from disk, or null otherwise.
|
||||
*/
|
||||
loadSourceFile(sourcePath: AbsoluteFsPath): SourceFile|null;
|
||||
loadSourceFile(
|
||||
sourcePath: AbsoluteFsPath, contents: string|null = null,
|
||||
mapAndPath: MapAndPath|null = null): SourceFile|null {
|
||||
const contentsOrigin = contents !== null ? ContentOrigin.Provided : ContentOrigin.FileSystem;
|
||||
const sourceMapInfo: SourceMapInfo|null =
|
||||
mapAndPath && {origin: ContentOrigin.Provided, ...mapAndPath};
|
||||
return this.loadSourceFileInternal(sourcePath, contents, contentsOrigin, sourceMapInfo);
|
||||
}
|
||||
|
||||
/**
|
||||
* The overload used internally to load source files referenced in a source-map.
|
||||
*
|
||||
* In this case there is no guarantee that it will return a non-null SourceMap.
|
||||
*
|
||||
* @param sourcePath The path to the source file to load.
|
||||
* @param contents The contents of the source file to load, if provided inline.
|
||||
* If it is not known the contents will be read from the file at the `sourcePath`.
|
||||
* @param mapAndPath The raw source-map and the path to the source-map file.
|
||||
* @param contents The contents of the source file to load, if provided inline. If `null`,
|
||||
* the contents will be read from the file at the `sourcePath`.
|
||||
* @param sourceOrigin Describes where the source content came from.
|
||||
* @param sourceMapInfo The raw contents and path of the source-map file. If `null` the
|
||||
* source-map will be computed from the contents of the source file, either inline or loaded
|
||||
* from the file-system.
|
||||
*
|
||||
* @returns a SourceFile if the content for one was provided or able to be loaded from disk,
|
||||
* @returns a SourceFile if the content for one was provided or was able to be loaded from disk,
|
||||
* `null` otherwise.
|
||||
*/
|
||||
loadSourceFile(sourcePath: AbsoluteFsPath, contents?: string|null, mapAndPath?: null): SourceFile
|
||||
|null;
|
||||
loadSourceFile(
|
||||
sourcePath: AbsoluteFsPath, contents: string|null = null,
|
||||
mapAndPath: MapAndPath|null = null): SourceFile|null {
|
||||
private loadSourceFileInternal(
|
||||
sourcePath: AbsoluteFsPath, contents: string|null, sourceOrigin: ContentOrigin,
|
||||
sourceMapInfo: SourceMapInfo|null): SourceFile|null {
|
||||
const previousPaths = this.currentPaths.slice();
|
||||
try {
|
||||
if (contents === null) {
|
||||
|
@ -69,21 +98,17 @@ export class SourceFileLoader {
|
|||
}
|
||||
|
||||
// If not provided try to load the source map based on the source itself
|
||||
if (mapAndPath === null) {
|
||||
mapAndPath = this.loadSourceMap(sourcePath, contents);
|
||||
if (sourceMapInfo === null) {
|
||||
sourceMapInfo = this.loadSourceMap(sourcePath, contents, sourceOrigin);
|
||||
}
|
||||
|
||||
let map: RawSourceMap|null = null;
|
||||
let inline = true;
|
||||
let sources: (SourceFile|null)[] = [];
|
||||
if (mapAndPath !== null) {
|
||||
const basePath = mapAndPath.mapPath || sourcePath;
|
||||
sources = this.processSources(basePath, mapAndPath.map);
|
||||
map = mapAndPath.map;
|
||||
inline = mapAndPath.mapPath === null;
|
||||
if (sourceMapInfo !== null) {
|
||||
const basePath = sourceMapInfo.mapPath || sourcePath;
|
||||
sources = this.processSources(basePath, sourceMapInfo);
|
||||
}
|
||||
|
||||
return new SourceFile(sourcePath, contents, map, inline, sources, this.fs);
|
||||
return new SourceFile(sourcePath, contents, sourceMapInfo, sources, this.fs);
|
||||
} catch (e) {
|
||||
this.logger.warn(
|
||||
`Unable to fully load ${sourcePath} for source-map flattening: ${e.message}`);
|
||||
|
@ -100,15 +125,34 @@ export class SourceFileLoader {
|
|||
*
|
||||
* Source maps can be inline, as part of a base64 encoded comment, or external as a separate file
|
||||
* whose path is indicated in a comment or implied from the name of the source file itself.
|
||||
*
|
||||
* @param sourcePath the path to the source file.
|
||||
* @param sourceContents the contents of the source file.
|
||||
* @param sourceOrigin where the content of the source file came from.
|
||||
* @returns the parsed contents and path of the source-map, if loading was successful, null
|
||||
* otherwise.
|
||||
*/
|
||||
private loadSourceMap(sourcePath: AbsoluteFsPath, contents: string): MapAndPath|null {
|
||||
private loadSourceMap(
|
||||
sourcePath: AbsoluteFsPath, sourceContents: string,
|
||||
sourceOrigin: ContentOrigin): SourceMapInfo|null {
|
||||
// Only consider a source-map comment from the last non-empty line of the file, in case there
|
||||
// are embedded source-map comments elsewhere in the file (as can be the case with bundlers like
|
||||
// webpack).
|
||||
const lastLine = this.getLastNonEmptyLine(contents);
|
||||
const lastLine = this.getLastNonEmptyLine(sourceContents);
|
||||
const inline = commentRegex.exec(lastLine);
|
||||
if (inline !== null) {
|
||||
return {map: fromComment(inline.pop()!).sourcemap, mapPath: null};
|
||||
return {
|
||||
map: fromComment(inline.pop()!).sourcemap,
|
||||
mapPath: null,
|
||||
origin: ContentOrigin.Inline,
|
||||
};
|
||||
}
|
||||
|
||||
if (sourceOrigin === ContentOrigin.Inline) {
|
||||
// The source file was provided inline and its contents did not include an inline source-map.
|
||||
// So we don't try to load an external source-map from the file-system, since this can lead to
|
||||
// invalid circular dependencies.
|
||||
return null;
|
||||
}
|
||||
|
||||
const external = mapFileCommentRegex.exec(lastLine);
|
||||
|
@ -116,7 +160,11 @@ export class SourceFileLoader {
|
|||
try {
|
||||
const fileName = external[1] || external[2];
|
||||
const externalMapPath = this.fs.resolve(this.fs.dirname(sourcePath), fileName);
|
||||
return {map: this.readRawSourceMap(externalMapPath), mapPath: externalMapPath};
|
||||
return {
|
||||
map: this.readRawSourceMap(externalMapPath),
|
||||
mapPath: externalMapPath,
|
||||
origin: ContentOrigin.FileSystem,
|
||||
};
|
||||
} catch (e) {
|
||||
this.logger.warn(
|
||||
`Unable to fully load ${sourcePath} for source-map flattening: ${e.message}`);
|
||||
|
@ -126,7 +174,11 @@ export class SourceFileLoader {
|
|||
|
||||
const impliedMapPath = this.fs.resolve(sourcePath + '.map');
|
||||
if (this.fs.exists(impliedMapPath)) {
|
||||
return {map: this.readRawSourceMap(impliedMapPath), mapPath: impliedMapPath};
|
||||
return {
|
||||
map: this.readRawSourceMap(impliedMapPath),
|
||||
mapPath: impliedMapPath,
|
||||
origin: ContentOrigin.FileSystem,
|
||||
};
|
||||
}
|
||||
|
||||
return null;
|
||||
|
@ -136,13 +188,23 @@ export class SourceFileLoader {
|
|||
* Iterate over each of the "sources" for this source file's source map, recursively loading each
|
||||
* source file and its associated source map.
|
||||
*/
|
||||
private processSources(basePath: AbsoluteFsPath, map: RawSourceMap): (SourceFile|null)[] {
|
||||
private processSources(basePath: AbsoluteFsPath, {map, origin: sourceMapOrigin}: SourceMapInfo):
|
||||
(SourceFile|null)[] {
|
||||
const sourceRoot = this.fs.resolve(
|
||||
this.fs.dirname(basePath), this.replaceSchemeWithPath(map.sourceRoot || ''));
|
||||
return map.sources.map((source, index) => {
|
||||
const path = this.fs.resolve(sourceRoot, this.replaceSchemeWithPath(source));
|
||||
const content = map.sourcesContent && map.sourcesContent[index] || null;
|
||||
return this.loadSourceFile(path, content, null);
|
||||
// The origin of this source file is "inline" if we extracted it from the source-map's
|
||||
// `sourcesContent`, except when the source-map itself was "provided" in-memory.
|
||||
// An inline source file is treated as if it were from the file-system if the source-map that
|
||||
// contains it was provided in-memory. The first call to `loadSourceFile()` is special in that
|
||||
// if you "provide" the contents of the source-map in-memory then we don't want to block
|
||||
// loading sources from the file-system just because this source-map had an inline source.
|
||||
const sourceOrigin = content !== null && sourceMapOrigin !== ContentOrigin.Provided ?
|
||||
ContentOrigin.Inline :
|
||||
ContentOrigin.FileSystem;
|
||||
return this.loadSourceFileInternal(path, content, sourceOrigin, null);
|
||||
});
|
||||
}
|
||||
|
||||
|
@ -206,11 +268,3 @@ export class SourceFileLoader {
|
|||
SCHEME_MATCHER, (_: string, scheme: string) => this.schemeMap[scheme.toLowerCase()] || '');
|
||||
}
|
||||
}
|
||||
|
||||
/** A small helper structure that is returned from `loadSourceMap()`. */
|
||||
export interface MapAndPath {
|
||||
/** The path to the source map if it was external or `null` if it was inline. */
|
||||
mapPath: AbsoluteFsPath|null;
|
||||
/** The raw source map itself. */
|
||||
map: RawSourceMap;
|
||||
}
|
||||
|
|
|
@ -11,7 +11,7 @@ import {absoluteFrom, FileSystem, getFileSystem} from '../../file_system';
|
|||
import {runInEachFileSystem} from '../../file_system/testing';
|
||||
import {MockLogger} from '../../logging/testing';
|
||||
import {RawSourceMap} from '../src/raw_source_map';
|
||||
import {SourceFileLoader as SourceFileLoader} from '../src/source_file_loader';
|
||||
import {SourceFileLoader} from '../src/source_file_loader';
|
||||
|
||||
runInEachFileSystem(() => {
|
||||
describe('SourceFileLoader', () => {
|
||||
|
@ -34,7 +34,7 @@ runInEachFileSystem(() => {
|
|||
}
|
||||
expect(sourceFile.contents).toEqual('some inline content');
|
||||
expect(sourceFile.sourcePath).toEqual(_('/foo/src/index.js'));
|
||||
expect(sourceFile.rawMap).toEqual(null);
|
||||
expect(sourceFile.rawMap).toBe(null);
|
||||
expect(sourceFile.sources).toEqual([]);
|
||||
});
|
||||
|
||||
|
@ -47,7 +47,7 @@ runInEachFileSystem(() => {
|
|||
}
|
||||
expect(sourceFile.contents).toEqual('some external content');
|
||||
expect(sourceFile.sourcePath).toEqual(_('/foo/src/index.js'));
|
||||
expect(sourceFile.rawMap).toEqual(null);
|
||||
expect(sourceFile.rawMap).toBe(null);
|
||||
expect(sourceFile.sources).toEqual([]);
|
||||
});
|
||||
|
||||
|
@ -60,7 +60,10 @@ runInEachFileSystem(() => {
|
|||
if (sourceFile === null) {
|
||||
return fail('Expected source file to be defined');
|
||||
}
|
||||
expect(sourceFile.rawMap).toEqual(sourceMap);
|
||||
if (sourceFile.rawMap === null) {
|
||||
return fail('Expected source map to be defined');
|
||||
}
|
||||
expect(sourceFile.rawMap.map).toEqual(sourceMap);
|
||||
});
|
||||
|
||||
it('should only read source-map comments from the last line of a file', () => {
|
||||
|
@ -76,7 +79,10 @@ runInEachFileSystem(() => {
|
|||
if (sourceFile === null) {
|
||||
return fail('Expected source file to be defined');
|
||||
}
|
||||
expect(sourceFile.rawMap).toEqual(sourceMap);
|
||||
if (sourceFile.rawMap === null) {
|
||||
return fail('Expected source map to be defined');
|
||||
}
|
||||
expect(sourceFile.rawMap.map).toEqual(sourceMap);
|
||||
});
|
||||
|
||||
for (const eolMarker of ['\n', '\r\n']) {
|
||||
|
@ -97,7 +103,10 @@ runInEachFileSystem(() => {
|
|||
if (sourceFile === null) {
|
||||
return fail('Expected source file to be defined');
|
||||
}
|
||||
expect(sourceFile.rawMap).toEqual(sourceMap);
|
||||
if (sourceFile.rawMap === null) {
|
||||
return fail('Expected source map to be defined');
|
||||
}
|
||||
expect(sourceFile.rawMap.map).toEqual(sourceMap);
|
||||
});
|
||||
}
|
||||
|
||||
|
@ -121,7 +130,10 @@ runInEachFileSystem(() => {
|
|||
if (sourceFile === null) {
|
||||
return fail('Expected source file to be defined');
|
||||
}
|
||||
expect(sourceFile.rawMap).toEqual(sourceMap);
|
||||
if (sourceFile.rawMap === null) {
|
||||
return fail('Expected source map to be defined');
|
||||
}
|
||||
expect(sourceFile.rawMap.map).toEqual(sourceMap);
|
||||
});
|
||||
|
||||
it('should load a file with an implied source map', () => {
|
||||
|
@ -132,7 +144,10 @@ runInEachFileSystem(() => {
|
|||
if (sourceFile === null) {
|
||||
return fail('Expected source file to be defined');
|
||||
}
|
||||
expect(sourceFile.rawMap).toEqual(sourceMap);
|
||||
if (sourceFile.rawMap === null) {
|
||||
return fail('Expected source map to be defined');
|
||||
}
|
||||
expect(sourceFile.rawMap.map).toEqual(sourceMap);
|
||||
});
|
||||
|
||||
it('should handle missing implied source-map file', () => {
|
||||
|
@ -177,29 +192,32 @@ runInEachFileSystem(() => {
|
|||
|
||||
expect(sourceFile.contents).toEqual('index content');
|
||||
expect(sourceFile.sourcePath).toEqual(_('/foo/src/index.js'));
|
||||
expect(sourceFile.rawMap).toEqual(indexSourceMap);
|
||||
if (sourceFile.rawMap === null) {
|
||||
return fail('Expected source map to be defined');
|
||||
}
|
||||
expect(sourceFile.rawMap.map).toEqual(indexSourceMap);
|
||||
|
||||
expect(sourceFile.sources.length).toEqual(3);
|
||||
|
||||
expect(sourceFile.sources[0]!.contents).toEqual('x content');
|
||||
expect(sourceFile.sources[0]!.sourcePath).toEqual(_('/foo/src/x.js'));
|
||||
expect(sourceFile.sources[0]!.rawMap).toEqual(null);
|
||||
expect(sourceFile.sources[0]!.rawMap).toBe(null);
|
||||
expect(sourceFile.sources[0]!.sources).toEqual([]);
|
||||
|
||||
|
||||
expect(sourceFile.sources[1]!.contents).toEqual('y content');
|
||||
expect(sourceFile.sources[1]!.sourcePath).toEqual(_('/foo/src/y.js'));
|
||||
expect(sourceFile.sources[1]!.rawMap).toEqual(ySourceMap);
|
||||
expect(sourceFile.sources[1]!.rawMap!.map).toEqual(ySourceMap);
|
||||
|
||||
expect(sourceFile.sources[1]!.sources.length).toEqual(1);
|
||||
expect(sourceFile.sources[1]!.sources[0]!.contents).toEqual('a content');
|
||||
expect(sourceFile.sources[1]!.sources[0]!.sourcePath).toEqual(_('/foo/src/a.js'));
|
||||
expect(sourceFile.sources[1]!.sources[0]!.rawMap).toEqual(null);
|
||||
expect(sourceFile.sources[1]!.sources[0]!.rawMap).toBe(null);
|
||||
expect(sourceFile.sources[1]!.sources[0]!.sources).toEqual([]);
|
||||
|
||||
expect(sourceFile.sources[2]!.contents).toEqual('z content');
|
||||
expect(sourceFile.sources[2]!.sourcePath).toEqual(_('/foo/src/z.js'));
|
||||
expect(sourceFile.sources[2]!.rawMap).toEqual(null);
|
||||
expect(sourceFile.sources[2]!.rawMap).toBe(null);
|
||||
expect(sourceFile.sources[2]!.sources).toEqual([]);
|
||||
});
|
||||
|
||||
|
@ -217,7 +235,10 @@ runInEachFileSystem(() => {
|
|||
|
||||
expect(sourceFile.contents).toEqual('index content');
|
||||
expect(sourceFile.sourcePath).toEqual(_('/foo/src/index.js'));
|
||||
expect(sourceFile.rawMap).toEqual(indexSourceMap);
|
||||
if (sourceFile.rawMap === null) {
|
||||
return fail('Expected source map to be defined');
|
||||
}
|
||||
expect(sourceFile.rawMap.map).toEqual(indexSourceMap);
|
||||
expect(sourceFile.sources.length).toEqual(1);
|
||||
expect(sourceFile.sources[0]).toBe(null);
|
||||
});
|
||||
|
@ -225,6 +246,10 @@ runInEachFileSystem(() => {
|
|||
|
||||
it('should log a warning if there is a cyclic dependency in source files loaded from disk',
|
||||
() => {
|
||||
// a.js -> a.js.map -> b.js -> b.js.map -> c.js -> c.js.map -> (external) a.js
|
||||
// ^^^^^^^^^^^^^^^
|
||||
// c.js.map incorrectly links to a.js, creating a cycle
|
||||
|
||||
fs.ensureDir(_('/foo/src'));
|
||||
|
||||
const aMap = createRawSourceMap({file: 'a.js', sources: ['b.js']});
|
||||
|
@ -248,7 +273,10 @@ runInEachFileSystem(() => {
|
|||
expect(sourceFile).not.toBe(null!);
|
||||
expect(sourceFile.contents).toEqual('a content\n');
|
||||
expect(sourceFile.sourcePath).toEqual(_('/foo/src/a.js'));
|
||||
expect(sourceFile.rawMap).toEqual(aMap);
|
||||
if (sourceFile.rawMap === null) {
|
||||
return fail('Expected source map to be defined');
|
||||
}
|
||||
expect(sourceFile.rawMap.map).toEqual(aMap);
|
||||
expect(sourceFile.sources.length).toEqual(1);
|
||||
|
||||
expect(logger.logs.warn[0][0])
|
||||
|
@ -259,65 +287,119 @@ runInEachFileSystem(() => {
|
|||
|
||||
it('should log a warning if there is a cyclic dependency in source maps loaded from disk',
|
||||
() => {
|
||||
// a.js -> a.js.map -> b.js -> a.js.map -> c.js
|
||||
// ^^^^^^^^
|
||||
// b.js incorrectly links to a.js.map, creating a cycle
|
||||
|
||||
fs.ensureDir(_('/foo/src'));
|
||||
|
||||
// Create a self-referencing source-map
|
||||
const aMap = createRawSourceMap({
|
||||
file: 'a.js',
|
||||
sources: ['a.js'],
|
||||
sourcesContent: ['inline a.js content\n//# sourceMappingURL=a.js.map']
|
||||
});
|
||||
const aMapPath = _('/foo/src/a.js.map');
|
||||
fs.writeFile(aMapPath, JSON.stringify(aMap));
|
||||
|
||||
const aPath = _('/foo/src/a.js');
|
||||
fs.writeFile(aPath, 'a.js content\n//# sourceMappingURL=a.js.map');
|
||||
|
||||
const sourceFile = registry.loadSourceFile(aPath)!;
|
||||
expect(sourceFile).not.toBe(null!);
|
||||
const aMap = createRawSourceMap({file: 'a.js', sources: ['b.js']});
|
||||
const aMapPath = _('/foo/src/a.js.map');
|
||||
fs.writeFile(aMapPath, JSON.stringify(aMap));
|
||||
|
||||
const bPath = _('/foo/src/b.js');
|
||||
fs.writeFile(bPath, 'b.js content\n//# sourceMappingURL=a.js.map');
|
||||
|
||||
const sourceFile = registry.loadSourceFile(aPath);
|
||||
if (sourceFile === null) {
|
||||
return fail('Expected source file to be defined');
|
||||
}
|
||||
expect(sourceFile.contents).toEqual('a.js content\n');
|
||||
expect(sourceFile.sourcePath).toEqual(_('/foo/src/a.js'));
|
||||
expect(sourceFile.rawMap).toEqual(aMap);
|
||||
if (sourceFile.rawMap === null) {
|
||||
return fail('Expected source map to be defined');
|
||||
}
|
||||
expect(sourceFile.rawMap.map).toEqual(aMap);
|
||||
expect(sourceFile.sources.length).toEqual(1);
|
||||
|
||||
expect(logger.logs.warn[0][0])
|
||||
.toContain(
|
||||
`Circular source file mapping dependency: ` +
|
||||
`${aPath} -> ${aMapPath} -> ${aMapPath}`);
|
||||
|
||||
const innerSourceFile = sourceFile.sources[0]!;
|
||||
expect(innerSourceFile).not.toBe(null!);
|
||||
expect(innerSourceFile.contents).toEqual('inline a.js content\n');
|
||||
expect(innerSourceFile.sourcePath).toEqual(_('/foo/src/a.js'));
|
||||
expect(innerSourceFile.rawMap).toEqual(null);
|
||||
`${aPath} -> ${aMapPath} -> ${bPath} -> ${aMapPath}`);
|
||||
const innerSourceFile = sourceFile.sources[0];
|
||||
if (innerSourceFile === null) {
|
||||
return fail('Expected source file to be defined');
|
||||
}
|
||||
expect(innerSourceFile.contents).toEqual('b.js content\n');
|
||||
expect(innerSourceFile.sourcePath).toEqual(_('/foo/src/b.js'));
|
||||
// The source-map from b.js was not loaded as it would have caused a cycle
|
||||
expect(innerSourceFile.rawMap).toBe(null);
|
||||
expect(innerSourceFile.sources.length).toEqual(0);
|
||||
});
|
||||
|
||||
it('should not fail if there is a cyclic dependency in filenames of inline sources', () => {
|
||||
it('should not fail if the filename of an inline source looks like a cyclic dependency', () => {
|
||||
// a.js -> (inline) a.js.map -> (inline) a.js
|
||||
// ^^^^^^^^^^^^^
|
||||
// a.js loads despite same name as previous file because it is inline
|
||||
|
||||
fs.ensureDir(_('/foo/src'));
|
||||
|
||||
const aPath = _('/foo/src/a.js');
|
||||
fs.writeFile(
|
||||
aPath,
|
||||
'a content\n' +
|
||||
fromObject(createRawSourceMap({file: 'a.js', sources: ['b.js']})).toComment());
|
||||
const aMap = createRawSourceMap(
|
||||
{file: 'a.js', sources: ['a.js'], sourcesContent: ['inline original a.js content']});
|
||||
fs.writeFile(aPath, 'a content\n' + fromObject(aMap).toComment());
|
||||
|
||||
const bPath = _('/foo/src/b.js');
|
||||
fs.writeFile(bPath, 'b content');
|
||||
fs.writeFile(
|
||||
_('/foo/src/b.js.map'),
|
||||
JSON.stringify(createRawSourceMap({file: 'b.js', sources: ['c.js']})));
|
||||
const sourceFile = registry.loadSourceFile(aPath);
|
||||
if (sourceFile === null) {
|
||||
return fail('Expected source file to be defined');
|
||||
}
|
||||
expect(sourceFile.sources.length).toEqual(1);
|
||||
expect(sourceFile.sources[0]!.contents).toEqual('inline original a.js content');
|
||||
expect(sourceFile.sources[0]!.sourcePath).toEqual(aPath);
|
||||
expect(sourceFile.sources[0]!.rawMap).toBe(null);
|
||||
expect(sourceFile.sources[0]!.sources).toEqual([]);
|
||||
|
||||
const cPath = _('/foo/src/c.js');
|
||||
fs.writeFile(cPath, 'c content');
|
||||
fs.writeFile(
|
||||
_('/foo/src/c.js.map'),
|
||||
JSON.stringify(createRawSourceMap(
|
||||
{file: 'c.js', sources: ['a.js'], sourcesContent: ['inline a.js content']})));
|
||||
|
||||
expect(() => registry.loadSourceFile(aPath)).not.toThrow();
|
||||
expect(logger.logs.warn.length).toEqual(0);
|
||||
});
|
||||
|
||||
it('should not load source-maps (after the initial map) from disk if the source file was inline',
|
||||
() => {
|
||||
// a.js -> (initial) a.js.map -> b.js -> b.js.map -> (inline) c.js -> c.js.map
|
||||
// ^^^^^^^^
|
||||
// c.js.map is not loaded because the referencing source file (c.js) was inline
|
||||
|
||||
fs.ensureDir(_('/foo/src'));
|
||||
|
||||
const aPath = _('/foo/src/a.js');
|
||||
fs.writeFile(aPath, 'a.js content\n//# sourceMappingURL=a.js.map');
|
||||
const aMapPath = _('/foo/src/a.js.map');
|
||||
const aMap = createRawSourceMap({file: 'a.js', sources: ['b.js']});
|
||||
fs.writeFile(aMapPath, JSON.stringify(aMap));
|
||||
|
||||
const bPath = _('/foo/src/b.js');
|
||||
fs.writeFile(bPath, 'b.js content\n//# sourceMappingURL=b.js.map');
|
||||
const bMapPath = _('/foo/src/b.js.map');
|
||||
const bMap = createRawSourceMap({
|
||||
file: 'b.js',
|
||||
sources: ['c.js'],
|
||||
sourcesContent: ['c content\n//# sourceMappingURL=c.js.map']
|
||||
});
|
||||
fs.writeFile(bMapPath, JSON.stringify(bMap));
|
||||
|
||||
const cMapPath = _('/foo/src/c.js.map');
|
||||
const cMap = createRawSourceMap({file: 'c.js', sources: ['d.js']});
|
||||
fs.writeFile(cMapPath, JSON.stringify(cMap));
|
||||
|
||||
const sourceFile = registry.loadSourceFile(aPath);
|
||||
if (sourceFile === null) {
|
||||
return fail('Expected source file to be defined');
|
||||
}
|
||||
const bSource = sourceFile.sources[0];
|
||||
if (!bSource) {
|
||||
return fail('Expected source file to be defined');
|
||||
}
|
||||
const cSource = bSource.sources[0];
|
||||
if (!cSource) {
|
||||
return fail('Expected source file to be defined');
|
||||
}
|
||||
// External c.js.map never gets loaded because c.js was inline source
|
||||
expect(cSource.rawMap).toBe(null);
|
||||
expect(cSource.sources).toEqual([]);
|
||||
|
||||
expect(logger.logs.warn.length).toEqual(0);
|
||||
});
|
||||
|
||||
for (const {scheme, mappedPath} of
|
||||
[{scheme: 'WEBPACK://', mappedPath: '/foo/src/index.ts'},
|
||||
{scheme: 'webpack://', mappedPath: '/foo/src/index.ts'},
|
||||
|
@ -342,7 +424,7 @@ runInEachFileSystem(() => {
|
|||
}
|
||||
expect(originalSource.contents).toEqual('original content');
|
||||
expect(originalSource.sourcePath).toEqual(_(mappedPath));
|
||||
expect(originalSource.rawMap).toEqual(null);
|
||||
expect(originalSource.rawMap).toBe(null);
|
||||
expect(originalSource.sources).toEqual([]);
|
||||
});
|
||||
|
||||
|
@ -366,7 +448,7 @@ runInEachFileSystem(() => {
|
|||
}
|
||||
expect(originalSource.contents).toEqual('original content');
|
||||
expect(originalSource.sourcePath).toEqual(_(mappedPath));
|
||||
expect(originalSource.rawMap).toEqual(null);
|
||||
expect(originalSource.rawMap).toBe(null);
|
||||
expect(originalSource.sources).toEqual([]);
|
||||
});
|
||||
}
|
||||
|
|
|
@ -9,7 +9,8 @@ import {encode} from 'sourcemap-codec';
|
|||
|
||||
import {absoluteFrom, getFileSystem, PathManipulation} from '../../file_system';
|
||||
import {runInEachFileSystem} from '../../file_system/testing';
|
||||
import {RawSourceMap} from '../src/raw_source_map';
|
||||
import {ContentOrigin} from '../src/content_origin';
|
||||
import {RawSourceMap, SourceMapInfo} from '../src/raw_source_map';
|
||||
import {SegmentMarker} from '../src/segment_marker';
|
||||
import {computeStartOfLinePositions, ensureOriginalSegmentLinks, extractOriginalSegments, findLastMappingIndexBefore, Mapping, parseMappings, SourceFile} from '../src/source_file';
|
||||
|
||||
|
@ -42,7 +43,7 @@ runInEachFileSystem(() => {
|
|||
sources: ['a.js'],
|
||||
version: 3
|
||||
};
|
||||
const originalSource = new SourceFile(_('/foo/src/a.js'), 'abcdefg', null, false, [], fs);
|
||||
const originalSource = new SourceFile(_('/foo/src/a.js'), 'abcdefg', null, [], fs);
|
||||
const mappings = parseMappings(rawSourceMap, [originalSource], [0, 8]);
|
||||
expect(mappings).toEqual([
|
||||
{
|
||||
|
@ -73,8 +74,7 @@ runInEachFileSystem(() => {
|
|||
|
||||
it('should parse the segments in ascending order of original position from the raw source map',
|
||||
() => {
|
||||
const originalSource =
|
||||
new SourceFile(_('/foo/src/a.js'), 'abcdefg', null, false, [], fs);
|
||||
const originalSource = new SourceFile(_('/foo/src/a.js'), 'abcdefg', null, [], fs);
|
||||
const rawSourceMap: RawSourceMap = {
|
||||
mappings: encode([[[0, 0, 0, 0], [2, 0, 0, 3], [4, 0, 0, 2]]]),
|
||||
names: [],
|
||||
|
@ -91,8 +91,8 @@ runInEachFileSystem(() => {
|
|||
});
|
||||
|
||||
it('should create separate arrays for each original source file', () => {
|
||||
const sourceA = new SourceFile(_('/foo/src/a.js'), 'abcdefg', null, false, [], fs);
|
||||
const sourceB = new SourceFile(_('/foo/src/b.js'), '1234567', null, false, [], fs);
|
||||
const sourceA = new SourceFile(_('/foo/src/a.js'), 'abcdefg', null, [], fs);
|
||||
const sourceB = new SourceFile(_('/foo/src/b.js'), '1234567', null, [], fs);
|
||||
const rawSourceMap: RawSourceMap = {
|
||||
mappings:
|
||||
encode([[[0, 0, 0, 0], [2, 1, 0, 3], [4, 0, 0, 2], [5, 1, 0, 5], [6, 1, 0, 2]]]),
|
||||
|
@ -316,8 +316,8 @@ runInEachFileSystem(() => {
|
|||
describe('ensureOriginalSegmentLinks', () => {
|
||||
it('should add `next` properties to each segment that point to the next segment in the same source file',
|
||||
() => {
|
||||
const sourceA = new SourceFile(_('/foo/src/a.js'), 'abcdefg', null, false, [], fs);
|
||||
const sourceB = new SourceFile(_('/foo/src/b.js'), '1234567', null, false, [], fs);
|
||||
const sourceA = new SourceFile(_('/foo/src/a.js'), 'abcdefg', null, [], fs);
|
||||
const sourceB = new SourceFile(_('/foo/src/b.js'), '1234567', null, [], fs);
|
||||
const rawSourceMap: RawSourceMap = {
|
||||
mappings:
|
||||
encode([[[0, 0, 0, 0], [2, 1, 0, 3], [4, 0, 0, 2], [5, 1, 0, 5], [6, 1, 0, 2]]]),
|
||||
|
@ -338,55 +338,68 @@ runInEachFileSystem(() => {
|
|||
describe('SourceFile', () => {
|
||||
describe('flattenedMappings', () => {
|
||||
it('should be an empty array for source files with no source map', () => {
|
||||
const sourceFile =
|
||||
new SourceFile(_('/foo/src/index.js'), 'index contents', null, false, [], fs);
|
||||
const sourceFile = new SourceFile(_('/foo/src/index.js'), 'index contents', null, [], fs);
|
||||
expect(sourceFile.flattenedMappings).toEqual([]);
|
||||
});
|
||||
|
||||
it('should be empty array for source files with no source map mappings', () => {
|
||||
const rawSourceMap: RawSourceMap = {mappings: '', names: [], sources: [], version: 3};
|
||||
const rawSourceMap: SourceMapInfo = {
|
||||
map: {mappings: '', names: [], sources: [], version: 3},
|
||||
mapPath: null,
|
||||
origin: ContentOrigin.Provided
|
||||
};
|
||||
const sourceFile =
|
||||
new SourceFile(_('/foo/src/index.js'), 'index contents', rawSourceMap, false, [], fs);
|
||||
new SourceFile(_('/foo/src/index.js'), 'index contents', rawSourceMap, [], fs);
|
||||
expect(sourceFile.flattenedMappings).toEqual([]);
|
||||
});
|
||||
|
||||
it('should be the same as non-flat mappings if there is only one level of source map',
|
||||
() => {
|
||||
const rawSourceMap: RawSourceMap = {
|
||||
mappings: encode([[[0, 0, 0, 0], [6, 0, 0, 3]]]),
|
||||
names: [],
|
||||
sources: ['a.js'],
|
||||
version: 3
|
||||
const rawSourceMap: SourceMapInfo = {
|
||||
mapPath: null,
|
||||
map: {
|
||||
mappings: encode([[[0, 0, 0, 0], [6, 0, 0, 3]]]),
|
||||
names: [],
|
||||
sources: ['a.js'],
|
||||
version: 3
|
||||
},
|
||||
origin: ContentOrigin.Provided,
|
||||
};
|
||||
const originalSource =
|
||||
new SourceFile(_('/foo/src/a.js'), 'abcdefg', null, false, [], fs);
|
||||
const originalSource = new SourceFile(_('/foo/src/a.js'), 'abcdefg', null, [], fs);
|
||||
const sourceFile = new SourceFile(
|
||||
_('/foo/src/index.js'), 'abc123defg', rawSourceMap, false, [originalSource], fs);
|
||||
_('/foo/src/index.js'), 'abc123defg', rawSourceMap, [originalSource], fs);
|
||||
expect(removeOriginalSegmentLinks(sourceFile.flattenedMappings))
|
||||
.toEqual(parseMappings(rawSourceMap, [originalSource], [0, 11]));
|
||||
.toEqual(parseMappings(rawSourceMap.map, [originalSource], [0, 11]));
|
||||
});
|
||||
|
||||
it('should merge mappings from flattened original source files', () => {
|
||||
const cSource = new SourceFile(_('/foo/src/c.js'), 'bcd123', null, false, [], fs);
|
||||
const dSource = new SourceFile(_('/foo/src/d.js'), 'aef', null, false, [], fs);
|
||||
const cSource = new SourceFile(_('/foo/src/c.js'), 'bcd123', null, [], fs);
|
||||
const dSource = new SourceFile(_('/foo/src/d.js'), 'aef', null, [], fs);
|
||||
|
||||
const bSourceMap: RawSourceMap = {
|
||||
mappings: encode([[[0, 1, 0, 0], [1, 0, 0, 0], [4, 1, 0, 1]]]),
|
||||
names: [],
|
||||
sources: ['c.js', 'd.js'],
|
||||
version: 3
|
||||
const bSourceMap: SourceMapInfo = {
|
||||
mapPath: null,
|
||||
map: {
|
||||
mappings: encode([[[0, 1, 0, 0], [1, 0, 0, 0], [4, 1, 0, 1]]]),
|
||||
names: [],
|
||||
sources: ['c.js', 'd.js'],
|
||||
version: 3
|
||||
},
|
||||
origin: ContentOrigin.Provided,
|
||||
};
|
||||
const bSource = new SourceFile(
|
||||
_('/foo/src/b.js'), 'abcdef', bSourceMap, false, [cSource, dSource], fs);
|
||||
const bSource =
|
||||
new SourceFile(_('/foo/src/b.js'), 'abcdef', bSourceMap, [cSource, dSource], fs);
|
||||
|
||||
const aSourceMap: RawSourceMap = {
|
||||
mappings: encode([[[0, 0, 0, 0], [2, 0, 0, 3], [4, 0, 0, 2], [5, 0, 0, 5]]]),
|
||||
names: [],
|
||||
sources: ['b.js'],
|
||||
version: 3
|
||||
const aSourceMap: SourceMapInfo = {
|
||||
mapPath: null,
|
||||
map: {
|
||||
mappings: encode([[[0, 0, 0, 0], [2, 0, 0, 3], [4, 0, 0, 2], [5, 0, 0, 5]]]),
|
||||
names: [],
|
||||
sources: ['b.js'],
|
||||
version: 3
|
||||
},
|
||||
origin: ContentOrigin.Provided,
|
||||
};
|
||||
const aSource =
|
||||
new SourceFile(_('/foo/src/a.js'), 'abdecf', aSourceMap, false, [bSource], fs);
|
||||
const aSource = new SourceFile(_('/foo/src/a.js'), 'abdecf', aSourceMap, [bSource], fs);
|
||||
|
||||
expect(removeOriginalSegmentLinks(aSource.flattenedMappings)).toEqual([
|
||||
{
|
||||
|
@ -429,27 +442,34 @@ runInEachFileSystem(() => {
|
|||
});
|
||||
|
||||
it('should ignore mappings to missing source files', () => {
|
||||
const bSourceMap: RawSourceMap = {
|
||||
mappings: encode([[[1, 0, 0, 0], [4, 0, 0, 3], [4, 0, 0, 6], [5, 0, 0, 7]]]),
|
||||
names: [],
|
||||
sources: ['c.js'],
|
||||
version: 3
|
||||
const bSourceMap: SourceMapInfo = {
|
||||
mapPath: null,
|
||||
map: {
|
||||
mappings: encode([[[1, 0, 0, 0], [4, 0, 0, 3], [4, 0, 0, 6], [5, 0, 0, 7]]]),
|
||||
names: [],
|
||||
sources: ['c.js'],
|
||||
version: 3
|
||||
},
|
||||
origin: ContentOrigin.Provided,
|
||||
};
|
||||
const bSource =
|
||||
new SourceFile(_('/foo/src/b.js'), 'abcdef', bSourceMap, false, [null], fs);
|
||||
const aSourceMap: RawSourceMap = {
|
||||
mappings: encode([[[0, 0, 0, 0], [2, 0, 0, 3], [4, 0, 0, 2], [5, 0, 0, 5]]]),
|
||||
names: [],
|
||||
sources: ['b.js'],
|
||||
version: 3
|
||||
const bSource = new SourceFile(_('/foo/src/b.js'), 'abcdef', bSourceMap, [null], fs);
|
||||
const aSourceMap: SourceMapInfo = {
|
||||
mapPath: null,
|
||||
map: {
|
||||
mappings: encode([[[0, 0, 0, 0], [2, 0, 0, 3], [4, 0, 0, 2], [5, 0, 0, 5]]]),
|
||||
names: [],
|
||||
sources: ['b.js'],
|
||||
version: 3
|
||||
},
|
||||
origin: ContentOrigin.Provided,
|
||||
};
|
||||
const aSource =
|
||||
new SourceFile(_('/foo/src/a.js'), 'abdecf', aSourceMap, false, [bSource], fs);
|
||||
const aSource = new SourceFile(_('/foo/src/a.js'), 'abdecf', aSourceMap, [bSource], fs);
|
||||
|
||||
// These flattened mappings are just the mappings from a to b.
|
||||
// (The mappings to c are dropped since there is no source file to map to.)
|
||||
// (The mappings to c are dropped since there is no source file to map
|
||||
// to.)
|
||||
expect(removeOriginalSegmentLinks(aSource.flattenedMappings))
|
||||
.toEqual(parseMappings(aSourceMap, [bSource], [0, 7]));
|
||||
.toEqual(parseMappings(aSourceMap.map, [bSource], [0, 7]));
|
||||
});
|
||||
|
||||
/**
|
||||
|
@ -467,23 +487,31 @@ runInEachFileSystem(() => {
|
|||
|
||||
describe('renderFlattenedSourceMap()', () => {
|
||||
it('should convert the flattenedMappings into a raw source-map object', () => {
|
||||
const cSource = new SourceFile(_('/foo/src/c.js'), 'bcd123e', null, false, [], fs);
|
||||
const bToCSourceMap: RawSourceMap = {
|
||||
mappings: encode([[[1, 0, 0, 0], [4, 0, 0, 3], [4, 0, 0, 6], [5, 0, 0, 7]]]),
|
||||
names: [],
|
||||
sources: ['c.js'],
|
||||
version: 3
|
||||
const cSource = new SourceFile(_('/foo/src/c.js'), 'bcd123e', null, [], fs);
|
||||
const bToCSourceMap: SourceMapInfo = {
|
||||
mapPath: null,
|
||||
map: {
|
||||
mappings: encode([[[1, 0, 0, 0], [4, 0, 0, 3], [4, 0, 0, 6], [5, 0, 0, 7]]]),
|
||||
names: [],
|
||||
sources: ['c.js'],
|
||||
version: 3
|
||||
},
|
||||
origin: ContentOrigin.Provided,
|
||||
};
|
||||
const bSource =
|
||||
new SourceFile(_('/foo/src/b.js'), 'abcdef', bToCSourceMap, false, [cSource], fs);
|
||||
const aToBSourceMap: RawSourceMap = {
|
||||
mappings: encode([[[0, 0, 0, 0], [2, 0, 0, 3], [4, 0, 0, 2], [5, 0, 0, 5]]]),
|
||||
names: [],
|
||||
sources: ['b.js'],
|
||||
version: 3
|
||||
new SourceFile(_('/foo/src/b.js'), 'abcdef', bToCSourceMap, [cSource], fs);
|
||||
const aToBSourceMap: SourceMapInfo = {
|
||||
mapPath: null,
|
||||
map: {
|
||||
mappings: encode([[[0, 0, 0, 0], [2, 0, 0, 3], [4, 0, 0, 2], [5, 0, 0, 5]]]),
|
||||
names: [],
|
||||
sources: ['b.js'],
|
||||
version: 3
|
||||
},
|
||||
origin: ContentOrigin.Provided,
|
||||
};
|
||||
const aSource =
|
||||
new SourceFile(_('/foo/src/a.js'), 'abdecf', aToBSourceMap, false, [bSource], fs);
|
||||
new SourceFile(_('/foo/src/a.js'), 'abdecf', aToBSourceMap, [bSource], fs);
|
||||
|
||||
const aTocSourceMap = aSource.renderFlattenedSourceMap();
|
||||
expect(aTocSourceMap.version).toEqual(3);
|
||||
|
@ -498,20 +526,24 @@ runInEachFileSystem(() => {
|
|||
});
|
||||
|
||||
it('should handle mappings that map from lines outside of the actual content lines', () => {
|
||||
const bSource = new SourceFile(_('/foo/src/b.js'), 'abcdef', null, false, [], fs);
|
||||
const aToBSourceMap: RawSourceMap = {
|
||||
mappings: encode([
|
||||
[[0, 0, 0, 0], [2, 0, 0, 3], [4, 0, 0, 2], [5, 0, 0, 5]],
|
||||
[
|
||||
[0, 0, 0, 0], // Extra mapping from a non-existent line
|
||||
]
|
||||
]),
|
||||
names: [],
|
||||
sources: ['b.js'],
|
||||
version: 3
|
||||
const bSource = new SourceFile(_('/foo/src/b.js'), 'abcdef', null, [], fs);
|
||||
const aToBSourceMap: SourceMapInfo = {
|
||||
mapPath: null,
|
||||
map: {
|
||||
mappings: encode([
|
||||
[[0, 0, 0, 0], [2, 0, 0, 3], [4, 0, 0, 2], [5, 0, 0, 5]],
|
||||
[
|
||||
[0, 0, 0, 0], // Extra mapping from a non-existent line
|
||||
]
|
||||
]),
|
||||
names: [],
|
||||
sources: ['b.js'],
|
||||
version: 3
|
||||
},
|
||||
origin: ContentOrigin.Provided,
|
||||
};
|
||||
const aSource =
|
||||
new SourceFile(_('/foo/src/a.js'), 'abdecf', aToBSourceMap, false, [bSource], fs);
|
||||
new SourceFile(_('/foo/src/a.js'), 'abdecf', aToBSourceMap, [bSource], fs);
|
||||
|
||||
const aTocSourceMap = aSource.renderFlattenedSourceMap();
|
||||
expect(aTocSourceMap.version).toEqual(3);
|
||||
|
@ -520,31 +552,39 @@ runInEachFileSystem(() => {
|
|||
expect(aTocSourceMap.sourceRoot).toBeUndefined();
|
||||
expect(aTocSourceMap.sources).toEqual(['b.js']);
|
||||
expect(aTocSourceMap.sourcesContent).toEqual(['abcdef']);
|
||||
expect(aTocSourceMap.mappings).toEqual(aToBSourceMap.mappings);
|
||||
expect(aTocSourceMap.mappings).toEqual(aToBSourceMap.map.mappings);
|
||||
});
|
||||
|
||||
it('should consolidate source-files with the same relative path', () => {
|
||||
const cSource1 = new SourceFile(_('/foo/src/lib/c.js'), 'bcd123e', null, false, [], fs);
|
||||
const cSource2 = new SourceFile(_('/foo/src/lib/c.js'), 'bcd123e', null, false, [], fs);
|
||||
const cSource1 = new SourceFile(_('/foo/src/lib/c.js'), 'bcd123e', null, [], fs);
|
||||
const cSource2 = new SourceFile(_('/foo/src/lib/c.js'), 'bcd123e', null, [], fs);
|
||||
|
||||
const bToCSourceMap: RawSourceMap = {
|
||||
mappings: encode([[[1, 0, 0, 0], [4, 0, 0, 3], [4, 0, 0, 6], [5, 0, 0, 7]]]),
|
||||
names: [],
|
||||
sources: ['c.js'],
|
||||
version: 3
|
||||
const bToCSourceMap: SourceMapInfo = {
|
||||
mapPath: null,
|
||||
map: {
|
||||
mappings: encode([[[1, 0, 0, 0], [4, 0, 0, 3], [4, 0, 0, 6], [5, 0, 0, 7]]]),
|
||||
names: [],
|
||||
sources: ['c.js'],
|
||||
version: 3
|
||||
},
|
||||
origin: ContentOrigin.Provided,
|
||||
};
|
||||
const bSource = new SourceFile(
|
||||
_('/foo/src/lib/b.js'), 'abcdef', bToCSourceMap, false, [cSource1], fs);
|
||||
const bSource =
|
||||
new SourceFile(_('/foo/src/lib/b.js'), 'abcdef', bToCSourceMap, [cSource1], fs);
|
||||
|
||||
const aToBCSourceMap: RawSourceMap = {
|
||||
mappings:
|
||||
encode([[[0, 0, 0, 0], [2, 0, 0, 3], [4, 0, 0, 2], [5, 0, 0, 5], [6, 1, 0, 3]]]),
|
||||
names: [],
|
||||
sources: ['lib/b.js', 'lib/c.js'],
|
||||
version: 3
|
||||
const aToBCSourceMap: SourceMapInfo = {
|
||||
mapPath: null,
|
||||
map: {
|
||||
mappings:
|
||||
encode([[[0, 0, 0, 0], [2, 0, 0, 3], [4, 0, 0, 2], [5, 0, 0, 5], [6, 1, 0, 3]]]),
|
||||
names: [],
|
||||
sources: ['lib/b.js', 'lib/c.js'],
|
||||
version: 3,
|
||||
},
|
||||
origin: ContentOrigin.Provided,
|
||||
};
|
||||
const aSource = new SourceFile(
|
||||
_('/foo/src/a.js'), 'abdecf123', aToBCSourceMap, false, [bSource, cSource2], fs);
|
||||
_('/foo/src/a.js'), 'abdecf123', aToBCSourceMap, [bSource, cSource2], fs);
|
||||
|
||||
const aTocSourceMap = aSource.renderFlattenedSourceMap();
|
||||
expect(aTocSourceMap.version).toEqual(3);
|
||||
|
@ -562,46 +602,54 @@ runInEachFileSystem(() => {
|
|||
|
||||
describe('getOriginalLocation()', () => {
|
||||
it('should return null for source files with no flattened mappings', () => {
|
||||
const sourceFile =
|
||||
new SourceFile(_('/foo/src/index.js'), 'index contents', null, false, [], fs);
|
||||
const sourceFile = new SourceFile(_('/foo/src/index.js'), 'index contents', null, [], fs);
|
||||
expect(sourceFile.getOriginalLocation(1, 1)).toEqual(null);
|
||||
});
|
||||
|
||||
it('should return offset locations in multiple flattened original source files', () => {
|
||||
const cSource = new SourceFile(_('/foo/src/c.js'), 'bcd123', null, false, [], fs);
|
||||
const dSource = new SourceFile(_('/foo/src/d.js'), 'aef', null, false, [], fs);
|
||||
const cSource = new SourceFile(_('/foo/src/c.js'), 'bcd123', null, [], fs);
|
||||
const dSource = new SourceFile(_('/foo/src/d.js'), 'aef', null, [], fs);
|
||||
|
||||
const bSourceMap: RawSourceMap = {
|
||||
mappings: encode([
|
||||
[
|
||||
[0, 1, 0, 0], // "a" is in d.js [source 1]
|
||||
[1, 0, 0, 0], // "bcd" are in c.js [source 0]
|
||||
[4, 1, 0, 1], // "ef" are in d.js [source 1]
|
||||
],
|
||||
]),
|
||||
names: [],
|
||||
sources: ['c.js', 'd.js'],
|
||||
version: 3
|
||||
const bSourceMap: SourceMapInfo = {
|
||||
mapPath: null,
|
||||
map: {
|
||||
mappings: encode([
|
||||
[
|
||||
[0, 1, 0, 0], // "a" is in d.js [source 1]
|
||||
[1, 0, 0, 0], // "bcd" are in c.js [source 0]
|
||||
[4, 1, 0, 1], // "ef" are in d.js [source 1]
|
||||
],
|
||||
]),
|
||||
names: [],
|
||||
sources: ['c.js', 'd.js'],
|
||||
version: 3
|
||||
},
|
||||
origin: ContentOrigin.Provided,
|
||||
};
|
||||
const bSource = new SourceFile(
|
||||
_('/foo/src/b.js'), 'abcdef', bSourceMap, false, [cSource, dSource], fs);
|
||||
const bSource =
|
||||
new SourceFile(_('/foo/src/b.js'), 'abcdef', bSourceMap, [cSource, dSource], fs);
|
||||
|
||||
const aSourceMap: RawSourceMap = {
|
||||
mappings: encode([
|
||||
[
|
||||
[0, 0, 0, 0], [2, 0, 0, 3], // "c" is missing from first line
|
||||
],
|
||||
[
|
||||
[4, 0, 0, 2], // second line has new indentation, and starts with "c"
|
||||
[5, 0, 0, 5], // "f" is here
|
||||
],
|
||||
]),
|
||||
names: [],
|
||||
sources: ['b.js'],
|
||||
version: 3
|
||||
const aSourceMap: SourceMapInfo = {
|
||||
mapPath: null,
|
||||
map: {
|
||||
mappings: encode([
|
||||
[
|
||||
[0, 0, 0, 0], [2, 0, 0, 3], // "c" is missing from first line
|
||||
],
|
||||
[
|
||||
[4, 0, 0, 2], // second line has new indentation, and starts
|
||||
// with "c"
|
||||
[5, 0, 0, 5], // "f" is here
|
||||
],
|
||||
]),
|
||||
names: [],
|
||||
sources: ['b.js'],
|
||||
version: 3
|
||||
},
|
||||
origin: ContentOrigin.Provided,
|
||||
};
|
||||
const aSource =
|
||||
new SourceFile(_('/foo/src/a.js'), 'abde\n cf', aSourceMap, false, [bSource], fs);
|
||||
new SourceFile(_('/foo/src/a.js'), 'abde\n cf', aSourceMap, [bSource], fs);
|
||||
|
||||
// Line 0
|
||||
expect(aSource.getOriginalLocation(0, 0)) // a
|
||||
|
@ -633,28 +681,32 @@ runInEachFileSystem(() => {
|
|||
});
|
||||
|
||||
it('should return offset locations across multiple lines', () => {
|
||||
const originalSource = new SourceFile(
|
||||
_('/foo/src/original.js'), 'abcdef\nghijk\nlmnop', null, false, [], fs);
|
||||
const generatedSourceMap: RawSourceMap = {
|
||||
mappings: encode([
|
||||
[
|
||||
[0, 0, 0, 0], // "ABC" [0,0] => [0,0]
|
||||
],
|
||||
[
|
||||
[0, 0, 1, 0], // "GHIJ" [1, 0] => [1,0]
|
||||
[4, 0, 0, 3], // "DEF" [1, 4] => [0,3]
|
||||
[7, 0, 1, 4], // "K" [1, 7] => [1,4]
|
||||
],
|
||||
[
|
||||
[0, 0, 2, 0], // "LMNOP" [2,0] => [2,0]
|
||||
],
|
||||
]),
|
||||
names: [],
|
||||
sources: ['original.js'],
|
||||
version: 3
|
||||
const originalSource =
|
||||
new SourceFile(_('/foo/src/original.js'), 'abcdef\nghijk\nlmnop', null, [], fs);
|
||||
const generatedSourceMap: SourceMapInfo = {
|
||||
mapPath: null,
|
||||
map: {
|
||||
mappings: encode([
|
||||
[
|
||||
[0, 0, 0, 0], // "ABC" [0,0] => [0,0]
|
||||
],
|
||||
[
|
||||
[0, 0, 1, 0], // "GHIJ" [1, 0] => [1,0]
|
||||
[4, 0, 0, 3], // "DEF" [1, 4] => [0,3]
|
||||
[7, 0, 1, 4], // "K" [1, 7] => [1,4]
|
||||
],
|
||||
[
|
||||
[0, 0, 2, 0], // "LMNOP" [2,0] => [2,0]
|
||||
],
|
||||
]),
|
||||
names: [],
|
||||
sources: ['original.js'],
|
||||
version: 3
|
||||
},
|
||||
origin: ContentOrigin.Provided,
|
||||
};
|
||||
const generatedSource = new SourceFile(
|
||||
_('/foo/src/generated.js'), 'ABC\nGHIJDEFK\nLMNOP', generatedSourceMap, false,
|
||||
_('/foo/src/generated.js'), 'ABC\nGHIJDEFK\nLMNOP', generatedSourceMap,
|
||||
[originalSource], fs);
|
||||
|
||||
// Line 0
|
||||
|
|
Loading…
Reference in New Issue