fix(ngcc): don't crash on cyclic source-map references (#36452)
The source-map flattening was throwing an error when there was a cyclic dependency between source files and source-maps. The error was either a custom one describing the cycle, or a "Maximum call stack size exceeded" error.

Now this is handled more leniently, resulting in a partially loaded source file (or source-map) and a logged warning.

Fixes #35727
Fixes #35757
Fixes https://github.com/angular/angular-cli/issues/17106
Fixes https://github.com/angular/angular-cli/issues/17115

PR Close #36452
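The strategy is easiest to see in isolation: each recursive load snapshots the paths visited so far, a revisited path raises an error, and that error is caught per file so it becomes a logged warning and a `null` entry rather than a crash. The sketch below is a minimal, hypothetical illustration of that strategy, not the actual ngcc `SourceFileLoader` API; `CycleTolerantLoader`, `readFile`, `referencesOf`, and `warn` are stand-ins introduced only for this example.

// Minimal sketch of cycle-tolerant recursive loading (illustrative only).
interface Loaded {
  path: string;
  sources: (Loaded|null)[];
}

class CycleTolerantLoader {
  // Paths loaded by the recursion that is currently in flight.
  private currentPaths: string[] = [];

  constructor(
      private readFile: (path: string) => string|undefined,
      private referencesOf: (contents: string) => string[],
      private warn: (message: string) => void) {}

  load(path: string): Loaded|null {
    // Snapshot the tracked paths so `finally` can restore them for the caller.
    const previousPaths = this.currentPaths.slice();
    try {
      if (this.currentPaths.includes(path)) {
        throw new Error(`Circular source file mapping dependency: ${
            this.currentPaths.join(' -> ')} -> ${path}`);
      }
      this.currentPaths.push(path);

      const contents = this.readFile(path);
      if (contents === undefined) {
        return null;
      }

      // Recurse into referenced files; a cycle only nulls out that branch.
      const sources = this.referencesOf(contents).map(ref => this.load(ref));
      return {path, sources};
    } catch (e) {
      // Instead of crashing the whole flattening, log and return a partial result.
      this.warn(`Unable to fully load ${path}: ${(e as Error).message}`);
      return null;
    } finally {
      this.currentPaths = previousPaths;
    }
  }
}

// Example with a cycle: a.js references b.js, which references a.js again.
const files: {[path: string]: string} = {'a.js': '-> b.js', 'b.js': '-> a.js'};
const loader = new CycleTolerantLoader(
    path => files[path],
    contents => contents.startsWith('-> ') ? [contents.slice(3)] : [],
    message => console.warn(message));
// Warns about the cycle, then yields {path: 'a.js', sources: [{path: 'b.js', sources: [null]}]}.
console.log(JSON.stringify(loader.load('a.js')));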
parent 76a8cd57ae
commit ee70a18a75
@@ -36,7 +36,7 @@ export function renderSourceAndMap(
{file: generatedPath, source: generatedPath, includeContent: true});

try {
const loader = new SourceFileLoader(fs);
const loader = new SourceFileLoader(fs, logger);
const generatedFile = loader.loadSourceFile(
generatedPath, generatedContent, {map: generatedMap, mapPath: generatedMapPath});
@@ -8,6 +8,7 @@
import {commentRegex, fromComment, mapFileCommentRegex} from 'convert-source-map';

import {absoluteFrom, AbsoluteFsPath, FileSystem} from '../../../src/ngtsc/file_system';
import {Logger} from '../logging/logger';

import {RawSourceMap} from './raw_source_map';
import {SourceFile} from './source_file';
@@ -22,61 +23,70 @@ import {SourceFile} from './source_file';
* mappings to other `SourceFile` objects as necessary.
*/
export class SourceFileLoader {
constructor(private fs: FileSystem) {}
private currentPaths: AbsoluteFsPath[] = [];

constructor(private fs: FileSystem, private logger: Logger) {}

/**
* Load a source file, compute its source map, and recursively load any referenced source files.
*
* @param sourcePath The path to the source file to load.
* @param contents The contents of the source file to load (if known).
* The contents may be known because the source file was inlined into a source map.
* @param contents The contents of the source file to load.
* @param mapAndPath The raw source-map and the path to the source-map file.
* @returns a SourceFile object created from the `contents` and provided source-map info.
*/
loadSourceFile(sourcePath: AbsoluteFsPath, contents: string, mapAndPath: MapAndPath): SourceFile;
/**
* The overload used internally to load source files referenced in a source-map.
*
* In this case there is no guarantee that it will return a non-null SourceMap.
*
* @param sourcePath The path to the source file to load.
* @param contents The contents of the source file to load, if provided inline.
* If it is not known the contents will be read from the file at the `sourcePath`.
* @param mapAndPath The raw source-map and the path to the source-map file, if known.
* @param previousPaths An internal parameter used for cyclic dependency tracking.
* @param mapAndPath The raw source-map and the path to the source-map file.
*
* @returns a SourceFile if the content for one was provided or able to be loaded from disk,
* `null` otherwise.
*/
loadSourceFile(sourcePath: AbsoluteFsPath, contents: string, mapAndPath: MapAndPath): SourceFile;
loadSourceFile(sourcePath: AbsoluteFsPath, contents: string|null): SourceFile|null;
loadSourceFile(sourcePath: AbsoluteFsPath): SourceFile|null;
loadSourceFile(sourcePath: AbsoluteFsPath, contents?: string|null, mapAndPath?: null): SourceFile
|null;
loadSourceFile(
sourcePath: AbsoluteFsPath, contents: string|null, mapAndPath: null,
previousPaths: AbsoluteFsPath[]): SourceFile|null;
loadSourceFile(
sourcePath: AbsoluteFsPath, contents: string|null = null, mapAndPath: MapAndPath|null = null,
previousPaths: AbsoluteFsPath[] = []): SourceFile|null {
if (contents === null) {
if (!this.fs.exists(sourcePath)) {
return null;
sourcePath: AbsoluteFsPath, contents: string|null = null,
mapAndPath: MapAndPath|null = null): SourceFile|null {
const previousPaths = this.currentPaths.slice();
try {
if (contents === null) {
if (!this.fs.exists(sourcePath)) {
return null;
}
contents = this.readSourceFile(sourcePath);
}

// Track source file paths if we have loaded them from disk so that we don't get into an
// infinite recursion
if (previousPaths.includes(sourcePath)) {
throw new Error(`Circular source file mapping dependency: ${
previousPaths.join(' -> ')} -> ${sourcePath}`);
// If not provided try to load the source map based on the source itself
if (mapAndPath === null) {
mapAndPath = this.loadSourceMap(sourcePath, contents);
}
previousPaths = previousPaths.concat([sourcePath]);

contents = this.fs.readFile(sourcePath);
let map: RawSourceMap|null = null;
let inline = true;
let sources: (SourceFile|null)[] = [];
if (mapAndPath !== null) {
const basePath = mapAndPath.mapPath || sourcePath;
sources = this.processSources(basePath, mapAndPath.map);
map = mapAndPath.map;
inline = mapAndPath.mapPath === null;
}

return new SourceFile(sourcePath, contents, map, inline, sources);
} catch (e) {
this.logger.warn(
`Unable to fully load ${sourcePath} for source-map flattening: ${e.message}`);
return null;
} finally {
// We are finished with this recursion so revert the paths being tracked
this.currentPaths = previousPaths;
}

// If not provided try to load the source map based on the source itself
if (mapAndPath === null) {
mapAndPath = this.loadSourceMap(sourcePath, contents);
}

let map: RawSourceMap|null = null;
let inline = true;
let sources: (SourceFile|null)[] = [];
if (mapAndPath !== null) {
const basePath = mapAndPath.mapPath || sourcePath;
sources = this.processSources(basePath, mapAndPath.map, previousPaths);
map = mapAndPath.map;
inline = mapAndPath.mapPath === null;
}

return new SourceFile(sourcePath, contents, map, inline, sources);
}

/**
@@ -97,15 +107,17 @@ export class SourceFileLoader {
try {
const fileName = external[1] || external[2];
const externalMapPath = this.fs.resolve(this.fs.dirname(sourcePath), fileName);
return {map: this.loadRawSourceMap(externalMapPath), mapPath: externalMapPath};
} catch {
return {map: this.readRawSourceMap(externalMapPath), mapPath: externalMapPath};
} catch (e) {
this.logger.warn(
`Unable to fully load ${sourcePath} for source-map flattening: ${e.message}`);
return null;
}
}

const impliedMapPath = absoluteFrom(sourcePath + '.map');
if (this.fs.exists(impliedMapPath)) {
return {map: this.loadRawSourceMap(impliedMapPath), mapPath: impliedMapPath};
return {map: this.readRawSourceMap(impliedMapPath), mapPath: impliedMapPath};
}

return null;
@@ -115,24 +127,47 @@ export class SourceFileLoader {
* Iterate over each of the "sources" for this source file's source map, recursively loading each
* source file and its associated source map.
*/
private processSources(
basePath: AbsoluteFsPath, map: RawSourceMap,
previousPaths: AbsoluteFsPath[]): (SourceFile|null)[] {
private processSources(basePath: AbsoluteFsPath, map: RawSourceMap): (SourceFile|null)[] {
const sourceRoot = this.fs.resolve(this.fs.dirname(basePath), map.sourceRoot || '');
return map.sources.map((source, index) => {
const path = this.fs.resolve(sourceRoot, source);
const content = map.sourcesContent && map.sourcesContent[index] || null;
return this.loadSourceFile(path, content, null, previousPaths);
return this.loadSourceFile(path, content, null);
});
}

/**
* Load the contents of the source file from disk.
*
* @param sourcePath The path to the source file.
*/
private readSourceFile(sourcePath: AbsoluteFsPath): string {
this.trackPath(sourcePath);
return this.fs.readFile(sourcePath);
}

/**
* Load the source map from the file at `mapPath`, parsing its JSON contents into a `RawSourceMap`
* object.
*
* @param mapPath The path to the source-map file.
*/
private loadRawSourceMap(mapPath: AbsoluteFsPath): RawSourceMap {
private readRawSourceMap(mapPath: AbsoluteFsPath): RawSourceMap {
this.trackPath(mapPath);
return JSON.parse(this.fs.readFile(mapPath));
}

/**
* Track source file paths if we have loaded them from disk so that we don't get into an infinite
* recursion.
*/
private trackPath(path: AbsoluteFsPath): void {
if (this.currentPaths.includes(path)) {
throw new Error(
`Circular source file mapping dependency: ${this.currentPaths.join(' -> ')} -> ${path}`);
}
this.currentPaths.push(path);
}
}

/** A small helper structure that is returned from `loadSourceMap()`. */
@@ -11,16 +11,19 @@ import {fromObject} from 'convert-source-map';
import {runInEachFileSystem} from '../../../src/ngtsc/file_system/testing';
import {RawSourceMap} from '../../src/sourcemaps/raw_source_map';
import {SourceFileLoader as SourceFileLoader} from '../../src/sourcemaps/source_file_loader';
import {MockLogger} from '../helpers/mock_logger';

runInEachFileSystem(() => {
describe('SourceFileLoader', () => {
let fs: FileSystem;
let logger: MockLogger;
let _: typeof absoluteFrom;
let registry: SourceFileLoader;
beforeEach(() => {
fs = getFileSystem();
logger = new MockLogger();
_ = absoluteFrom;
registry = new SourceFileLoader(fs);
registry = new SourceFileLoader(fs, logger);
});

describe('loadSourceFile', () => {
@@ -182,31 +185,75 @@ runInEachFileSystem(() => {
});
});

it('should fail if there is a cyclic dependency in files loaded from disk', () => {
fs.ensureDir(_('/foo/src'));
it('should log a warning if there is a cyclic dependency in source files loaded from disk',
() => {
fs.ensureDir(_('/foo/src'));

const aPath = _('/foo/src/a.js');
fs.writeFile(
aPath,
'a content\n' +
fromObject(createRawSourceMap({file: 'a.js', sources: ['b.js']})).toComment());
const aMap = createRawSourceMap({file: 'a.js', sources: ['b.js']});

const bPath = _('/foo/src/b.js');
fs.writeFile(
bPath,
'b content\n' +
fromObject(createRawSourceMap({file: 'b.js', sources: ['c.js']})).toComment());
const aPath = _('/foo/src/a.js');
fs.writeFile(aPath, 'a content\n' + fromObject(aMap).toComment());

const cPath = _('/foo/src/c.js');
fs.writeFile(
cPath,
'c content\n' +
fromObject(createRawSourceMap({file: 'c.js', sources: ['a.js']})).toComment());
const bPath = _('/foo/src/b.js');
fs.writeFile(
bPath,
'b content\n' +
fromObject(createRawSourceMap({file: 'b.js', sources: ['c.js']})).toComment());

expect(() => registry.loadSourceFile(aPath))
.toThrowError(`Circular source file mapping dependency: ${aPath} -> ${bPath} -> ${
cPath} -> ${aPath}`);
});
const cPath = _('/foo/src/c.js');
fs.writeFile(
cPath,
'c content\n' +
fromObject(createRawSourceMap({file: 'c.js', sources: ['a.js']})).toComment());

const sourceFile = registry.loadSourceFile(aPath)!;
expect(sourceFile).not.toBe(null!);
expect(sourceFile.contents).toEqual('a content\n');
expect(sourceFile.sourcePath).toEqual(_('/foo/src/a.js'));
expect(sourceFile.rawMap).toEqual(aMap);
expect(sourceFile.sources.length).toEqual(1);

expect(logger.logs.warn[0][0])
.toContain(
`Circular source file mapping dependency: ` +
`${aPath} -> ${bPath} -> ${cPath} -> ${aPath}`);
});

it('should log a warning if there is a cyclic dependency in source maps loaded from disk',
() => {
fs.ensureDir(_('/foo/src'));

// Create a self-referencing source-map
const aMap = createRawSourceMap({
file: 'a.js',
sources: ['a.js'],
sourcesContent: ['inline a.js content\n//# sourceMappingURL=a.js.map']
});
const aMapPath = _('/foo/src/a.js.map');
fs.writeFile(aMapPath, JSON.stringify(aMap));

const aPath = _('/foo/src/a.js');
fs.writeFile(aPath, 'a.js content\n//# sourceMappingURL=a.js.map');

const sourceFile = registry.loadSourceFile(aPath)!;
expect(sourceFile).not.toBe(null!);
expect(sourceFile.contents).toEqual('a.js content\n');
expect(sourceFile.sourcePath).toEqual(_('/foo/src/a.js'));
expect(sourceFile.rawMap).toEqual(aMap);
expect(sourceFile.sources.length).toEqual(1);

expect(logger.logs.warn[0][0])
.toContain(
`Circular source file mapping dependency: ` +
`${aPath} -> ${aMapPath} -> ${aMapPath}`);

const innerSourceFile = sourceFile.sources[0]!;
expect(innerSourceFile).not.toBe(null!);
expect(innerSourceFile.contents).toEqual('inline a.js content\n');
expect(innerSourceFile.sourcePath).toEqual(_('/foo/src/a.js'));
expect(innerSourceFile.rawMap).toEqual(null);
expect(innerSourceFile.sources.length).toEqual(0);
});

it('should not fail if there is a cyclic dependency in filenames of inline sources', () => {
fs.ensureDir(_('/foo/src'));