fix(ngcc): handle multiple original sources when flattening source-maps (#36027)
Previously, the list of original segments that was searched for incoming mappings did not differentiate between different original source files. Now there is a separate array of segments to search for each original source file.

PR Close #36027
parent 348ff0c8ea
commit a40be00e17
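In rough terms, the change replaces the single flat, sorted list of original segment markers with a map keyed by original source file, so the search for an incoming mapping's end marker only ever looks at markers from the same file. A minimal TypeScript sketch of that shape (illustrative only and simplified from the diff below; the `*Like` types, `groupSegmentsBySource`, and `compareSegments` here are stand-ins, not the real ngcc declarations):

// Minimal illustrative types; the real ones live in ngcc's source-map utilities.
interface SegmentMarker { line: number; column: number; }
interface SourceFileLike { url: string; }
interface MappingLike { originalSource: SourceFileLike; originalSegment: SegmentMarker; }

// Assumed ordering: by line, then by column.
const compareSegments = (a: SegmentMarker, b: SegmentMarker) =>
    a.line - b.line || a.column - b.column;

// Before: one flat, sorted array of markers across all original sources.
// After: one sorted array per original source, so a marker from one file is
// never used as the search space for a mapping that points into another file.
function groupSegmentsBySource(mappings: MappingLike[]): Map<SourceFileLike, SegmentMarker[]> {
  const bySource = new Map<SourceFileLike, SegmentMarker[]>();
  for (const mapping of mappings) {
    const segments = bySource.get(mapping.originalSource) ?? [];
    segments.push(mapping.originalSegment);
    bySource.set(mapping.originalSource, segments);
  }
  bySource.forEach(segments => segments.sort(compareSegments));
  return bySource;
}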
@@ -90,7 +90,7 @@ export class SourceFile {
    */
   private flattenMappings(): Mapping[] {
     const mappings = parseMappings(this.rawMap, this.sources);
-    const originalSegments = extractOriginalSegments(mappings);
+    const originalSegmentsBySource = extractOriginalSegments(mappings);
     const flattenedMappings: Mapping[] = [];
     for (let mappingIndex = 0; mappingIndex < mappings.length; mappingIndex++) {
       const aToBmapping = mappings[mappingIndex];
@@ -120,6 +120,8 @@ export class SourceFile {
       // For mapping [0,0] the incoming start and end are 0 and 2 (i.e. the range a, b, c)
       // For mapping [4,2] the incoming start and end are 2 and 5 (i.e. the range c, d, e, f)
       //
+
+      const originalSegments = originalSegmentsBySource.get(bSource) !;
       const incomingStart = aToBmapping.originalSegment;
       const incomingEndIndex = originalSegments.indexOf(incomingStart) + 1;
       const incomingEnd = incomingEndIndex < originalSegments.length ?
@@ -337,8 +339,26 @@ export function parseMappings(
   return mappings;
 }
 
-export function extractOriginalSegments(mappings: Mapping[]): SegmentMarker[] {
-  return mappings.map(mapping => mapping.originalSegment).sort(compareSegments);
+/**
+ * Extract the segment markers from the original source files in each mapping of an array of
+ * `mappings`.
+ *
+ * @param mappings The mappings whose original segments we want to extract
+ * @returns Return a map from original source-files (referenced in the `mappings`) to arrays of
+ * segment-markers sorted by their order in their source file.
+ */
+export function extractOriginalSegments(mappings: Mapping[]): Map<SourceFile, SegmentMarker[]> {
+  const originalSegments = new Map<SourceFile, SegmentMarker[]>();
+  for (const mapping of mappings) {
+    const originalSource = mapping.originalSource;
+    if (!originalSegments.has(originalSource)) {
+      originalSegments.set(originalSource, []);
+    }
+    const segments = originalSegments.get(originalSource) !;
+    segments.push(mapping.originalSegment);
+  }
+  originalSegments.forEach(segmentMarkers => segmentMarkers.sort(compareSegments));
+  return originalSegments;
 }
 
 export function computeLineLengths(str: string): number[] {
@@ -58,12 +58,12 @@ runInEachFileSystem(() => {
     });
 
     describe('extractOriginalSegments()', () => {
-      it('should return an empty array for source files with no source map',
-         () => { expect(extractOriginalSegments(parseMappings(null, []))).toEqual([]); });
+      it('should return an empty Map for source files with no source map',
+         () => { expect(extractOriginalSegments(parseMappings(null, []))).toEqual(new Map()); });
 
-      it('should be empty array for source files with no source map mappings', () => {
+      it('should be empty Map for source files with no source map mappings', () => {
         const rawSourceMap: RawSourceMap = {mappings: '', names: [], sources: [], version: 3};
-        expect(extractOriginalSegments(parseMappings(rawSourceMap, []))).toEqual([]);
+        expect(extractOriginalSegments(parseMappings(rawSourceMap, []))).toEqual(new Map());
       });
 
       it('should parse the segments in ascending order of original position from the raw source map',
@@ -75,12 +75,37 @@ runInEachFileSystem(() => {
           sources: ['a.js'],
           version: 3
         };
-        expect(extractOriginalSegments(parseMappings(rawSourceMap, [originalSource]))).toEqual([
+        const originalSegments =
+            extractOriginalSegments(parseMappings(rawSourceMap, [originalSource]));
+        expect(originalSegments.get(originalSource)).toEqual([
           {line: 0, column: 0},
           {line: 0, column: 2},
           {line: 0, column: 3},
         ]);
       });
+
+      it('should create separate arrays for each original source file', () => {
+        const sourceA = new SourceFile(_('/foo/src/a.js'), 'abcdefg', null, false, []);
+        const sourceB = new SourceFile(_('/foo/src/b.js'), '1234567', null, false, []);
+        const rawSourceMap: RawSourceMap = {
+          mappings:
+              encode([[[0, 0, 0, 0], [2, 1, 0, 3], [4, 0, 0, 2], [5, 1, 0, 5], [6, 1, 0, 2]]]),
+          names: [],
+          sources: ['a.js', 'b.js'],
+          version: 3
+        };
+        const originalSegments =
+            extractOriginalSegments(parseMappings(rawSourceMap, [sourceA, sourceB]));
+        expect(originalSegments.get(sourceA)).toEqual([
+          {line: 0, column: 0},
+          {line: 0, column: 2},
+        ]);
+        expect(originalSegments.get(sourceB)).toEqual([
+          {line: 0, column: 2},
+          {line: 0, column: 3},
+          {line: 0, column: 5},
+        ]);
+      });
     });
 
     describe('findLastMappingIndexBefore', () => {
@@ -313,15 +338,18 @@ runInEachFileSystem(() => {
       });
 
       it('should merge mappings from flattened original source files', () => {
-        const cSource = new SourceFile(_('/foo/src/c.js'), 'bcd123e', null, false, []);
+        const cSource = new SourceFile(_('/foo/src/c.js'), 'bcd123', null, false, []);
+        const dSource = new SourceFile(_('/foo/src/d.js'), 'aef', null, false, []);
+
         const bSourceMap: RawSourceMap = {
-          mappings: encode([[[1, 0, 0, 0], [4, 0, 0, 3], [4, 0, 0, 6], [5, 0, 0, 7]]]),
+          mappings: encode([[[0, 1, 0, 0], [1, 0, 0, 0], [4, 1, 0, 1]]]),
           names: [],
-          sources: ['c.js'],
+          sources: ['c.js', 'd.js'],
           version: 3
         };
         const bSource =
-            new SourceFile(_('/foo/src/b.js'), 'abcdef', bSourceMap, false, [cSource]);
+            new SourceFile(_('/foo/src/b.js'), 'abcdef', bSourceMap, false, [cSource, dSource]);
+
         const aSourceMap: RawSourceMap = {
           mappings: encode([[[0, 0, 0, 0], [2, 0, 0, 3], [4, 0, 0, 2], [5, 0, 0, 5]]]),
           names: [],
@@ -332,6 +360,12 @@ runInEachFileSystem(() => {
             new SourceFile(_('/foo/src/a.js'), 'abdecf', aSourceMap, false, [bSource]);
 
         expect(aSource.flattenedMappings).toEqual([
+          {
+            generatedSegment: {line: 0, column: 0},
+            originalSource: dSource,
+            originalSegment: {line: 0, column: 0},
+            name: undefined
+          },
           {
             generatedSegment: {line: 0, column: 1},
             originalSource: cSource,
@@ -346,14 +380,8 @@ runInEachFileSystem(() => {
           },
           {
             generatedSegment: {line: 0, column: 3},
-            originalSource: cSource,
-            originalSegment: {line: 0, column: 3},
-            name: undefined
-          },
-          {
-            generatedSegment: {line: 0, column: 3},
-            originalSource: cSource,
-            originalSegment: {line: 0, column: 6},
+            originalSource: dSource,
+            originalSegment: {line: 0, column: 1},
             name: undefined
           },
           {
@@ -364,8 +392,8 @@ runInEachFileSystem(() => {
           },
           {
             generatedSegment: {line: 0, column: 5},
-            originalSource: cSource,
-            originalSegment: {line: 0, column: 7},
+            originalSource: dSource,
+            originalSegment: {line: 0, column: 2},
             name: undefined
           },
         ]);