fix(compiler): include leading whitespace in source-spans of i18n messages (#42062)
Previously, the way templates were tokenized meant that we lost information about the location of interpolations if the template contained encoded HTML entities. This meant that the mapping back to the source interpolated strings could be offset incorrectly. Also, the source-span assigned to an i18n message did not include leading whitespace. This confused the output source-mappings so that the first text nodes of the message stopped at the first non-whitespace character. This commit makes use of the previous refactorings, where more fine-grained information was provided in text tokens, to enable the parser to identify the location of the interpolations in the original source more accurately. Fixes #41034 PR Close #42062
This commit is contained in:
parent
973f9b8d19
commit
f08516db09
|
@ -1,6 +1,6 @@
|
|||
.ɵɵelementStart(0, "div") // SOURCE: "/escaped_chars.html" "<div>\r\n "
|
||||
…
|
||||
// NOTE: the `\\r\\n` at the end of the next line will be unescaped to `\r\n`. If it was just `\r\n` it would get unescaped to the actual characters.
|
||||
.ɵɵtext(1, " Some Message Encoded character: \uD83D\uDE80\\n") // SOURCE: "/escaped_chars.html" "Some Message\r\n Encoded character: 🚀\\r\\n"
|
||||
.ɵɵtext(1, " Some Message Encoded character: \uD83D\uDE80\\n") // SOURCE: "/escaped_chars.html" "Some Message\r\n Encoded character: 🚀\r\n"
|
||||
…
|
||||
.ɵɵelementEnd() // SOURCE: "/escaped_chars.html" "</div>"
|
||||
|
|
|
@ -1652,6 +1652,78 @@ export declare class TestCmp {
|
|||
static ɵcmp: i0.ɵɵComponentDeclaration<TestCmp, "test-cmp", never, {}, {}, never, never>;
|
||||
}
|
||||
|
||||
/****************************************************************************************************
|
||||
* PARTIAL FILE: i18n_message_placeholder_entities.js
|
||||
****************************************************************************************************/
|
||||
import { Component } from '@angular/core';
|
||||
import * as i0 from "@angular/core";
|
||||
export class TestCmp {
|
||||
constructor() {
|
||||
this.one = 1;
|
||||
this.two = 2;
|
||||
}
|
||||
}
|
||||
TestCmp.ɵfac = i0.ɵɵngDeclareFactory({ minVersion: "12.0.0", version: "0.0.0-PLACEHOLDER", ngImport: i0, type: TestCmp, deps: [], target: i0.ɵɵFactoryTarget.Component });
|
||||
TestCmp.ɵcmp = i0.ɵɵngDeclareComponent({ minVersion: "12.0.0", version: "0.0.0-PLACEHOLDER", type: TestCmp, selector: "test-cmp", ngImport: i0, template: '<div i18n>Interpolation: {{ one }} Interpolation: {{ two }}</div>', isInline: true });
|
||||
i0.ɵɵngDeclareClassMetadata({ minVersion: "12.0.0", version: "0.0.0-PLACEHOLDER", ngImport: i0, type: TestCmp, decorators: [{
|
||||
type: Component,
|
||||
args: [{
|
||||
selector: 'test-cmp',
|
||||
template: '<div i18n>Interpolation: {{ one }} Interpolation: {{ two }}</div>',
|
||||
}]
|
||||
}] });
|
||||
//# sourceMappingURL=i18n_message_placeholder_entities.js.map
|
||||
/****************************************************************************************************
|
||||
* PARTIAL FILE: i18n_message_placeholder_entities.js.map
|
||||
****************************************************************************************************/
|
||||
{"version":3,"file":"i18n_message_placeholder_entities.js","sourceRoot":"","sources":["../i18n_message_placeholder_entities.ts"],"names":[],"mappings":"AAAA,OAAO,EAAC,SAAS,EAAC,MAAM,eAAe,CAAC;;AAMxC,MAAM,OAAO,OAAO;IAJpB;QAKE,QAAG,GAAG,CAAC,CAAC;QACR,QAAG,GAAG,CAAC,CAAC;KACT;;+GAHY,OAAO;mGAAP,OAAO,gDAFR,wEAAwE;sGAEvE,OAAO;kBAJnB,SAAS;mBAAC;oBACT,QAAQ,EAAE,UAAU;oBACpB,QAAQ,EAAE,wEAAwE;iBACnF"}
|
||||
/****************************************************************************************************
|
||||
* PARTIAL FILE: i18n_message_placeholder_entities.d.ts
|
||||
****************************************************************************************************/
|
||||
import * as i0 from "@angular/core";
|
||||
export declare class TestCmp {
|
||||
one: number;
|
||||
two: number;
|
||||
static ɵfac: i0.ɵɵFactoryDeclaration<TestCmp, never>;
|
||||
static ɵcmp: i0.ɵɵComponentDeclaration<TestCmp, "test-cmp", never, {}, {}, never, never>;
|
||||
}
|
||||
|
||||
/****************************************************************************************************
|
||||
* PARTIAL FILE: i18n_message_placeholder_entities.js
|
||||
****************************************************************************************************/
|
||||
import { Component } from '@angular/core';
|
||||
import * as i0 from "@angular/core";
|
||||
export class TestCmp {
|
||||
constructor() {
|
||||
this.one = 1;
|
||||
this.two = 2;
|
||||
}
|
||||
}
|
||||
TestCmp.ɵfac = i0.ɵɵngDeclareFactory({ minVersion: "12.0.0", version: "0.0.0-PLACEHOLDER", ngImport: i0, type: TestCmp, deps: [], target: i0.ɵɵFactoryTarget.Component });
|
||||
TestCmp.ɵcmp = i0.ɵɵngDeclareComponent({ minVersion: "12.0.0", version: "0.0.0-PLACEHOLDER", type: TestCmp, selector: "test-cmp", ngImport: i0, template: '<div i18n>Interpolation: {{ one }} Interpolation: {{ two }}</div>', isInline: true });
|
||||
i0.ɵɵngDeclareClassMetadata({ minVersion: "12.0.0", version: "0.0.0-PLACEHOLDER", ngImport: i0, type: TestCmp, decorators: [{
|
||||
type: Component,
|
||||
args: [{
|
||||
selector: 'test-cmp',
|
||||
template: '<div i18n>Interpolation: {{ one }} Interpolation: {{ two }}</div>',
|
||||
}]
|
||||
}] });
|
||||
//# sourceMappingURL=i18n_message_placeholder_entities.js.map
|
||||
/****************************************************************************************************
|
||||
* PARTIAL FILE: i18n_message_placeholder_entities.js.map
|
||||
****************************************************************************************************/
|
||||
{"version":3,"file":"i18n_message_placeholder_entities.js","sourceRoot":"","sources":["../i18n_message_placeholder_entities.ts"],"names":[],"mappings":"AAAA,OAAO,EAAC,SAAS,EAAC,MAAM,eAAe,CAAC;;AAMxC,MAAM,OAAO,OAAO;IAJpB;QAKE,QAAG,GAAG,CAAC,CAAC;QACR,QAAG,GAAG,CAAC,CAAC;KACT;;+GAHY,OAAO;mGAAP,OAAO,gDAFR,wEAAwE;sGAEvE,OAAO;kBAJnB,SAAS;mBAAC;oBACT,QAAQ,EAAE,UAAU;oBACpB,QAAQ,EAAE,wEAAwE;iBACnF"}
|
||||
/****************************************************************************************************
|
||||
* PARTIAL FILE: i18n_message_placeholder_entities.d.ts
|
||||
****************************************************************************************************/
|
||||
import * as i0 from "@angular/core";
|
||||
export declare class TestCmp {
|
||||
one: number;
|
||||
two: number;
|
||||
static ɵfac: i0.ɵɵFactoryDeclaration<TestCmp, never>;
|
||||
static ɵcmp: i0.ɵɵComponentDeclaration<TestCmp, "test-cmp", never, {}, {}, never, never>;
|
||||
}
|
||||
|
||||
/****************************************************************************************************
|
||||
* PARTIAL FILE: i18n_message_interpolation_whitespace.js
|
||||
****************************************************************************************************/
|
||||
|
|
|
@ -749,6 +749,40 @@
|
|||
"sourceMap": true
|
||||
}
|
||||
},
|
||||
{
|
||||
"description": "should handle encoded entities in i18n message source-mappings (full compile)",
|
||||
"inputFiles": [
|
||||
"i18n_message_placeholder_entities.ts"
|
||||
],
|
||||
"compilationModeFilter": [
|
||||
"full compile"
|
||||
],
|
||||
"compilerOptions": {
|
||||
"sourceMap": true
|
||||
}
|
||||
},
|
||||
{
|
||||
"description": "should handle encoded entities in i18n message source-mappings (partial compile)",
|
||||
"inputFiles": [
|
||||
"i18n_message_placeholder_entities.ts"
|
||||
],
|
||||
"expectations": [
|
||||
{
|
||||
"files": [
|
||||
{
|
||||
"generated": "i18n_message_placeholder_entities.js",
|
||||
"expected": "i18n_message_placeholder_entities_partial.js"
|
||||
}
|
||||
]
|
||||
}
|
||||
],
|
||||
"compilationModeFilter": [
|
||||
"linked compile"
|
||||
],
|
||||
"compilerOptions": {
|
||||
"sourceMap": true
|
||||
}
|
||||
},
|
||||
{
|
||||
"description": "should correctly handle collapsed whitespace in interpolation placeholder i18n message source-mappings (full compile)",
|
||||
"inputFiles": [
|
||||
|
|
|
@ -1,19 +1,19 @@
|
|||
` pre-p ${ // SOURCE: "/i18n_message_element_whitespace.ts" "pre-p\\n "
|
||||
` pre-p ${ // SOURCE: "/i18n_message_element_whitespace.ts" "\\n pre-p\\n "
|
||||
…
|
||||
"\uFFFD#2\uFFFD" // SOURCE: "/i18n_message_element_whitespace.ts" "<p>\\n "
|
||||
"\uFFFD#2\uFFFD" // SOURCE: "/i18n_message_element_whitespace.ts" "<p>"
|
||||
…
|
||||
}:START_PARAGRAPH: in-p ${ // SOURCE: "/i18n_message_element_whitespace.ts" "in-p\\n "
|
||||
}:START_PARAGRAPH: in-p ${ // SOURCE: "/i18n_message_element_whitespace.ts" "\\n in-p\\n "
|
||||
…
|
||||
"\uFFFD/#2\uFFFD" // SOURCE: "/i18n_message_element_whitespace.ts" "</p>"
|
||||
…
|
||||
}:CLOSE_PARAGRAPH: post-p\n` // SOURCE: "/i18n_message_element_whitespace.ts" "post-p\\n"
|
||||
}:CLOSE_PARAGRAPH: post-p\n` // SOURCE: "/i18n_message_element_whitespace.ts" "\\n post-p\\n"
|
||||
…
|
||||
|
||||
i0.ɵɵelementStart(0, "div") // SOURCE: "/i18n_message_element_whitespace.ts" "<div i18n>"
|
||||
…
|
||||
i0.ɵɵi18nStart(1, 0) // SOURCE: "/i18n_message_element_whitespace.ts" "<div i18n>"
|
||||
…
|
||||
i0.ɵɵelement(2, "p") // SOURCE: "/i18n_message_element_whitespace.ts" "<p>\\n "
|
||||
i0.ɵɵelement(2, "p") // SOURCE: "/i18n_message_element_whitespace.ts" "<p>"
|
||||
…
|
||||
i0.ɵɵi18nEnd() // SOURCE: "/i18n_message_element_whitespace.ts" "</div>"
|
||||
…
|
||||
|
|
|
@ -1,19 +1,19 @@
|
|||
$localize` pre-p ${ // SOURCE: "/i18n_message_element_whitespace.ts" "pre-p\\n "
|
||||
$localize` pre-p ${ // SOURCE: "/i18n_message_element_whitespace.ts" "\\n pre-p\\n "
|
||||
…
|
||||
"\uFFFD#2\uFFFD" // SOURCE: "/i18n_message_element_whitespace.ts" "<p>\\n "
|
||||
"\uFFFD#2\uFFFD" // SOURCE: "/i18n_message_element_whitespace.ts" "<p>"
|
||||
…
|
||||
}:START_PARAGRAPH: in-p ${ // SOURCE: "/i18n_message_element_whitespace.ts" "in-p\\n "
|
||||
}:START_PARAGRAPH: in-p ${ // SOURCE: "/i18n_message_element_whitespace.ts" "\\n in-p\\n "
|
||||
…
|
||||
"\uFFFD/#2\uFFFD" // SOURCE: "/i18n_message_element_whitespace.ts" "</p>\\n "
|
||||
"\uFFFD/#2\uFFFD" // SOURCE: "/i18n_message_element_whitespace.ts" "</p>"
|
||||
…
|
||||
}:CLOSE_PARAGRAPH: post-p\n` // SOURCE: "/i18n_message_element_whitespace.ts" "post-p\\n"
|
||||
}:CLOSE_PARAGRAPH: post-p\n` // SOURCE: "/i18n_message_element_whitespace.ts" "\\n post-p\\n"
|
||||
…
|
||||
|
||||
.ɵɵelementStart(0, "div") // SOURCE: "/i18n_message_element_whitespace.ts" "<div i18n>\\n "
|
||||
.ɵɵelementStart(0, "div") // SOURCE: "/i18n_message_element_whitespace.ts" "<div i18n>"
|
||||
…
|
||||
.ɵɵi18nStart(1, 0) // SOURCE: "/i18n_message_element_whitespace.ts" "<div i18n>\\n "
|
||||
.ɵɵi18nStart(1, 0) // SOURCE: "/i18n_message_element_whitespace.ts" "<div i18n>"
|
||||
…
|
||||
.ɵɵelement(2, "p") // SOURCE: "/i18n_message_element_whitespace.ts" "<p>\\n "
|
||||
.ɵɵelement(2, "p") // SOURCE: "/i18n_message_element_whitespace.ts" "<p>"
|
||||
…
|
||||
.ɵɵi18nEnd() // SOURCE: "/i18n_message_element_whitespace.ts" "</div>'"
|
||||
…
|
||||
|
|
|
@ -0,0 +1,9 @@
|
|||
`Interpolation: ${ // SOURCE: "/i18n_message_placeholder_entities.ts" "Interpolation: "
|
||||
…
|
||||
"\uFFFD0\uFFFD" // SOURCE: "/i18n_message_placeholder_entities.ts" "{{ one }}"
|
||||
…
|
||||
}:INTERPOLATION: Interpolation: ${ // SOURCE: "/i18n_message_placeholder_entities.ts" " Interpolation: "
|
||||
…
|
||||
"\uFFFD1\uFFFD" // SOURCE: "/i18n_message_placeholder_entities.ts" "{{ two }}"
|
||||
…
|
||||
}:INTERPOLATION_1:` // SOURCE: "/i18n_message_placeholder_entities.ts" "</div>"
|
|
@ -0,0 +1,10 @@
|
|||
import {Component} from '@angular/core';
|
||||
|
||||
@Component({
|
||||
selector: 'test-cmp',
|
||||
template: '<div i18n>Interpolation: {{ one }} Interpolation: {{ two }}</div>',
|
||||
})
|
||||
export class TestCmp {
|
||||
one = 1;
|
||||
two = 2;
|
||||
}
|
|
@ -0,0 +1,9 @@
|
|||
$localize`Interpolation: ${ // SOURCE: "/i18n_message_placeholder_entities.ts" "Interpolation: "
|
||||
…
|
||||
"\uFFFD0\uFFFD" // SOURCE: "/i18n_message_placeholder_entities.ts" "{{ one }}"
|
||||
…
|
||||
}:INTERPOLATION: Interpolation: ${ // SOURCE: "/i18n_message_placeholder_entities.ts" " Interpolation: "
|
||||
…
|
||||
"\uFFFD1\uFFFD" // SOURCE: "/i18n_message_placeholder_entities.ts" "{{ two }}"
|
||||
…
|
||||
}:INTERPOLATION_1:` // SOURCE: "/i18n_message_placeholder_entities.ts" "</div>'"
|
|
@ -10,7 +10,7 @@
|
|||
.ɵɵtext(3) // SOURCE: "/update_mode.ts" "{{ 1 + 2 }}"
|
||||
…
|
||||
// TODO: Work out how to fix the broken segment for the last item in a template
|
||||
.ɵɵelem // SOURCE: "/update_mode.ts" "</div>"'
|
||||
.ɵɵelem // SOURCE: "/update_mode.ts" "</div>'"
|
||||
…
|
||||
// NOTE: Update mode
|
||||
.ɵɵtextInterpolate(1 + 2) // SOURCE: "/update_mode.ts" "{{ 1 + 2 }}"
|
||||
|
|
|
@ -464,27 +464,27 @@ runInEachFileSystem((os) => {
|
|||
// $localize expressions
|
||||
expectMapping(mappings, {
|
||||
sourceUrl: '../test.ts',
|
||||
source: 'pre-p\n ',
|
||||
source: '\n pre-p\n ',
|
||||
generated: '` pre-p ${',
|
||||
});
|
||||
expectMapping(mappings, {
|
||||
sourceUrl: '../test.ts',
|
||||
source: '<p>\n ',
|
||||
source: '<p>',
|
||||
generated: '"\\uFFFD#2\\uFFFD"',
|
||||
});
|
||||
expectMapping(mappings, {
|
||||
sourceUrl: '../test.ts',
|
||||
source: 'in-p\n ',
|
||||
source: '\n in-p\n ',
|
||||
generated: '}:START_PARAGRAPH: in-p ${',
|
||||
});
|
||||
expectMapping(mappings, {
|
||||
sourceUrl: '../test.ts',
|
||||
source: '</p>\n ',
|
||||
source: '</p>',
|
||||
generated: '"\\uFFFD/#2\\uFFFD"',
|
||||
});
|
||||
expectMapping(mappings, {
|
||||
sourceUrl: '../test.ts',
|
||||
source: 'post-p\n',
|
||||
source: '\n post-p\n',
|
||||
generated: '}:CLOSE_PARAGRAPH: post-p\n`',
|
||||
});
|
||||
// ivy instructions
|
||||
|
|
|
@ -7,10 +7,11 @@
|
|||
*/
|
||||
|
||||
import {Lexer as ExpressionLexer} from '../expression_parser/lexer';
|
||||
import {InterpolationPiece, Parser as ExpressionParser} from '../expression_parser/parser';
|
||||
import {Parser as ExpressionParser} from '../expression_parser/parser';
|
||||
import * as html from '../ml_parser/ast';
|
||||
import {getHtmlTagDefinition} from '../ml_parser/html_tags';
|
||||
import {InterpolationConfig} from '../ml_parser/interpolation_config';
|
||||
import {Token, TokenType} from '../ml_parser/lexer';
|
||||
import {ParseSourceSpan} from '../parse_util';
|
||||
|
||||
import * as i18n from './i18n_ast';
|
||||
|
@ -105,13 +106,18 @@ class _I18nVisitor implements html.Visitor {
|
|||
}
|
||||
|
||||
visitAttribute(attribute: html.Attribute, context: I18nMessageVisitorContext): i18n.Node {
|
||||
const node = this._visitTextWithInterpolation(
|
||||
attribute.value, attribute.valueSpan || attribute.sourceSpan, context, attribute.i18n);
|
||||
const node = attribute.valueTokens === undefined || attribute.valueTokens.length === 1 ?
|
||||
new i18n.Text(attribute.value, attribute.valueSpan || attribute.sourceSpan) :
|
||||
this._visitTextWithInterpolation(
|
||||
attribute.valueTokens, attribute.valueSpan || attribute.sourceSpan, context,
|
||||
attribute.i18n);
|
||||
return context.visitNodeFn(attribute, node);
|
||||
}
|
||||
|
||||
visitText(text: html.Text, context: I18nMessageVisitorContext): i18n.Node {
|
||||
const node = this._visitTextWithInterpolation(text.value, text.sourceSpan, context, text.i18n);
|
||||
const node = text.tokens.length === 1 ?
|
||||
new i18n.Text(text.value, text.sourceSpan) :
|
||||
this._visitTextWithInterpolation(text.tokens, text.sourceSpan, context, text.i18n);
|
||||
return context.visitNodeFn(text, node);
|
||||
}
|
||||
|
||||
|
@ -165,66 +171,36 @@ class _I18nVisitor implements html.Visitor {
|
|||
* @param previousI18n Any i18n metadata associated with this `text` from a previous pass.
|
||||
*/
|
||||
private _visitTextWithInterpolation(
|
||||
text: string, sourceSpan: ParseSourceSpan, context: I18nMessageVisitorContext,
|
||||
tokens: Token[], sourceSpan: ParseSourceSpan, context: I18nMessageVisitorContext,
|
||||
previousI18n: i18n.I18nMeta|undefined): i18n.Node {
|
||||
const {strings, expressions} = this._expressionParser.splitInterpolation(
|
||||
text, sourceSpan.start.toString(), this._interpolationConfig);
|
||||
|
||||
// No expressions, return a single text.
|
||||
if (expressions.length === 0) {
|
||||
return new i18n.Text(text, sourceSpan);
|
||||
}
|
||||
|
||||
// Return a sequence of `Text` and `Placeholder` nodes grouped in a `Container`.
|
||||
const nodes: i18n.Node[] = [];
|
||||
for (let i = 0; i < strings.length - 1; i++) {
|
||||
this._addText(nodes, strings[i], sourceSpan);
|
||||
this._addPlaceholder(nodes, context, expressions[i], sourceSpan);
|
||||
for (const token of tokens) {
|
||||
switch (token.type) {
|
||||
case TokenType.INTERPOLATION:
|
||||
case TokenType.ATTR_VALUE_INTERPOLATION:
|
||||
const expression = token.parts[1];
|
||||
const baseName = extractPlaceholderName(expression) || 'INTERPOLATION';
|
||||
const phName = context.placeholderRegistry.getPlaceholderName(baseName, expression);
|
||||
context.placeholderToContent[phName] = {
|
||||
text: token.parts.join(''),
|
||||
sourceSpan: token.sourceSpan
|
||||
};
|
||||
nodes.push(new i18n.Placeholder(expression, phName, token.sourceSpan));
|
||||
break;
|
||||
default:
|
||||
if (token.parts[0].length > 0) {
|
||||
nodes.push(new i18n.Text(token.parts[0], token.sourceSpan));
|
||||
}
|
||||
break;
|
||||
}
|
||||
}
|
||||
// The last index contains no expression
|
||||
this._addText(nodes, strings[strings.length - 1], sourceSpan);
|
||||
|
||||
// Whitespace removal may have invalidated the interpolation source-spans.
|
||||
reusePreviousSourceSpans(nodes, previousI18n);
|
||||
|
||||
return new i18n.Container(nodes, sourceSpan);
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a new `Text` node from the `textPiece` and add it to the `nodes` collection.
|
||||
*
|
||||
* @param nodes The nodes to which the created `Text` node should be added.
|
||||
* @param textPiece The text and relative span information for this `Text` node.
|
||||
* @param interpolationSpan The span of the whole interpolated text.
|
||||
*/
|
||||
private _addText(
|
||||
nodes: i18n.Node[], textPiece: InterpolationPiece, interpolationSpan: ParseSourceSpan): void {
|
||||
if (textPiece.text.length > 0) {
|
||||
// No need to add empty strings
|
||||
const stringSpan = getOffsetSourceSpan(interpolationSpan, textPiece);
|
||||
nodes.push(new i18n.Text(textPiece.text, stringSpan));
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a new `Placeholder` node from the `expression` and add it to the `nodes` collection.
|
||||
*
|
||||
* @param nodes The nodes to which the created `Text` node should be added.
|
||||
* @param context The current context of the visitor, used to compute and store placeholders.
|
||||
* @param expression The expression text and relative span information for this `Placeholder`
|
||||
* node.
|
||||
* @param interpolationSpan The span of the whole interpolated text.
|
||||
*/
|
||||
private _addPlaceholder(
|
||||
nodes: i18n.Node[], context: I18nMessageVisitorContext, expression: InterpolationPiece,
|
||||
interpolationSpan: ParseSourceSpan): void {
|
||||
const sourceSpan = getOffsetSourceSpan(interpolationSpan, expression);
|
||||
const baseName = extractPlaceholderName(expression.text) || 'INTERPOLATION';
|
||||
const phName = context.placeholderRegistry.getPlaceholderName(baseName, expression.text);
|
||||
const text = this._interpolationConfig.start + expression.text + this._interpolationConfig.end;
|
||||
context.placeholderToContent[phName] = {text, sourceSpan};
|
||||
nodes.push(new i18n.Placeholder(expression.text, phName, sourceSpan));
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -247,7 +223,7 @@ function reusePreviousSourceSpans(nodes: i18n.Node[], previousI18n: i18n.I18nMet
|
|||
|
||||
if (previousI18n instanceof i18n.Container) {
|
||||
// The `previousI18n` is a `Container`, which means that this is a second i18n extraction pass
|
||||
// after whitespace has been removed from the AST ndoes.
|
||||
// after whitespace has been removed from the AST nodes.
|
||||
assertEquivalentNodes(previousI18n.children, nodes);
|
||||
|
||||
// Reuse the source-spans from the first pass.
|
||||
|
@ -282,14 +258,6 @@ function assertEquivalentNodes(previousNodes: i18n.Node[], nodes: i18n.Node[]):
|
|||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a new `ParseSourceSpan` from the `sourceSpan`, offset by the `start` and `end` values.
|
||||
*/
|
||||
function getOffsetSourceSpan(
|
||||
sourceSpan: ParseSourceSpan, {start, end}: InterpolationPiece): ParseSourceSpan {
|
||||
return new ParseSourceSpan(sourceSpan.fullStart.moveBy(start), sourceSpan.fullStart.moveBy(end));
|
||||
}
|
||||
|
||||
const _CUSTOM_PH_EXP =
|
||||
/\/\/[\s\S]*i18n[\s\S]*\([\s\S]*ph[\s\S]*=[\s\S]*("|')([\s\S]*?)\1[\s\S]*\)/g;
|
||||
|
||||
|
|
|
@ -230,8 +230,11 @@ class _Tokenizer {
|
|||
this._consumeTagOpen(start);
|
||||
}
|
||||
} else if (!(this._tokenizeIcu && this._tokenizeExpansionForm())) {
|
||||
// In (possibly interpolated) text the end of the text is given by `isTextEnd()`, while
|
||||
// the premature end of an interpolation is given by the start of a new HTML element.
|
||||
this._consumeWithInterpolation(
|
||||
TokenType.TEXT, TokenType.INTERPOLATION, () => this._isTextEnd());
|
||||
TokenType.TEXT, TokenType.INTERPOLATION, () => this._isTextEnd(),
|
||||
() => this._isTagStart());
|
||||
}
|
||||
} catch (e) {
|
||||
this.handleError(e);
|
||||
|
@ -608,14 +611,18 @@ class _Tokenizer {
|
|||
if (this._cursor.peek() === chars.$SQ || this._cursor.peek() === chars.$DQ) {
|
||||
const quoteChar = this._cursor.peek();
|
||||
this._consumeQuote(quoteChar);
|
||||
// In an attribute the end of the attribute value and the premature end to an interpolation
|
||||
// are both triggered by the `quoteChar`.
|
||||
const endPredicate = () => this._cursor.peek() === quoteChar;
|
||||
this._consumeWithInterpolation(
|
||||
TokenType.ATTR_VALUE_TEXT, TokenType.ATTR_VALUE_INTERPOLATION,
|
||||
() => this._cursor.peek() === quoteChar);
|
||||
TokenType.ATTR_VALUE_TEXT, TokenType.ATTR_VALUE_INTERPOLATION, endPredicate,
|
||||
endPredicate);
|
||||
this._consumeQuote(quoteChar);
|
||||
} else {
|
||||
const endPredicate = () => isNameEnd(this._cursor.peek());
|
||||
this._consumeWithInterpolation(
|
||||
TokenType.ATTR_VALUE_TEXT, TokenType.ATTR_VALUE_INTERPOLATION, endPredicate);
|
||||
TokenType.ATTR_VALUE_TEXT, TokenType.ATTR_VALUE_INTERPOLATION, endPredicate,
|
||||
endPredicate);
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -705,15 +712,21 @@ class _Tokenizer {
|
|||
|
||||
/**
|
||||
* Consume a string that may contain interpolation expressions.
|
||||
*
|
||||
* The first token consumed will be of `tokenType` and then there will be alternating
|
||||
* `interpolationTokenType` and `tokenType` tokens until the `endPredicate()` returns true.
|
||||
*
|
||||
* If an interpolation token ends prematurely it will have no end marker in its `parts` array.
|
||||
*
|
||||
* @param textTokenType the kind of tokens to interleave around interpolation tokens.
|
||||
* @param interpolationTokenType the kind of tokens that contain interpolation.
|
||||
* @param endPredicate a function that should return true when we should stop consuming.
|
||||
* @param endInterpolation a function that should return true if there is a premature end to an
|
||||
* interpolation expression - i.e. before we get to the normal interpolation closing marker.
|
||||
*/
|
||||
private _consumeWithInterpolation(
|
||||
textTokenType: TokenType, interpolationTokenType: TokenType, endPredicate: () => boolean) {
|
||||
textTokenType: TokenType, interpolationTokenType: TokenType, endPredicate: () => boolean,
|
||||
endInterpolation: () => boolean) {
|
||||
this._beginToken(textTokenType);
|
||||
const parts: string[] = [];
|
||||
|
||||
|
@ -722,7 +735,7 @@ class _Tokenizer {
|
|||
if (this._interpolationConfig && this._attemptStr(this._interpolationConfig.start)) {
|
||||
this._endToken([this._processCarriageReturns(parts.join(''))], current);
|
||||
parts.length = 0;
|
||||
this._consumeInterpolation(interpolationTokenType, current);
|
||||
this._consumeInterpolation(interpolationTokenType, current, endInterpolation);
|
||||
this._beginToken(textTokenType);
|
||||
} else if (this._cursor.peek() === chars.$AMPERSAND) {
|
||||
this._endToken([this._processCarriageReturns(parts.join(''))]);
|
||||
|
@ -741,8 +754,17 @@ class _Tokenizer {
|
|||
this._endToken([this._processCarriageReturns(parts.join(''))]);
|
||||
}
|
||||
|
||||
/**
|
||||
* Consume a block of text that has been interpreted as an Angular interpolation.
|
||||
*
|
||||
* @param interpolationTokenType the type of the interpolation token to generate.
|
||||
* @param interpolationStart a cursor that points to the start of this interpolation.
|
||||
* @param prematureEndPredicate a function that should return true if the next characters indicate
|
||||
* an end to the interpolation before its normal closing marker.
|
||||
*/
|
||||
private _consumeInterpolation(
|
||||
interpolationTokenType: TokenType, interpolationStart: CharacterCursor) {
|
||||
interpolationTokenType: TokenType, interpolationStart: CharacterCursor,
|
||||
prematureEndPredicate: (() => boolean)|null) {
|
||||
const parts: string[] = [];
|
||||
this._beginToken(interpolationTokenType, interpolationStart);
|
||||
parts.push(this._interpolationConfig.start);
|
||||
|
@ -751,7 +773,8 @@ class _Tokenizer {
|
|||
const expressionStart = this._cursor.clone();
|
||||
let inQuote: number|null = null;
|
||||
let inComment = false;
|
||||
while (this._cursor.peek() !== chars.$EOF) {
|
||||
while (this._cursor.peek() !== chars.$EOF &&
|
||||
(prematureEndPredicate === null || !prematureEndPredicate())) {
|
||||
const current = this._cursor.clone();
|
||||
|
||||
if (this._isTagStart()) {
|
||||
|
@ -783,7 +806,7 @@ class _Tokenizer {
|
|||
} else if (char === inQuote) {
|
||||
// Exiting the current quoted string
|
||||
inQuote = null;
|
||||
} else if (!inComment && chars.isQuote(char)) {
|
||||
} else if (!inComment && inQuote === null && chars.isQuote(char)) {
|
||||
// Entering a new quoted string
|
||||
inQuote = char;
|
||||
}
|
||||
|
@ -795,7 +818,7 @@ class _Tokenizer {
|
|||
}
|
||||
|
||||
private _getProcessedChars(start: CharacterCursor, end: CharacterCursor): string {
|
||||
return this._processCarriageReturns(end.getChars(start))
|
||||
return this._processCarriageReturns(end.getChars(start));
|
||||
}
|
||||
|
||||
private _isTextEnd(): boolean {
|
||||
|
|
|
@ -35,7 +35,10 @@ class LocalizeSerializerVisitor implements i18n.Visitor {
|
|||
// Two literal pieces in a row means that there was some comment node in-between.
|
||||
context[context.length - 1].text += text.value;
|
||||
} else {
|
||||
context.push(new o.LiteralPiece(text.value, text.sourceSpan));
|
||||
const sourceSpan = new ParseSourceSpan(
|
||||
text.sourceSpan.fullStart, text.sourceSpan.end, text.sourceSpan.fullStart,
|
||||
text.sourceSpan.details);
|
||||
context.push(new o.LiteralPiece(text.value, sourceSpan));
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -90,7 +93,7 @@ function getSourceSpan(message: i18n.Message): ParseSourceSpan {
|
|||
const startNode = message.nodes[0];
|
||||
const endNode = message.nodes[message.nodes.length - 1];
|
||||
return new ParseSourceSpan(
|
||||
startNode.sourceSpan.start, endNode.sourceSpan.end, startNode.sourceSpan.fullStart,
|
||||
startNode.sourceSpan.fullStart, endNode.sourceSpan.end, startNode.sourceSpan.fullStart,
|
||||
startNode.sourceSpan.details);
|
||||
}
|
||||
|
||||
|
|
|
@ -316,6 +316,32 @@ import {ParseLocation, ParseSourceFile, ParseSourceSpan} from '../../src/parse_u
|
|||
]);
|
||||
});
|
||||
|
||||
it('should end interpolation on an unescaped matching quote', () => {
|
||||
expect(tokenizeAndHumanizeParts('<t a="{{ a \\" \' b ">')).toEqual([
|
||||
[lex.TokenType.TAG_OPEN_START, '', 't'],
|
||||
[lex.TokenType.ATTR_NAME, '', 'a'],
|
||||
[lex.TokenType.ATTR_QUOTE, '"'],
|
||||
[lex.TokenType.ATTR_VALUE_TEXT, ''],
|
||||
[lex.TokenType.ATTR_VALUE_INTERPOLATION, '{{', ' a \\" \' b '],
|
||||
[lex.TokenType.ATTR_VALUE_TEXT, ''],
|
||||
[lex.TokenType.ATTR_QUOTE, '"'],
|
||||
[lex.TokenType.TAG_OPEN_END],
|
||||
[lex.TokenType.EOF],
|
||||
]);
|
||||
|
||||
expect(tokenizeAndHumanizeParts('<t a=\'{{ a " \\\' b \'>')).toEqual([
|
||||
[lex.TokenType.TAG_OPEN_START, '', 't'],
|
||||
[lex.TokenType.ATTR_NAME, '', 'a'],
|
||||
[lex.TokenType.ATTR_QUOTE, '\''],
|
||||
[lex.TokenType.ATTR_VALUE_TEXT, ''],
|
||||
[lex.TokenType.ATTR_VALUE_INTERPOLATION, '{{', ' a " \\\' b '],
|
||||
[lex.TokenType.ATTR_VALUE_TEXT, ''],
|
||||
[lex.TokenType.ATTR_QUOTE, '\''],
|
||||
[lex.TokenType.TAG_OPEN_END],
|
||||
[lex.TokenType.EOF],
|
||||
]);
|
||||
});
|
||||
|
||||
it('should parse attributes with prefix', () => {
|
||||
expect(tokenizeAndHumanizeParts('<t ns1:a>')).toEqual([
|
||||
[lex.TokenType.TAG_OPEN_START, '', 't'],
|
||||
|
@ -593,14 +619,15 @@ import {ParseLocation, ParseSourceFile, ParseSourceSpan} from '../../src/parse_u
|
|||
});
|
||||
|
||||
it('should parse interpolation', () => {
|
||||
expect(tokenizeAndHumanizeParts('{{ a }}b{{ c // comment }}d{{ e "}}" f }}g{{ h // " i }}'))
|
||||
expect(tokenizeAndHumanizeParts(
|
||||
'{{ a }}b{{ c // comment }}d{{ e "}} \' " f }}g{{ h // " i }}'))
|
||||
.toEqual([
|
||||
[lex.TokenType.TEXT, ''],
|
||||
[lex.TokenType.INTERPOLATION, '{{', ' a ', '}}'],
|
||||
[lex.TokenType.TEXT, 'b'],
|
||||
[lex.TokenType.INTERPOLATION, '{{', ' c // comment ', '}}'],
|
||||
[lex.TokenType.TEXT, 'd'],
|
||||
[lex.TokenType.INTERPOLATION, '{{', ' e "}}" f ', '}}'],
|
||||
[lex.TokenType.INTERPOLATION, '{{', ' e "}} \' " f ', '}}'],
|
||||
[lex.TokenType.TEXT, 'g'],
|
||||
[lex.TokenType.INTERPOLATION, '{{', ' h // " i ', '}}'],
|
||||
[lex.TokenType.TEXT, ''],
|
||||
|
@ -735,6 +762,18 @@ import {ParseLocation, ParseSourceFile, ParseSourceSpan} from '../../src/parse_u
|
|||
]);
|
||||
});
|
||||
|
||||
it('should end interpolation on a valid closing tag', () => {
|
||||
expect(tokenizeAndHumanizeParts('<p>{{ a </p>')).toEqual([
|
||||
[lex.TokenType.TAG_OPEN_START, '', 'p'],
|
||||
[lex.TokenType.TAG_OPEN_END],
|
||||
[lex.TokenType.TEXT, ''],
|
||||
[lex.TokenType.INTERPOLATION, '{{', ' a '],
|
||||
[lex.TokenType.TEXT, ''],
|
||||
[lex.TokenType.TAG_CLOSE, '', 'p'],
|
||||
[lex.TokenType.EOF],
|
||||
]);
|
||||
});
|
||||
|
||||
it('should break out of interpolation in text token on valid CDATA', () => {
|
||||
expect(tokenizeAndHumanizeParts('{{ a }<![CDATA[]]>}')).toEqual([
|
||||
[lex.TokenType.TEXT, ''],
|
||||
|
|
|
@ -478,7 +478,7 @@ describe('serializeI18nMessageForLocalize', () => {
|
|||
expect(messageParts[3].text).toEqual('');
|
||||
expect(messageParts[3].sourceSpan.toString()).toEqual('');
|
||||
expect(messageParts[4].text).toEqual(' D');
|
||||
expect(messageParts[4].sourceSpan.toString()).toEqual('D');
|
||||
expect(messageParts[4].sourceSpan.toString()).toEqual(' D');
|
||||
|
||||
expect(placeHolders[0].text).toEqual('START_TAG_SPAN');
|
||||
expect(placeHolders[0].sourceSpan.toString()).toEqual('<span>');
|
||||
|
|
Loading…
Reference in New Issue